Extend tree code folds to IFN_COND_*
[official-gcc.git] / gcc / tree-ssa-sccvn.c
blob 4a2f70293b885bf6ba419bca62b760bdf9baadb6
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2018 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimplify.h"
45 #include "flags.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "dumpfile.h"
55 #include "cfgloop.h"
56 #include "params.h"
57 #include "tree-ssa-propagate.h"
58 #include "tree-cfg.h"
59 #include "domwalk.h"
60 #include "gimple-iterator.h"
61 #include "gimple-match.h"
62 #include "stringpool.h"
63 #include "attribs.h"
64 #include "tree-pass.h"
65 #include "statistics.h"
66 #include "langhooks.h"
67 #include "ipa-utils.h"
68 #include "dbgcnt.h"
69 #include "tree-cfgcleanup.h"
70 #include "tree-ssa-loop.h"
71 #include "tree-scalar-evolution.h"
72 #include "tree-ssa-sccvn.h"
74 /* This algorithm is based on the SCC algorithm presented by Keith
75 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
76 (http://citeseer.ist.psu.edu/41805.html). In
77 straight line code, it is equivalent to a regular hash based value
78 numbering that is performed in reverse postorder.
80 For code with cycles, there are two alternatives, both of which
81 require keeping the hashtables separate from the actual list of
82 value numbers for SSA names.
84 1. Iterate value numbering in an RPO walk of the blocks, removing
85 all the entries from the hashtable after each iteration (but
86 keeping the SSA name->value number mapping between iterations).
87 Iterate until it does not change.
89 2. Perform value numbering as part of an SCC walk on the SSA graph,
90 iterating only the cycles in the SSA graph until they do not change
91 (using a separate, optimistic hashtable for value numbering the SCC
92 operands).
94 The second is not just faster in practice (because most SSA graph
95 cycles do not involve all the variables in the graph), it also has
96 some nice properties.
98 One of these nice properties is that when we pop an SCC off the
99 stack, we are guaranteed to have processed all the operands coming from
100 *outside of that SCC*, so we do not need to do anything special to
101 ensure they have value numbers.
103 Another nice property is that the SCC walk is done as part of a DFS
104 of the SSA graph, which makes it easy to perform combining and
105 simplifying operations at the same time.
107 The code below is deliberately written in a way that makes it easy
108 to separate the SCC walk from the other work it does.
110 In order to propagate constants through the code, we track which
111 expressions contain constants, and use those while folding. In
112 theory, we could also track expressions whose value numbers are
113 replaced, in case we end up folding based on expression
114 identities.
116 In order to value number memory, we assign value numbers to vuses.
117 This enables us to note that, for example, stores to the same
118 address of the same value from the same starting memory states are
119 equivalent.
120 TODO:
122 1. We can iterate only the changing portions of the SCCs, but
123 I have not seen an SCC big enough for this to be a win.
124 2. If you differentiate between phi nodes for loops and phi nodes
125 for if-then-else, you can properly consider phi nodes in different
126 blocks for equivalence.
127 3. We could value number vuses in more cases, particularly, whole
128 structure copies.
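/* An illustrative sketch of the optimistic iteration described above
   (loose GIMPLE, for exposition only): for a loop counter such as

     i_1 = PHI <0 (preheader), i_2 (latch)>;
     i_2 = i_1 + 0;

   the names i_1 and i_2 form a cycle in the SSA graph.  The SCC walk value
   numbers the cycle optimistically, treating the not-yet-visited PHI
   operand as VN_TOP on the first iteration, and re-checks until the
   numbering stabilizes; here both names end up with the value number of
   the constant 0.  */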
132 static tree *last_vuse_ptr;
133 static vn_lookup_kind vn_walk_kind;
134 static vn_lookup_kind default_vn_walk_kind;
135 bitmap const_parms;
137 /* vn_nary_op hashtable helpers. */
139 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
141 typedef vn_nary_op_s *compare_type;
142 static inline hashval_t hash (const vn_nary_op_s *);
143 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
146 /* Return the computed hashcode for nary operation P1. */
148 inline hashval_t
149 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
151 return vno1->hashcode;
154 /* Compare nary operations P1 and P2 and return true if they are
155 equivalent. */
157 inline bool
158 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
160 return vn_nary_op_eq (vno1, vno2);
163 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
164 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
167 /* vn_phi hashtable helpers. */
169 static int
170 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
172 struct vn_phi_hasher : pointer_hash <vn_phi_s>
174 static inline hashval_t hash (const vn_phi_s *);
175 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
176 static inline void remove (vn_phi_s *);
179 /* Return the computed hashcode for phi operation P1. */
181 inline hashval_t
182 vn_phi_hasher::hash (const vn_phi_s *vp1)
184 return vp1->hashcode;
187 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
189 inline bool
190 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
192 return vn_phi_eq (vp1, vp2);
195 /* Free a phi operation structure VP. */
197 inline void
198 vn_phi_hasher::remove (vn_phi_s *phi)
200 phi->phiargs.release ();
203 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
204 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
207 /* Compare two reference operands P1 and P2 for equality. Return true if
208 they are equal, and false otherwise. */
210 static int
211 vn_reference_op_eq (const void *p1, const void *p2)
213 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
214 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
216 return (vro1->opcode == vro2->opcode
217 /* We do not care for differences in type qualification. */
218 && (vro1->type == vro2->type
219 || (vro1->type && vro2->type
220 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
221 TYPE_MAIN_VARIANT (vro2->type))))
222 && expressions_equal_p (vro1->op0, vro2->op0)
223 && expressions_equal_p (vro1->op1, vro2->op1)
224 && expressions_equal_p (vro1->op2, vro2->op2));
227 /* Free a reference operation structure VP. */
229 static inline void
230 free_reference (vn_reference_s *vr)
232 vr->operands.release ();
236 /* vn_reference hashtable helpers. */
238 struct vn_reference_hasher : pointer_hash <vn_reference_s>
240 static inline hashval_t hash (const vn_reference_s *);
241 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
242 static inline void remove (vn_reference_s *);
245 /* Return the hashcode for a given reference operation P1. */
247 inline hashval_t
248 vn_reference_hasher::hash (const vn_reference_s *vr1)
250 return vr1->hashcode;
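/* Return true if the reference operations V and C are equivalent. */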
253 inline bool
254 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
256 return vn_reference_eq (v, c);
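/* Free the operands of reference V when it is removed from the hash table. */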
259 inline void
260 vn_reference_hasher::remove (vn_reference_s *v)
262 free_reference (v);
265 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
266 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
269 /* The set of hashtables and alloc_pool's for their items. */
271 typedef struct vn_tables_s
273 vn_nary_op_table_type *nary;
274 vn_phi_table_type *phis;
275 vn_reference_table_type *references;
276 struct obstack nary_obstack;
277 object_allocator<vn_phi_s> *phis_pool;
278 object_allocator<vn_reference_s> *references_pool;
279 } *vn_tables_t;
282 /* vn_constant hashtable helpers. */
284 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
286 static inline hashval_t hash (const vn_constant_s *);
287 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
290 /* Hash table hash function for vn_constant_t. */
292 inline hashval_t
293 vn_constant_hasher::hash (const vn_constant_s *vc1)
295 return vc1->hashcode;
298 /* Hash table equality function for vn_constant_t. */
300 inline bool
301 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
303 if (vc1->hashcode != vc2->hashcode)
304 return false;
306 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
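/* Table mapping constants to their value ids, and the set of value ids
   that have been allocated for constants. */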
309 static hash_table<vn_constant_hasher> *constant_to_value_id;
310 static bitmap constant_value_ids;
313 /* Valid hashtables storing information we have proven to be
314 correct. */
316 static vn_tables_t valid_info;
318 /* Optimistic hashtables storing information we are making assumptions about
319 during iterations. */
321 static vn_tables_t optimistic_info;
323 /* Pointer to the set of hashtables that is currently being used.
324 Should always point to either the optimistic_info, or the
325 valid_info. */
327 static vn_tables_t current_info;
330 /* Reverse post order index for each basic block. */
332 static int *rpo_numbers;
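/* Shorthand for the current value number recorded for SSA name X. */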
334 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
336 /* Return the SSA value of the VUSE x, supporting released VDEFs
337 during elimination which will value-number the VDEF to the
338 associated VUSE (but not substitute in the whole lattice). */
340 static inline tree
341 vuse_ssa_val (tree x)
343 if (!x)
344 return NULL_TREE;
348 tree tem = SSA_VAL (x);
349 /* stmt walking can walk over a backedge and reach code we didn't
350 value-number yet. */
351 if (tem == VN_TOP)
352 return x;
353 x = tem;
355 while (SSA_NAME_IN_FREE_LIST (x));
357 return x;
360 /* This represents the top of the VN lattice, which is the universal
361 value. */
363 tree VN_TOP;
365 /* Unique counter for our value ids. */
367 static unsigned int next_value_id;
369 /* Next DFS number and the stack for strongly connected component
370 detection. */
372 static unsigned int next_dfs_num;
373 static vec<tree> sccstack;
377 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
378 are allocated on an obstack for locality reasons, and to free them
379 without looping over the vec. */
381 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
382 static struct obstack vn_ssa_aux_obstack;
384 /* Return whether there is value numbering information for a given SSA name. */
386 bool
387 has_VN_INFO (tree name)
389 if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
390 return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
391 return false;
394 /* Return the value numbering information for a given SSA name. */
396 vn_ssa_aux_t
397 VN_INFO (tree name)
399 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
400 gcc_checking_assert (res);
401 return res;
404 /* Set the value numbering info for a given SSA name to a given
405 value. */
407 static inline void
408 VN_INFO_SET (tree name, vn_ssa_aux_t value)
410 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
413 /* Initialize the value numbering info for a given SSA name.
414 This should be called just once for every SSA name. */
416 vn_ssa_aux_t
417 VN_INFO_GET (tree name)
419 vn_ssa_aux_t newinfo;
421 gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
422 || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
423 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
424 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
425 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
426 vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
427 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
428 return newinfo;
432 /* Return the vn_kind the expression computed by the stmt should be
433 associated with. */
435 enum vn_kind
436 vn_get_stmt_kind (gimple *stmt)
438 switch (gimple_code (stmt))
440 case GIMPLE_CALL:
441 return VN_REFERENCE;
442 case GIMPLE_PHI:
443 return VN_PHI;
444 case GIMPLE_ASSIGN:
446 enum tree_code code = gimple_assign_rhs_code (stmt);
447 tree rhs1 = gimple_assign_rhs1 (stmt);
448 switch (get_gimple_rhs_class (code))
450 case GIMPLE_UNARY_RHS:
451 case GIMPLE_BINARY_RHS:
452 case GIMPLE_TERNARY_RHS:
453 return VN_NARY;
454 case GIMPLE_SINGLE_RHS:
455 switch (TREE_CODE_CLASS (code))
457 case tcc_reference:
458 /* VOP-less references can go through the unary case. */
459 if ((code == REALPART_EXPR
460 || code == IMAGPART_EXPR
461 || code == VIEW_CONVERT_EXPR
462 || code == BIT_FIELD_REF)
463 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
464 return VN_NARY;
466 /* Fallthrough. */
467 case tcc_declaration:
468 return VN_REFERENCE;
470 case tcc_constant:
471 return VN_CONSTANT;
473 default:
474 if (code == ADDR_EXPR)
475 return (is_gimple_min_invariant (rhs1)
476 ? VN_CONSTANT : VN_REFERENCE);
477 else if (code == CONSTRUCTOR)
478 return VN_NARY;
479 return VN_NONE;
481 default:
482 return VN_NONE;
485 default:
486 return VN_NONE;
490 /* Lookup a value id for CONSTANT and return it. If it does not
491 exist returns 0. */
493 unsigned int
494 get_constant_value_id (tree constant)
496 vn_constant_s **slot;
497 struct vn_constant_s vc;
499 vc.hashcode = vn_hash_constant_with_type (constant);
500 vc.constant = constant;
501 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
502 if (slot)
503 return (*slot)->value_id;
504 return 0;
507 /* Lookup a value id for CONSTANT, and if it does not exist, create a
508 new one and return it. If it does exist, return it. */
510 unsigned int
511 get_or_alloc_constant_value_id (tree constant)
513 vn_constant_s **slot;
514 struct vn_constant_s vc;
515 vn_constant_t vcp;
517 vc.hashcode = vn_hash_constant_with_type (constant);
518 vc.constant = constant;
519 slot = constant_to_value_id->find_slot (&vc, INSERT);
520 if (*slot)
521 return (*slot)->value_id;
523 vcp = XNEW (struct vn_constant_s);
524 vcp->hashcode = vc.hashcode;
525 vcp->constant = constant;
526 vcp->value_id = get_next_value_id ();
527 *slot = vcp;
528 bitmap_set_bit (constant_value_ids, vcp->value_id);
529 return vcp->value_id;
532 /* Return true if V is a value id for a constant. */
534 bool
535 value_id_constant_p (unsigned int v)
537 return bitmap_bit_p (constant_value_ids, v);
540 /* Compute the hash for a reference operand VRO1. */
542 static void
543 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
545 hstate.add_int (vro1->opcode);
546 if (vro1->op0)
547 inchash::add_expr (vro1->op0, hstate);
548 if (vro1->op1)
549 inchash::add_expr (vro1->op1, hstate);
550 if (vro1->op2)
551 inchash::add_expr (vro1->op2, hstate);
554 /* Compute a hash for the reference operation VR1 and return it. */
556 static hashval_t
557 vn_reference_compute_hash (const vn_reference_t vr1)
559 inchash::hash hstate;
560 hashval_t result;
561 int i;
562 vn_reference_op_t vro;
563 poly_int64 off = -1;
564 bool deref = false;
566 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
568 if (vro->opcode == MEM_REF)
569 deref = true;
570 else if (vro->opcode != ADDR_EXPR)
571 deref = false;
572 if (maybe_ne (vro->off, -1))
574 if (known_eq (off, -1))
575 off = 0;
576 off += vro->off;
578 else
580 if (maybe_ne (off, -1)
581 && maybe_ne (off, 0))
582 hstate.add_poly_int (off);
583 off = -1;
584 if (deref
585 && vro->opcode == ADDR_EXPR)
587 if (vro->op0)
589 tree op = TREE_OPERAND (vro->op0, 0);
590 hstate.add_int (TREE_CODE (op));
591 inchash::add_expr (op, hstate);
594 else
595 vn_reference_op_compute_hash (vro, hstate);
598 result = hstate.end ();
599 /* ??? We would ICE later if we hash instead of adding that in. */
600 if (vr1->vuse)
601 result += SSA_NAME_VERSION (vr1->vuse);
603 return result;
606 /* Return true if reference operations VR1 and VR2 are equivalent. This
607 means they have the same set of operands and vuses. */
609 bool
610 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
612 unsigned i, j;
614 /* Early out if this is not a hash collision. */
615 if (vr1->hashcode != vr2->hashcode)
616 return false;
618 /* The VOP needs to be the same. */
619 if (vr1->vuse != vr2->vuse)
620 return false;
622 /* If the operands are the same we are done. */
623 if (vr1->operands == vr2->operands)
624 return true;
626 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
627 return false;
629 if (INTEGRAL_TYPE_P (vr1->type)
630 && INTEGRAL_TYPE_P (vr2->type))
632 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
633 return false;
635 else if (INTEGRAL_TYPE_P (vr1->type)
636 && (TYPE_PRECISION (vr1->type)
637 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
638 return false;
639 else if (INTEGRAL_TYPE_P (vr2->type)
640 && (TYPE_PRECISION (vr2->type)
641 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
642 return false;
644 i = 0;
645 j = 0;
648 poly_int64 off1 = 0, off2 = 0;
649 vn_reference_op_t vro1, vro2;
650 vn_reference_op_s tem1, tem2;
651 bool deref1 = false, deref2 = false;
652 for (; vr1->operands.iterate (i, &vro1); i++)
654 if (vro1->opcode == MEM_REF)
655 deref1 = true;
656 /* Do not look through a storage order barrier. */
657 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
658 return false;
659 if (known_eq (vro1->off, -1))
660 break;
661 off1 += vro1->off;
663 for (; vr2->operands.iterate (j, &vro2); j++)
665 if (vro2->opcode == MEM_REF)
666 deref2 = true;
667 /* Do not look through a storage order barrier. */
668 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
669 return false;
670 if (known_eq (vro2->off, -1))
671 break;
672 off2 += vro2->off;
674 if (maybe_ne (off1, off2))
675 return false;
676 if (deref1 && vro1->opcode == ADDR_EXPR)
678 memset (&tem1, 0, sizeof (tem1));
679 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
680 tem1.type = TREE_TYPE (tem1.op0);
681 tem1.opcode = TREE_CODE (tem1.op0);
682 vro1 = &tem1;
683 deref1 = false;
685 if (deref2 && vro2->opcode == ADDR_EXPR)
687 memset (&tem2, 0, sizeof (tem2));
688 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
689 tem2.type = TREE_TYPE (tem2.op0);
690 tem2.opcode = TREE_CODE (tem2.op0);
691 vro2 = &tem2;
692 deref2 = false;
694 if (deref1 != deref2)
695 return false;
696 if (!vn_reference_op_eq (vro1, vro2))
697 return false;
698 ++j;
699 ++i;
701 while (vr1->operands.length () != i
702 || vr2->operands.length () != j);
704 return true;
707 /* Copy the operations present in load/store REF into RESULT, a vector of
708 vn_reference_op_s's. */
710 static void
711 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
713 if (TREE_CODE (ref) == TARGET_MEM_REF)
715 vn_reference_op_s temp;
717 result->reserve (3);
719 memset (&temp, 0, sizeof (temp));
720 temp.type = TREE_TYPE (ref);
721 temp.opcode = TREE_CODE (ref);
722 temp.op0 = TMR_INDEX (ref);
723 temp.op1 = TMR_STEP (ref);
724 temp.op2 = TMR_OFFSET (ref);
725 temp.off = -1;
726 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
727 temp.base = MR_DEPENDENCE_BASE (ref);
728 result->quick_push (temp);
730 memset (&temp, 0, sizeof (temp));
731 temp.type = NULL_TREE;
732 temp.opcode = ERROR_MARK;
733 temp.op0 = TMR_INDEX2 (ref);
734 temp.off = -1;
735 result->quick_push (temp);
737 memset (&temp, 0, sizeof (temp));
738 temp.type = NULL_TREE;
739 temp.opcode = TREE_CODE (TMR_BASE (ref));
740 temp.op0 = TMR_BASE (ref);
741 temp.off = -1;
742 result->quick_push (temp);
743 return;
746 /* For non-calls, store the information that makes up the address. */
747 tree orig = ref;
748 while (ref)
750 vn_reference_op_s temp;
752 memset (&temp, 0, sizeof (temp));
753 temp.type = TREE_TYPE (ref);
754 temp.opcode = TREE_CODE (ref);
755 temp.off = -1;
757 switch (temp.opcode)
759 case MODIFY_EXPR:
760 temp.op0 = TREE_OPERAND (ref, 1);
761 break;
762 case WITH_SIZE_EXPR:
763 temp.op0 = TREE_OPERAND (ref, 1);
764 temp.off = 0;
765 break;
766 case MEM_REF:
767 /* The base address gets its own vn_reference_op_s structure. */
768 temp.op0 = TREE_OPERAND (ref, 1);
769 if (!mem_ref_offset (ref).to_shwi (&temp.off))
770 temp.off = -1;
771 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
772 temp.base = MR_DEPENDENCE_BASE (ref);
773 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
774 break;
775 case BIT_FIELD_REF:
776 /* Record bits, position and storage order. */
777 temp.op0 = TREE_OPERAND (ref, 1);
778 temp.op1 = TREE_OPERAND (ref, 2);
779 if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
780 temp.off = -1;
781 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
782 break;
783 case COMPONENT_REF:
784 /* The field decl is enough to unambiguously specify the field,
785 a matching type is not necessary and a mismatching type
786 is always a spurious difference. */
787 temp.type = NULL_TREE;
788 temp.op0 = TREE_OPERAND (ref, 1);
789 temp.op1 = TREE_OPERAND (ref, 2);
791 tree this_offset = component_ref_field_offset (ref);
792 if (this_offset
793 && poly_int_tree_p (this_offset))
795 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
796 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
798 poly_offset_int off
799 = (wi::to_poly_offset (this_offset)
800 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
801 /* Prohibit value-numbering zero offset components
802 of addresses the same before the pass folding
803 __builtin_object_size had a chance to run
804 (checking cfun->after_inlining does the
805 trick here). */
806 if (TREE_CODE (orig) != ADDR_EXPR
807 || maybe_ne (off, 0)
808 || cfun->after_inlining)
809 off.to_shwi (&temp.off);
813 break;
814 case ARRAY_RANGE_REF:
815 case ARRAY_REF:
817 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
818 /* Record index as operand. */
819 temp.op0 = TREE_OPERAND (ref, 1);
820 /* Always record lower bounds and element size. */
821 temp.op1 = array_ref_low_bound (ref);
822 /* But record element size in units of the type alignment. */
823 temp.op2 = TREE_OPERAND (ref, 3);
824 temp.align = eltype->type_common.align;
825 if (! temp.op2)
826 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
827 size_int (TYPE_ALIGN_UNIT (eltype)));
828 if (poly_int_tree_p (temp.op0)
829 && poly_int_tree_p (temp.op1)
830 && TREE_CODE (temp.op2) == INTEGER_CST)
832 poly_offset_int off = ((wi::to_poly_offset (temp.op0)
833 - wi::to_poly_offset (temp.op1))
834 * wi::to_offset (temp.op2)
835 * vn_ref_op_align_unit (&temp));
836 off.to_shwi (&temp.off);
839 break;
840 case VAR_DECL:
841 if (DECL_HARD_REGISTER (ref))
843 temp.op0 = ref;
844 break;
846 /* Fallthru. */
847 case PARM_DECL:
848 case CONST_DECL:
849 case RESULT_DECL:
850 /* Canonicalize decls to MEM[&decl] which is what we end up with
851 when valueizing MEM[ptr] with ptr = &decl. */
852 temp.opcode = MEM_REF;
853 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
854 temp.off = 0;
855 result->safe_push (temp);
856 temp.opcode = ADDR_EXPR;
857 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
858 temp.type = TREE_TYPE (temp.op0);
859 temp.off = -1;
860 break;
861 case STRING_CST:
862 case INTEGER_CST:
863 case COMPLEX_CST:
864 case VECTOR_CST:
865 case REAL_CST:
866 case FIXED_CST:
867 case CONSTRUCTOR:
868 case SSA_NAME:
869 temp.op0 = ref;
870 break;
871 case ADDR_EXPR:
872 if (is_gimple_min_invariant (ref))
874 temp.op0 = ref;
875 break;
877 break;
878 /* These are only interesting for their operands, their
879 existence, and their type. They will never be the last
880 ref in the chain of references (i.e. they require an
881 operand), so we don't have to put anything
882 for op* as it will be handled by the iteration */
883 case REALPART_EXPR:
884 temp.off = 0;
885 break;
886 case VIEW_CONVERT_EXPR:
887 temp.off = 0;
888 temp.reverse = storage_order_barrier_p (ref);
889 break;
890 case IMAGPART_EXPR:
891 /* This is only interesting for its constant offset. */
892 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
893 break;
894 default:
895 gcc_unreachable ();
897 result->safe_push (temp);
899 if (REFERENCE_CLASS_P (ref)
900 || TREE_CODE (ref) == MODIFY_EXPR
901 || TREE_CODE (ref) == WITH_SIZE_EXPR
902 || (TREE_CODE (ref) == ADDR_EXPR
903 && !is_gimple_min_invariant (ref)))
904 ref = TREE_OPERAND (ref, 0);
905 else
906 ref = NULL_TREE;
910 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
911 operands in *OPS, the reference alias set SET and the reference type TYPE.
912 Return true if something useful was produced. */
914 bool
915 ao_ref_init_from_vn_reference (ao_ref *ref,
916 alias_set_type set, tree type,
917 vec<vn_reference_op_s> ops)
919 vn_reference_op_t op;
920 unsigned i;
921 tree base = NULL_TREE;
922 tree *op0_p = &base;
923 poly_offset_int offset = 0;
924 poly_offset_int max_size;
925 poly_offset_int size = -1;
926 tree size_tree = NULL_TREE;
927 alias_set_type base_alias_set = -1;
929 /* First get the final access size from just the outermost expression. */
930 op = &ops[0];
931 if (op->opcode == COMPONENT_REF)
932 size_tree = DECL_SIZE (op->op0);
933 else if (op->opcode == BIT_FIELD_REF)
934 size_tree = op->op0;
935 else
937 machine_mode mode = TYPE_MODE (type);
938 if (mode == BLKmode)
939 size_tree = TYPE_SIZE (type);
940 else
941 size = GET_MODE_BITSIZE (mode);
943 if (size_tree != NULL_TREE
944 && poly_int_tree_p (size_tree))
945 size = wi::to_poly_offset (size_tree);
947 /* Initially, maxsize is the same as the accessed element size.
948 In the following it will only grow (or become -1). */
949 max_size = size;
951 /* Compute cumulative bit-offset for nested component-refs and array-refs,
952 and find the ultimate containing object. */
953 FOR_EACH_VEC_ELT (ops, i, op)
955 switch (op->opcode)
957 /* These may be in the reference ops, but we cannot do anything
958 sensible with them here. */
959 case ADDR_EXPR:
960 /* Apart from ADDR_EXPR arguments to MEM_REF. */
961 if (base != NULL_TREE
962 && TREE_CODE (base) == MEM_REF
963 && op->op0
964 && DECL_P (TREE_OPERAND (op->op0, 0)))
966 vn_reference_op_t pop = &ops[i-1];
967 base = TREE_OPERAND (op->op0, 0);
968 if (known_eq (pop->off, -1))
970 max_size = -1;
971 offset = 0;
973 else
974 offset += pop->off * BITS_PER_UNIT;
975 op0_p = NULL;
976 break;
978 /* Fallthru. */
979 case CALL_EXPR:
980 return false;
982 /* Record the base objects. */
983 case MEM_REF:
984 base_alias_set = get_deref_alias_set (op->op0);
985 *op0_p = build2 (MEM_REF, op->type,
986 NULL_TREE, op->op0);
987 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
988 MR_DEPENDENCE_BASE (*op0_p) = op->base;
989 op0_p = &TREE_OPERAND (*op0_p, 0);
990 break;
992 case VAR_DECL:
993 case PARM_DECL:
994 case RESULT_DECL:
995 case SSA_NAME:
996 *op0_p = op->op0;
997 op0_p = NULL;
998 break;
1000 /* And now the usual component-reference style ops. */
1001 case BIT_FIELD_REF:
1002 offset += wi::to_poly_offset (op->op1);
1003 break;
1005 case COMPONENT_REF:
1007 tree field = op->op0;
1008 /* We do not have a complete COMPONENT_REF tree here so we
1009 cannot use component_ref_field_offset. Do the interesting
1010 parts manually. */
1011 tree this_offset = DECL_FIELD_OFFSET (field);
1013 if (op->op1 || !poly_int_tree_p (this_offset))
1014 max_size = -1;
1015 else
1017 poly_offset_int woffset = (wi::to_poly_offset (this_offset)
1018 << LOG2_BITS_PER_UNIT);
1019 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1020 offset += woffset;
1022 break;
1025 case ARRAY_RANGE_REF:
1026 case ARRAY_REF:
1027 /* We recorded the lower bound and the element size. */
1028 if (!poly_int_tree_p (op->op0)
1029 || !poly_int_tree_p (op->op1)
1030 || TREE_CODE (op->op2) != INTEGER_CST)
1031 max_size = -1;
1032 else
1034 poly_offset_int woffset
1035 = wi::sext (wi::to_poly_offset (op->op0)
1036 - wi::to_poly_offset (op->op1),
1037 TYPE_PRECISION (TREE_TYPE (op->op0)));
1038 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1039 woffset <<= LOG2_BITS_PER_UNIT;
1040 offset += woffset;
1042 break;
1044 case REALPART_EXPR:
1045 break;
1047 case IMAGPART_EXPR:
1048 offset += size;
1049 break;
1051 case VIEW_CONVERT_EXPR:
1052 break;
1054 case STRING_CST:
1055 case INTEGER_CST:
1056 case COMPLEX_CST:
1057 case VECTOR_CST:
1058 case REAL_CST:
1059 case CONSTRUCTOR:
1060 case CONST_DECL:
1061 return false;
1063 default:
1064 return false;
1068 if (base == NULL_TREE)
1069 return false;
1071 ref->ref = NULL_TREE;
1072 ref->base = base;
1073 ref->ref_alias_set = set;
1074 if (base_alias_set != -1)
1075 ref->base_alias_set = base_alias_set;
1076 else
1077 ref->base_alias_set = get_alias_set (base);
1078 /* We discount volatiles from value-numbering elsewhere. */
1079 ref->volatile_p = false;
1081 if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1083 ref->offset = 0;
1084 ref->size = -1;
1085 ref->max_size = -1;
1086 return true;
1089 if (!offset.to_shwi (&ref->offset))
1091 ref->offset = 0;
1092 ref->max_size = -1;
1093 return true;
1096 if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1097 ref->max_size = -1;
1099 return true;
1102 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1103 vn_reference_op_s's. */
1105 static void
1106 copy_reference_ops_from_call (gcall *call,
1107 vec<vn_reference_op_s> *result)
1109 vn_reference_op_s temp;
1110 unsigned i;
1111 tree lhs = gimple_call_lhs (call);
1112 int lr;
1114 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1115 different. By adding the lhs here in the vector, we ensure that the
1116 hashcode is different, guaranteeing a different value number. */
1117 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1119 memset (&temp, 0, sizeof (temp));
1120 temp.opcode = MODIFY_EXPR;
1121 temp.type = TREE_TYPE (lhs);
1122 temp.op0 = lhs;
1123 temp.off = -1;
1124 result->safe_push (temp);
1127 /* Copy the type, opcode, function, static chain and EH region, if any. */
1128 memset (&temp, 0, sizeof (temp));
1129 temp.type = gimple_call_return_type (call);
1130 temp.opcode = CALL_EXPR;
1131 temp.op0 = gimple_call_fn (call);
1132 temp.op1 = gimple_call_chain (call);
1133 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1134 temp.op2 = size_int (lr);
1135 temp.off = -1;
1136 result->safe_push (temp);
1138 /* Copy the call arguments. As they can be references as well,
1139 just chain them together. */
1140 for (i = 0; i < gimple_call_num_args (call); ++i)
1142 tree callarg = gimple_call_arg (call, i);
1143 copy_reference_ops_from_ref (callarg, result);
1147 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1148 *I_P to point to the last element of the replacement. */
1149 static bool
1150 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1151 unsigned int *i_p)
1153 unsigned int i = *i_p;
1154 vn_reference_op_t op = &(*ops)[i];
1155 vn_reference_op_t mem_op = &(*ops)[i - 1];
1156 tree addr_base;
1157 poly_int64 addr_offset = 0;
1159 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1160 from .foo.bar to the preceding MEM_REF offset and replace the
1161 address with &OBJ. */
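/* For illustration (loose notation, not code from this function): the operand
   pair { MEM_REF off 4, ADDR_EXPR &s.a.b } is rewritten to
   { MEM_REF off 4 + byte-offset-of (s.a.b), ADDR_EXPR &s }. */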
1162 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1163 &addr_offset);
1164 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1165 if (addr_base != TREE_OPERAND (op->op0, 0))
1167 poly_offset_int off
1168 = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1169 SIGNED)
1170 + addr_offset);
1171 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1172 op->op0 = build_fold_addr_expr (addr_base);
1173 if (tree_fits_shwi_p (mem_op->op0))
1174 mem_op->off = tree_to_shwi (mem_op->op0);
1175 else
1176 mem_op->off = -1;
1177 return true;
1179 return false;
1182 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1183 *I_P to point to the last element of the replacement. */
1184 static bool
1185 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1186 unsigned int *i_p)
1188 unsigned int i = *i_p;
1189 vn_reference_op_t op = &(*ops)[i];
1190 vn_reference_op_t mem_op = &(*ops)[i - 1];
1191 gimple *def_stmt;
1192 enum tree_code code;
1193 poly_offset_int off;
1195 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1196 if (!is_gimple_assign (def_stmt))
1197 return false;
1199 code = gimple_assign_rhs_code (def_stmt);
1200 if (code != ADDR_EXPR
1201 && code != POINTER_PLUS_EXPR)
1202 return false;
1204 off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
1206 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1207 from .foo.bar to the preceding MEM_REF offset and replace the
1208 address with &OBJ. */
1209 if (code == ADDR_EXPR)
1211 tree addr, addr_base;
1212 poly_int64 addr_offset;
1214 addr = gimple_assign_rhs1 (def_stmt);
1215 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1216 &addr_offset);
1217 /* If that didn't work because the address isn't invariant, propagate
1218 the reference tree from the address operation in case the current
1219 dereference isn't offsetted. */
1220 if (!addr_base
1221 && *i_p == ops->length () - 1
1222 && known_eq (off, 0)
1223 /* This makes us disable this transform for PRE where the
1224 reference ops might be also used for code insertion which
1225 is invalid. */
1226 && default_vn_walk_kind == VN_WALKREWRITE)
1228 auto_vec<vn_reference_op_s, 32> tem;
1229 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1230 /* Make sure to preserve TBAA info. The only objects not
1231 wrapped in MEM_REFs that can have their address taken are
1232 STRING_CSTs. */
1233 if (tem.length () >= 2
1234 && tem[tem.length () - 2].opcode == MEM_REF)
1236 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1237 new_mem_op->op0
1238 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1239 wi::to_poly_wide (new_mem_op->op0));
1241 else
1242 gcc_assert (tem.last ().opcode == STRING_CST);
1243 ops->pop ();
1244 ops->pop ();
1245 ops->safe_splice (tem);
1246 --*i_p;
1247 return true;
1249 if (!addr_base
1250 || TREE_CODE (addr_base) != MEM_REF
1251 || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1252 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base, 0))))
1253 return false;
1255 off += addr_offset;
1256 off += mem_ref_offset (addr_base);
1257 op->op0 = TREE_OPERAND (addr_base, 0);
1259 else
1261 tree ptr, ptroff;
1262 ptr = gimple_assign_rhs1 (def_stmt);
1263 ptroff = gimple_assign_rhs2 (def_stmt);
1264 if (TREE_CODE (ptr) != SSA_NAME
1265 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1266 || !poly_int_tree_p (ptroff))
1267 return false;
1269 off += wi::to_poly_offset (ptroff);
1270 op->op0 = ptr;
1273 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1274 if (tree_fits_shwi_p (mem_op->op0))
1275 mem_op->off = tree_to_shwi (mem_op->op0);
1276 else
1277 mem_op->off = -1;
1278 if (TREE_CODE (op->op0) == SSA_NAME)
1279 op->op0 = SSA_VAL (op->op0);
1280 if (TREE_CODE (op->op0) != SSA_NAME)
1281 op->opcode = TREE_CODE (op->op0);
1283 /* And recurse. */
1284 if (TREE_CODE (op->op0) == SSA_NAME)
1285 vn_reference_maybe_forwprop_address (ops, i_p);
1286 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1287 vn_reference_fold_indirect (ops, i_p);
1288 return true;
1291 /* Optimize the reference REF to a constant if possible or return
1292 NULL_TREE if not. */
1294 tree
1295 fully_constant_vn_reference_p (vn_reference_t ref)
1297 vec<vn_reference_op_s> operands = ref->operands;
1298 vn_reference_op_t op;
1300 /* Try to simplify the translated expression if it is
1301 a call to a builtin function with at most two arguments. */
1302 op = &operands[0];
1303 if (op->opcode == CALL_EXPR
1304 && TREE_CODE (op->op0) == ADDR_EXPR
1305 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1306 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1307 && operands.length () >= 2
1308 && operands.length () <= 3)
1310 vn_reference_op_t arg0, arg1 = NULL;
1311 bool anyconst = false;
1312 arg0 = &operands[1];
1313 if (operands.length () > 2)
1314 arg1 = &operands[2];
1315 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1316 || (arg0->opcode == ADDR_EXPR
1317 && is_gimple_min_invariant (arg0->op0)))
1318 anyconst = true;
1319 if (arg1
1320 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1321 || (arg1->opcode == ADDR_EXPR
1322 && is_gimple_min_invariant (arg1->op0))))
1323 anyconst = true;
1324 if (anyconst)
1326 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1327 arg1 ? 2 : 1,
1328 arg0->op0,
1329 arg1 ? arg1->op0 : NULL);
1330 if (folded
1331 && TREE_CODE (folded) == NOP_EXPR)
1332 folded = TREE_OPERAND (folded, 0);
1333 if (folded
1334 && is_gimple_min_invariant (folded))
1335 return folded;
1339 /* Simplify reads from constants or constant initializers. */
1340 else if (BITS_PER_UNIT == 8
1341 && is_gimple_reg_type (ref->type)
1342 && (!INTEGRAL_TYPE_P (ref->type)
1343 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1345 poly_int64 off = 0;
1346 HOST_WIDE_INT size;
1347 if (INTEGRAL_TYPE_P (ref->type))
1348 size = TYPE_PRECISION (ref->type);
1349 else
1350 size = tree_to_shwi (TYPE_SIZE (ref->type));
1351 if (size % BITS_PER_UNIT != 0
1352 || size > MAX_BITSIZE_MODE_ANY_MODE)
1353 return NULL_TREE;
1354 size /= BITS_PER_UNIT;
1355 unsigned i;
1356 for (i = 0; i < operands.length (); ++i)
1358 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1360 ++i;
1361 break;
1363 if (known_eq (operands[i].off, -1))
1364 return NULL_TREE;
1365 off += operands[i].off;
1366 if (operands[i].opcode == MEM_REF)
1368 ++i;
1369 break;
1372 vn_reference_op_t base = &operands[--i];
1373 tree ctor = error_mark_node;
1374 tree decl = NULL_TREE;
1375 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1376 ctor = base->op0;
1377 else if (base->opcode == MEM_REF
1378 && base[1].opcode == ADDR_EXPR
1379 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1380 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1381 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1383 decl = TREE_OPERAND (base[1].op0, 0);
1384 if (TREE_CODE (decl) == STRING_CST)
1385 ctor = decl;
1386 else
1387 ctor = ctor_for_folding (decl);
1389 if (ctor == NULL_TREE)
1390 return build_zero_cst (ref->type);
1391 else if (ctor != error_mark_node)
1393 HOST_WIDE_INT const_off;
1394 if (decl)
1396 tree res = fold_ctor_reference (ref->type, ctor,
1397 off * BITS_PER_UNIT,
1398 size * BITS_PER_UNIT, decl);
1399 if (res)
1401 STRIP_USELESS_TYPE_CONVERSION (res);
1402 if (is_gimple_min_invariant (res))
1403 return res;
1406 else if (off.is_constant (&const_off))
1408 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1409 int len = native_encode_expr (ctor, buf, size, const_off);
1410 if (len > 0)
1411 return native_interpret_expr (ref->type, buf, len);
1416 return NULL_TREE;
1419 /* Return true if OPS contain a storage order barrier. */
1421 static bool
1422 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1424 vn_reference_op_t op;
1425 unsigned i;
1427 FOR_EACH_VEC_ELT (ops, i, op)
1428 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1429 return true;
1431 return false;
1434 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1435 structures into their value numbers. This is done in-place, and
1436 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1437 whether any operands were valueized. */
1439 static vec<vn_reference_op_s>
1440 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1442 vn_reference_op_t vro;
1443 unsigned int i;
1445 *valueized_anything = false;
1447 FOR_EACH_VEC_ELT (orig, i, vro)
1449 if (vro->opcode == SSA_NAME
1450 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1452 tree tem = SSA_VAL (vro->op0);
1453 if (tem != vro->op0)
1455 *valueized_anything = true;
1456 vro->op0 = tem;
1458 /* If it transforms from an SSA_NAME to a constant, update
1459 the opcode. */
1460 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1461 vro->opcode = TREE_CODE (vro->op0);
1463 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1465 tree tem = SSA_VAL (vro->op1);
1466 if (tem != vro->op1)
1468 *valueized_anything = true;
1469 vro->op1 = tem;
1472 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1474 tree tem = SSA_VAL (vro->op2);
1475 if (tem != vro->op2)
1477 *valueized_anything = true;
1478 vro->op2 = tem;
1481 /* If it transforms from an SSA_NAME to an address, fold with
1482 a preceding indirect reference. */
1483 if (i > 0
1484 && vro->op0
1485 && TREE_CODE (vro->op0) == ADDR_EXPR
1486 && orig[i - 1].opcode == MEM_REF)
1488 if (vn_reference_fold_indirect (&orig, &i))
1489 *valueized_anything = true;
1491 else if (i > 0
1492 && vro->opcode == SSA_NAME
1493 && orig[i - 1].opcode == MEM_REF)
1495 if (vn_reference_maybe_forwprop_address (&orig, &i))
1496 *valueized_anything = true;
1498 /* If it transforms a non-constant ARRAY_REF into a constant
1499 one, adjust the constant offset. */
1500 else if (vro->opcode == ARRAY_REF
1501 && known_eq (vro->off, -1)
1502 && poly_int_tree_p (vro->op0)
1503 && poly_int_tree_p (vro->op1)
1504 && TREE_CODE (vro->op2) == INTEGER_CST)
1506 poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1507 - wi::to_poly_offset (vro->op1))
1508 * wi::to_offset (vro->op2)
1509 * vn_ref_op_align_unit (vro));
1510 off.to_shwi (&vro->off);
1514 return orig;
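/* Wrapper around valueize_refs_1 that discards whether anything was
   valueized. */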
1517 static vec<vn_reference_op_s>
1518 valueize_refs (vec<vn_reference_op_s> orig)
1520 bool tem;
1521 return valueize_refs_1 (orig, &tem);
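/* Shared scratch vector reused by the valueize_shared_reference_ops_from_*
   routines below. */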
1524 static vec<vn_reference_op_s> shared_lookup_references;
1526 /* Create a vector of vn_reference_op_s structures from REF, a
1527 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1528 this function. *VALUEIZED_ANYTHING will specify whether any
1529 operands were valueized. */
1531 static vec<vn_reference_op_s>
1532 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1534 if (!ref)
1535 return vNULL;
1536 shared_lookup_references.truncate (0);
1537 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1538 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1539 valueized_anything);
1540 return shared_lookup_references;
1543 /* Create a vector of vn_reference_op_s structures from CALL, a
1544 call statement. The vector is shared among all callers of
1545 this function. */
1547 static vec<vn_reference_op_s>
1548 valueize_shared_reference_ops_from_call (gcall *call)
1550 if (!call)
1551 return vNULL;
1552 shared_lookup_references.truncate (0);
1553 copy_reference_ops_from_call (call, &shared_lookup_references);
1554 shared_lookup_references = valueize_refs (shared_lookup_references);
1555 return shared_lookup_references;
1558 /* Lookup a SCCVN reference operation VR in the current hash table.
1559 Returns the resulting value number if it exists in the hash table,
1560 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1561 vn_reference_t stored in the hashtable if something is found. */
1563 static tree
1564 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1566 vn_reference_s **slot;
1567 hashval_t hash;
1569 hash = vr->hashcode;
1570 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1571 if (!slot && current_info == optimistic_info)
1572 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1573 if (slot)
1575 if (vnresult)
1576 *vnresult = (vn_reference_t)*slot;
1577 return ((vn_reference_t)*slot)->result;
1580 return NULL_TREE;
1583 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1584 with the current VUSE and performs the expression lookup. */
1586 static void *
1587 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1588 unsigned int cnt, void *vr_)
1590 vn_reference_t vr = (vn_reference_t)vr_;
1591 vn_reference_s **slot;
1592 hashval_t hash;
1594 /* This bounds the stmt walks we perform on reference lookups
1595 to O(1) instead of O(N) where N is the number of dominating
1596 stores. */
1597 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1598 return (void *)-1;
1600 if (last_vuse_ptr)
1601 *last_vuse_ptr = vuse;
1603 /* Fixup vuse and hash. */
1604 if (vr->vuse)
1605 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1606 vr->vuse = vuse_ssa_val (vuse);
1607 if (vr->vuse)
1608 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1610 hash = vr->hashcode;
1611 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1612 if (!slot && current_info == optimistic_info)
1613 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1614 if (slot)
1615 return *slot;
1617 return NULL;
1620 /* Lookup an existing or insert a new vn_reference entry into the
1621 value table for the VUSE, SET, TYPE, OPERANDS reference which
1622 has the value VALUE which is either a constant or an SSA name. */
1624 static vn_reference_t
1625 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1626 alias_set_type set,
1627 tree type,
1628 vec<vn_reference_op_s,
1629 va_heap> operands,
1630 tree value)
1632 vn_reference_s vr1;
1633 vn_reference_t result;
1634 unsigned value_id;
1635 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1636 vr1.operands = operands;
1637 vr1.type = type;
1638 vr1.set = set;
1639 vr1.hashcode = vn_reference_compute_hash (&vr1);
1640 if (vn_reference_lookup_1 (&vr1, &result))
1641 return result;
1642 if (TREE_CODE (value) == SSA_NAME)
1643 value_id = VN_INFO (value)->value_id;
1644 else
1645 value_id = get_or_alloc_constant_value_id (value);
1646 return vn_reference_insert_pieces (vuse, set, type,
1647 operands.copy (), value, value_id);
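/* Forward declaration; used by vn_nary_build_or_lookup_1 below. */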
1650 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *stmt, tree result);
1652 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
1654 static tree
1655 vn_lookup_simplify_result (gimple_match_op *res_op)
1657 if (!res_op->code.is_tree_code ())
1658 return NULL_TREE;
1659 tree *ops = res_op->ops;
1660 unsigned int length = res_op->num_ops;
1661 if (res_op->code == CONSTRUCTOR
1662 /* ??? We're arriving here with SCCVN's view, decomposed CONSTRUCTOR
1663 and GIMPLEs / match-and-simplifies, CONSTRUCTOR as GENERIC tree. */
1664 && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
1666 length = CONSTRUCTOR_NELTS (res_op->ops[0]);
1667 ops = XALLOCAVEC (tree, length);
1668 for (unsigned i = 0; i < length; ++i)
1669 ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
1671 vn_nary_op_t vnresult = NULL;
1672 return vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
1673 res_op->type, ops, &vnresult);
1676 /* Return a value-number for the operation described by RES_OP, either by
1677 looking up an existing value-number for the simplified result or by
1678 inserting the operation if INSERT is true. */
1680 static tree
1681 vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert)
1683 tree result = NULL_TREE;
1684 /* We will be creating a value number for
1685 the operation described by RES_OP.
1686 So first simplify and lookup this expression to see if it
1687 is already available. */
1688 mprts_hook = vn_lookup_simplify_result;
1689 bool res = false;
1690 switch (TREE_CODE_LENGTH ((tree_code) res_op->code))
1692 case 1:
1693 res = gimple_resimplify1 (NULL, res_op, vn_valueize);
1694 break;
1695 case 2:
1696 res = gimple_resimplify2 (NULL, res_op, vn_valueize);
1697 break;
1698 case 3:
1699 res = gimple_resimplify3 (NULL, res_op, vn_valueize);
1700 break;
1702 mprts_hook = NULL;
1703 gimple *new_stmt = NULL;
1704 if (res
1705 && gimple_simplified_result_is_gimple_val (res_op))
1706 /* The expression is already available. */
1707 result = res_op->ops[0];
1708 else
1710 tree val = vn_lookup_simplify_result (res_op);
1711 if (!val && insert)
1713 gimple_seq stmts = NULL;
1714 result = maybe_push_res_to_seq (res_op, &stmts);
1715 if (result)
1717 gcc_assert (gimple_seq_singleton_p (stmts));
1718 new_stmt = gimple_seq_first_stmt (stmts);
1721 else
1722 /* The expression is already available. */
1723 result = val;
1725 if (new_stmt)
1727 /* The expression is not yet available, value-number lhs to
1728 the new SSA_NAME we created. */
1729 /* Initialize value-number information properly. */
1730 VN_INFO_GET (result)->valnum = result;
1731 VN_INFO (result)->value_id = get_next_value_id ();
1732 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1733 new_stmt);
1734 VN_INFO (result)->needs_insertion = true;
1735 /* ??? PRE phi-translation inserts NARYs without corresponding
1736 SSA name result. Re-use those but set their result according
1737 to the stmt we just built. */
1738 vn_nary_op_t nary = NULL;
1739 vn_nary_op_lookup_stmt (new_stmt, &nary);
1740 if (nary)
1742 gcc_assert (nary->result == NULL_TREE);
1743 nary->result = gimple_assign_lhs (new_stmt);
1745 /* As all "inserted" statements are singleton SCCs, insert
1746 to the valid table. This is strictly needed to
1747 avoid re-generating new value SSA_NAMEs for the same
1748 expression during SCC iteration over and over (the
1749 optimistic table gets cleared after each iteration).
1750 We do not need to insert into the optimistic table, as
1751 lookups there will fall back to the valid table. */
1752 else if (current_info == optimistic_info)
1754 current_info = valid_info;
1755 vn_nary_op_insert_stmt (new_stmt, result);
1756 current_info = optimistic_info;
1758 else
1759 vn_nary_op_insert_stmt (new_stmt, result);
1760 if (dump_file && (dump_flags & TDF_DETAILS))
1762 fprintf (dump_file, "Inserting name ");
1763 print_generic_expr (dump_file, result);
1764 fprintf (dump_file, " for expression ");
1765 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1766 fprintf (dump_file, "\n");
1769 return result;
1772 /* Return a value-number for the operation described by RES_OP, either by looking
1773 up an existing value-number for the simplified result or by inserting the operation. */
1775 static tree
1776 vn_nary_build_or_lookup (gimple_match_op *res_op)
1778 return vn_nary_build_or_lookup_1 (res_op, true);
1781 /* Try to simplify the nary expression NARY and return
1782 its value if present. */
1784 tree
1785 vn_nary_simplify (vn_nary_op_t nary)
1787 if (nary->length > gimple_match_op::MAX_NUM_OPS)
1788 return NULL_TREE;
1789 gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
1790 nary->type, nary->length);
1791 memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
1792 return vn_nary_build_or_lookup_1 (&op, false);
1796 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1797 from the statement defining VUSE and if not successful tries to
1798 translate *REFP and VR_ through an aggregate copy at the definition
1799 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1800 of *REF and *VR. If only disambiguation was performed then
1801 *DISAMBIGUATE_ONLY is set to true. */
1803 static void *
1804 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1805 bool *disambiguate_only)
1807 vn_reference_t vr = (vn_reference_t)vr_;
1808 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1809 tree base = ao_ref_base (ref);
1810 HOST_WIDE_INT offseti, maxsizei;
1811 static vec<vn_reference_op_s> lhs_ops;
1812 ao_ref lhs_ref;
1813 bool lhs_ref_ok = false;
1814 poly_int64 copy_size;
1816 /* If the reference is based on a parameter that was determined as
1817 pointing to readonly memory it doesn't change. */
1818 if (TREE_CODE (base) == MEM_REF
1819 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1820 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1821 && bitmap_bit_p (const_parms,
1822 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1824 *disambiguate_only = true;
1825 return NULL;
1828 /* First try to disambiguate after value-replacing in the definitions LHS. */
1829 if (is_gimple_assign (def_stmt))
1831 tree lhs = gimple_assign_lhs (def_stmt);
1832 bool valueized_anything = false;
1833 /* Avoid re-allocation overhead. */
1834 lhs_ops.truncate (0);
1835 copy_reference_ops_from_ref (lhs, &lhs_ops);
1836 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1837 if (valueized_anything)
1839 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1840 get_alias_set (lhs),
1841 TREE_TYPE (lhs), lhs_ops);
1842 if (lhs_ref_ok
1843 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1845 *disambiguate_only = true;
1846 return NULL;
1849 else
1851 ao_ref_init (&lhs_ref, lhs);
1852 lhs_ref_ok = true;
1855 /* If we reach a clobbering statement try to skip it and see if
1856 we find a VN result with exactly the same value as the
1857 possible clobber. In this case we can ignore the clobber
1858 and return the found value.
1859 Note that we don't need to worry about partial overlapping
1860 accesses as we then can use TBAA to disambiguate against the
1861 clobbering statement when looking up a load (thus the
1862 VN_WALKREWRITE guard). */
1863 if (vn_walk_kind == VN_WALKREWRITE
1864 && is_gimple_reg_type (TREE_TYPE (lhs))
1865 && types_compatible_p (TREE_TYPE (lhs), vr->type))
1867 tree *saved_last_vuse_ptr = last_vuse_ptr;
1868 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
1869 last_vuse_ptr = NULL;
1870 tree saved_vuse = vr->vuse;
1871 hashval_t saved_hashcode = vr->hashcode;
1872 void *res = vn_reference_lookup_2 (ref,
1873 gimple_vuse (def_stmt), 0, vr);
1874 /* Need to restore vr->vuse and vr->hashcode. */
1875 vr->vuse = saved_vuse;
1876 vr->hashcode = saved_hashcode;
1877 last_vuse_ptr = saved_last_vuse_ptr;
1878 if (res && res != (void *)-1)
1880 vn_reference_t vnresult = (vn_reference_t) res;
1881 if (vnresult->result
1882 && operand_equal_p (vnresult->result,
1883 gimple_assign_rhs1 (def_stmt), 0))
1884 return res;
1888 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1889 && gimple_call_num_args (def_stmt) <= 4)
1891 /* For builtin calls, valueize their arguments and call the
1892 alias oracle again. Valueization may improve points-to
1893 info of pointers and constify size and position arguments.
1894 Originally this was motivated by PR61034 which has
1895 conditional calls to free falsely clobbering ref because
1896 of imprecise points-to info of the argument. */
1897 tree oldargs[4];
1898 bool valueized_anything = false;
1899 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1901 oldargs[i] = gimple_call_arg (def_stmt, i);
1902 tree val = vn_valueize (oldargs[i]);
1903 if (val != oldargs[i])
1905 gimple_call_set_arg (def_stmt, i, val);
1906 valueized_anything = true;
1909 if (valueized_anything)
1911 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1912 ref);
1913 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1914 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1915 if (!res)
1917 *disambiguate_only = true;
1918 return NULL;
1923 if (*disambiguate_only)
1924 return (void *)-1;
1926 /* If we cannot constrain the size of the reference we cannot
1927 test if anything kills it. */
1928 if (!ref->max_size_known_p ())
1929 return (void *)-1;
1931 poly_int64 offset = ref->offset;
1932 poly_int64 maxsize = ref->max_size;
1934 /* We can't deduce anything useful from clobbers. */
1935 if (gimple_clobber_p (def_stmt))
1936 return (void *)-1;
1938 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1939 from that definition.
1940 1) Memset. */
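/* Illustration (not code from this function): after memset (p, 0, n) a load
   of a register-type value from a byte range fully covered by the n bytes at
   *p can be valued to zero; a non-zero constant byte is handled by
   re-interpreting the repeated byte pattern in the type of the load. */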
1941 if (is_gimple_reg_type (vr->type)
1942 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1943 && (integer_zerop (gimple_call_arg (def_stmt, 1))
1944 || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
1945 || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
1946 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1947 && offset.is_constant (&offseti)
1948 && offseti % BITS_PER_UNIT == 0))
1949 && poly_int_tree_p (gimple_call_arg (def_stmt, 2))
1950 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
1951 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
1953 tree base2;
1954 poly_int64 offset2, size2, maxsize2;
1955 bool reverse;
1956 tree ref2 = gimple_call_arg (def_stmt, 0);
1957 if (TREE_CODE (ref2) == SSA_NAME)
1959 ref2 = SSA_VAL (ref2);
1960 if (TREE_CODE (ref2) == SSA_NAME
1961 && (TREE_CODE (base) != MEM_REF
1962 || TREE_OPERAND (base, 0) != ref2))
1964 gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
1965 if (gimple_assign_single_p (def_stmt)
1966 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
1967 ref2 = gimple_assign_rhs1 (def_stmt);
1970 if (TREE_CODE (ref2) == ADDR_EXPR)
1972 ref2 = TREE_OPERAND (ref2, 0);
1973 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1974 &reverse);
1975 if (!known_size_p (maxsize2)
1976 || !known_eq (maxsize2, size2)
1977 || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
1978 return (void *)-1;
1980 else if (TREE_CODE (ref2) == SSA_NAME)
1982 poly_int64 soff;
1983 if (TREE_CODE (base) != MEM_REF
1984 || !(mem_ref_offset (base) << LOG2_BITS_PER_UNIT).to_shwi (&soff))
1985 return (void *)-1;
1986 offset += soff;
1987 offset2 = 0;
1988 if (TREE_OPERAND (base, 0) != ref2)
1990 gimple *def = SSA_NAME_DEF_STMT (ref2);
1991 if (is_gimple_assign (def)
1992 && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
1993 && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
1994 && poly_int_tree_p (gimple_assign_rhs2 (def))
1995 && (wi::to_poly_offset (gimple_assign_rhs2 (def))
1996 << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
1998 ref2 = gimple_assign_rhs1 (def);
1999 if (TREE_CODE (ref2) == SSA_NAME)
2000 ref2 = SSA_VAL (ref2);
2002 else
2003 return (void *)-1;
2006 else
2007 return (void *)-1;
2008 tree len = gimple_call_arg (def_stmt, 2);
2009 if (known_subrange_p (offset, maxsize, offset2,
2010 wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
2012 tree val;
2013 if (integer_zerop (gimple_call_arg (def_stmt, 1)))
2014 val = build_zero_cst (vr->type);
2015 else if (INTEGRAL_TYPE_P (vr->type)
2016 && known_eq (ref->size, 8))
2018 gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
2019 vr->type, gimple_call_arg (def_stmt, 1));
2020 val = vn_nary_build_or_lookup (&res_op);
2021 if (!val
2022 || (TREE_CODE (val) == SSA_NAME
2023 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2024 return (void *)-1;
2026 else
2028 unsigned len = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type));
2029 unsigned char *buf = XALLOCAVEC (unsigned char, len);
2030 memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
2031 len);
2032 val = native_interpret_expr (vr->type, buf, len);
2033 if (!val)
2034 return (void *)-1;
2036 return vn_reference_lookup_or_insert_for_pieces
2037 (vuse, vr->set, vr->type, vr->operands, val);
2041 /* 2) Assignment from an empty CONSTRUCTOR. */
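/* For example (illustrative only), after
     a = {};
   any load from a piece of a covered by the store is zero.  */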
2042 else if (is_gimple_reg_type (vr->type)
2043 && gimple_assign_single_p (def_stmt)
2044 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
2045 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
2047 tree base2;
2048 poly_int64 offset2, size2, maxsize2;
2049 bool reverse;
2050 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2051 &offset2, &size2, &maxsize2, &reverse);
2052 if (known_size_p (maxsize2)
2053 && operand_equal_p (base, base2, 0)
2054 && known_subrange_p (offset, maxsize, offset2, size2))
2056 tree val = build_zero_cst (vr->type);
2057 return vn_reference_lookup_or_insert_for_pieces
2058 (vuse, vr->set, vr->type, vr->operands, val);
2062 /* 3) Assignment from a constant. We can use fold's native encode/interpret
2063 routines to extract the assigned bits. */
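/* For example (illustrative only), given
     u.i = 0x11223344;
     x_1 = u.c2;
   the constant is byte-encoded with native_encode_expr and the
   smaller load re-interpreted from the covered bytes.  */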
2064 else if (known_eq (ref->size, maxsize)
2065 && is_gimple_reg_type (vr->type)
2066 && !contains_storage_order_barrier_p (vr->operands)
2067 && gimple_assign_single_p (def_stmt)
2068 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2069 /* native_encode and native_decode operate on arrays of bytes
2070 and so fundamentally need a compile-time size and offset. */
2071 && maxsize.is_constant (&maxsizei)
2072 && maxsizei % BITS_PER_UNIT == 0
2073 && offset.is_constant (&offseti)
2074 && offseti % BITS_PER_UNIT == 0
2075 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2076 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2077 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2079 tree base2;
2080 HOST_WIDE_INT offset2, size2;
2081 bool reverse;
2082 base2 = get_ref_base_and_extent_hwi (gimple_assign_lhs (def_stmt),
2083 &offset2, &size2, &reverse);
2084 if (base2
2085 && !reverse
2086 && size2 % BITS_PER_UNIT == 0
2087 && offset2 % BITS_PER_UNIT == 0
2088 && operand_equal_p (base, base2, 0)
2089 && known_subrange_p (offseti, maxsizei, offset2, size2))
2091 /* We support up to 512-bit values (for V8DFmode). */
2092 unsigned char buffer[64];
2093 int len;
2095 tree rhs = gimple_assign_rhs1 (def_stmt);
2096 if (TREE_CODE (rhs) == SSA_NAME)
2097 rhs = SSA_VAL (rhs);
2098 len = native_encode_expr (rhs,
2099 buffer, sizeof (buffer),
2100 (offseti - offset2) / BITS_PER_UNIT);
2101 if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
2103 tree type = vr->type;
2104 /* Make sure to interpret in a type that has a range
2105 covering the whole access size. */
2106 if (INTEGRAL_TYPE_P (vr->type)
2107 && maxsizei != TYPE_PRECISION (vr->type))
2108 type = build_nonstandard_integer_type (maxsizei,
2109 TYPE_UNSIGNED (type));
2110 tree val = native_interpret_expr (type, buffer,
2111 maxsizei / BITS_PER_UNIT);
2112 /* If we chop off bits because the type's precision doesn't
2113 match the memory access size, this is ok when optimizing
2114 reads but not when called from the DSE code during
2115 elimination. */
2116 if (val
2117 && type != vr->type)
2119 if (! int_fits_type_p (val, vr->type))
2120 val = NULL_TREE;
2121 else
2122 val = fold_convert (vr->type, val);
2125 if (val)
2126 return vn_reference_lookup_or_insert_for_pieces
2127 (vuse, vr->set, vr->type, vr->operands, val);
2132 /* 4) Assignment from an SSA name which definition we may be able
2133 to access pieces from. */
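/* For example (illustrative only), given
     a = v_1;
     x_2 = a.f;
   the load can be valued as BIT_FIELD_REF <v_1, size, position>.  */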
2134 else if (known_eq (ref->size, maxsize)
2135 && is_gimple_reg_type (vr->type)
2136 && !contains_storage_order_barrier_p (vr->operands)
2137 && gimple_assign_single_p (def_stmt)
2138 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2140 tree base2;
2141 poly_int64 offset2, size2, maxsize2;
2142 bool reverse;
2143 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2144 &offset2, &size2, &maxsize2,
2145 &reverse);
2146 if (!reverse
2147 && known_size_p (maxsize2)
2148 && known_eq (maxsize2, size2)
2149 && operand_equal_p (base, base2, 0)
2150 && known_subrange_p (offset, maxsize, offset2, size2)
2151 /* ??? We can't handle bitfield precision extracts without
2152 either using an alternate type for the BIT_FIELD_REF and
2153 then doing a conversion or possibly adjusting the offset
2154 according to endianness. */
2155 && (! INTEGRAL_TYPE_P (vr->type)
2156 || known_eq (ref->size, TYPE_PRECISION (vr->type)))
2157 && multiple_p (ref->size, BITS_PER_UNIT))
2159 gimple_match_op op (gimple_match_cond::UNCOND,
2160 BIT_FIELD_REF, vr->type,
2161 SSA_VAL (gimple_assign_rhs1 (def_stmt)),
2162 bitsize_int (ref->size),
2163 bitsize_int (offset - offset2));
2164 tree val = vn_nary_build_or_lookup (&op);
2165 if (val
2166 && (TREE_CODE (val) != SSA_NAME
2167 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2169 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2170 (vuse, vr->set, vr->type, vr->operands, val);
2171 return res;
2176 /* 5) For aggregate copies translate the reference through them if
2177 the copy kills ref. */
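/* For example (illustrative only), given
     a = b;
     x_1 = a.f;
   the lookup of a.f is continued as a lookup of b.f.  */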
2178 else if (vn_walk_kind == VN_WALKREWRITE
2179 && gimple_assign_single_p (def_stmt)
2180 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2181 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2182 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2184 tree base2;
2185 int i, j, k;
2186 auto_vec<vn_reference_op_s> rhs;
2187 vn_reference_op_t vro;
2188 ao_ref r;
2190 if (!lhs_ref_ok)
2191 return (void *)-1;
2193 /* See if the assignment kills REF. */
2194 base2 = ao_ref_base (&lhs_ref);
2195 if (!lhs_ref.max_size_known_p ()
2196 || (base != base2
2197 && (TREE_CODE (base) != MEM_REF
2198 || TREE_CODE (base2) != MEM_REF
2199 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2200 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2201 TREE_OPERAND (base2, 1))))
2202 || !stmt_kills_ref_p (def_stmt, ref))
2203 return (void *)-1;
2205 /* Find the common base of ref and the lhs. lhs_ops already
2206 contains valueized operands for the lhs. */
2207 i = vr->operands.length () - 1;
2208 j = lhs_ops.length () - 1;
2209 while (j >= 0 && i >= 0
2210 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2212 i--;
2213 j--;
2216 /* ??? The innermost op should always be a MEM_REF and we already
2217 checked that the assignment to the lhs kills vr. Thus for
2218 aggregate copies using char[] types the vn_reference_op_eq
2219 may fail when comparing types for compatibility. But we really
2220 don't care here - further lookups with the rewritten operands
2221 will simply fail if we messed up types too badly. */
2222 poly_int64 extra_off = 0;
2223 if (j == 0 && i >= 0
2224 && lhs_ops[0].opcode == MEM_REF
2225 && maybe_ne (lhs_ops[0].off, -1))
2227 if (known_eq (lhs_ops[0].off, vr->operands[i].off))
2228 i--, j--;
2229 else if (vr->operands[i].opcode == MEM_REF
2230 && maybe_ne (vr->operands[i].off, -1))
2232 extra_off = vr->operands[i].off - lhs_ops[0].off;
2233 i--, j--;
2237 /* i now points to the first additional op.
2238 ??? LHS may not be completely contained in VR, one or more
2239 VIEW_CONVERT_EXPRs could be in its way. We could at least
2240 try handling outermost VIEW_CONVERT_EXPRs. */
2241 if (j != -1)
2242 return (void *)-1;
2244 /* Punt if the additional ops contain a storage order barrier. */
2245 for (k = i; k >= 0; k--)
2247 vro = &vr->operands[k];
2248 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2249 return (void *)-1;
2252 /* Now re-write REF to be based on the rhs of the assignment. */
2253 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2255 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2256 if (maybe_ne (extra_off, 0))
2258 if (rhs.length () < 2)
2259 return (void *)-1;
2260 int ix = rhs.length () - 2;
2261 if (rhs[ix].opcode != MEM_REF
2262 || known_eq (rhs[ix].off, -1))
2263 return (void *)-1;
2264 rhs[ix].off += extra_off;
2265 rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
2266 build_int_cst (TREE_TYPE (rhs[ix].op0),
2267 extra_off));
2270 /* We need to pre-pend vr->operands[0..i] to rhs. */
2271 vec<vn_reference_op_s> old = vr->operands;
2272 if (i + 1 + rhs.length () > vr->operands.length ())
2273 vr->operands.safe_grow (i + 1 + rhs.length ());
2274 else
2275 vr->operands.truncate (i + 1 + rhs.length ());
2276 FOR_EACH_VEC_ELT (rhs, j, vro)
2277 vr->operands[i + 1 + j] = *vro;
2278 vr->operands = valueize_refs (vr->operands);
2279 if (old == shared_lookup_references)
2280 shared_lookup_references = vr->operands;
2281 vr->hashcode = vn_reference_compute_hash (vr);
2283 /* Try folding the new reference to a constant. */
2284 tree val = fully_constant_vn_reference_p (vr);
2285 if (val)
2286 return vn_reference_lookup_or_insert_for_pieces
2287 (vuse, vr->set, vr->type, vr->operands, val);
2289 /* Adjust *ref from the new operands. */
2290 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2291 return (void *)-1;
2292 /* This can happen with bitfields. */
2293 if (maybe_ne (ref->size, r.size))
2294 return (void *)-1;
2295 *ref = r;
2297 /* Do not update last seen VUSE after translating. */
2298 last_vuse_ptr = NULL;
2300 /* Keep looking for the adjusted *REF / VR pair. */
2301 return NULL;
2304 /* 6) For memcpy copies translate the reference through them if
2305 the copy kills ref. */
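/* For example (illustrative only), given
     memcpy (&a, &b, sizeof (a));
     x_1 = a.f;
   the lookup continues in the memcpy source via a MEM_REF based
   on &b at the corresponding offset.  */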
2306 else if (vn_walk_kind == VN_WALKREWRITE
2307 && is_gimple_reg_type (vr->type)
2308 /* ??? Handle BCOPY as well. */
2309 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2310 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2311 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2312 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2313 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2314 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2315 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2316 && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size))
2318 tree lhs, rhs;
2319 ao_ref r;
2320 poly_int64 rhs_offset, lhs_offset;
2321 vn_reference_op_s op;
2322 poly_uint64 mem_offset;
2323 poly_int64 at, byte_maxsize;
2325 /* Only handle non-variable, addressable refs. */
2326 if (maybe_ne (ref->size, maxsize)
2327 || !multiple_p (offset, BITS_PER_UNIT, &at)
2328 || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
2329 return (void *)-1;
2331 /* Extract a pointer base and an offset for the destination. */
2332 lhs = gimple_call_arg (def_stmt, 0);
2333 lhs_offset = 0;
2334 if (TREE_CODE (lhs) == SSA_NAME)
2336 lhs = SSA_VAL (lhs);
2337 if (TREE_CODE (lhs) == SSA_NAME)
2339 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2340 if (gimple_assign_single_p (def_stmt)
2341 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2342 lhs = gimple_assign_rhs1 (def_stmt);
2345 if (TREE_CODE (lhs) == ADDR_EXPR)
2347 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2348 &lhs_offset);
2349 if (!tem)
2350 return (void *)-1;
2351 if (TREE_CODE (tem) == MEM_REF
2352 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2354 lhs = TREE_OPERAND (tem, 0);
2355 if (TREE_CODE (lhs) == SSA_NAME)
2356 lhs = SSA_VAL (lhs);
2357 lhs_offset += mem_offset;
2359 else if (DECL_P (tem))
2360 lhs = build_fold_addr_expr (tem);
2361 else
2362 return (void *)-1;
2364 if (TREE_CODE (lhs) != SSA_NAME
2365 && TREE_CODE (lhs) != ADDR_EXPR)
2366 return (void *)-1;
2368 /* Extract a pointer base and an offset for the source. */
2369 rhs = gimple_call_arg (def_stmt, 1);
2370 rhs_offset = 0;
2371 if (TREE_CODE (rhs) == SSA_NAME)
2372 rhs = SSA_VAL (rhs);
2373 if (TREE_CODE (rhs) == ADDR_EXPR)
2375 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2376 &rhs_offset);
2377 if (!tem)
2378 return (void *)-1;
2379 if (TREE_CODE (tem) == MEM_REF
2380 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2382 rhs = TREE_OPERAND (tem, 0);
2383 rhs_offset += mem_offset;
2385 else if (DECL_P (tem)
2386 || TREE_CODE (tem) == STRING_CST)
2387 rhs = build_fold_addr_expr (tem);
2388 else
2389 return (void *)-1;
2391 if (TREE_CODE (rhs) != SSA_NAME
2392 && TREE_CODE (rhs) != ADDR_EXPR)
2393 return (void *)-1;
2395 /* The bases of the destination and the reference have to agree. */
2396 if (TREE_CODE (base) == MEM_REF)
2398 if (TREE_OPERAND (base, 0) != lhs
2399 || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
2400 return (void *) -1;
2401 at += mem_offset;
2403 else if (!DECL_P (base)
2404 || TREE_CODE (lhs) != ADDR_EXPR
2405 || TREE_OPERAND (lhs, 0) != base)
2406 return (void *)-1;
2408 /* If the access is completely outside of the memcpy destination
2409 area there is no aliasing. */
2410 if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
2411 return NULL;
2412 /* And the access has to be contained within the memcpy destination. */
2413 if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
2414 return (void *)-1;
2416 /* Make room for 2 operands in the new reference. */
2417 if (vr->operands.length () < 2)
2419 vec<vn_reference_op_s> old = vr->operands;
2420 vr->operands.safe_grow_cleared (2);
2421 if (old == shared_lookup_references)
2422 shared_lookup_references = vr->operands;
2424 else
2425 vr->operands.truncate (2);
2427 /* The looked-through reference is a simple MEM_REF. */
2428 memset (&op, 0, sizeof (op));
2429 op.type = vr->type;
2430 op.opcode = MEM_REF;
2431 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2432 op.off = at - lhs_offset + rhs_offset;
2433 vr->operands[0] = op;
2434 op.type = TREE_TYPE (rhs);
2435 op.opcode = TREE_CODE (rhs);
2436 op.op0 = rhs;
2437 op.off = -1;
2438 vr->operands[1] = op;
2439 vr->hashcode = vn_reference_compute_hash (vr);
2441 /* Try folding the new reference to a constant. */
2442 tree val = fully_constant_vn_reference_p (vr);
2443 if (val)
2444 return vn_reference_lookup_or_insert_for_pieces
2445 (vuse, vr->set, vr->type, vr->operands, val);
2447 /* Adjust *ref from the new operands. */
2448 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2449 return (void *)-1;
2450 /* This can happen with bitfields. */
2451 if (maybe_ne (ref->size, r.size))
2452 return (void *)-1;
2453 *ref = r;
2455 /* Do not update last seen VUSE after translating. */
2456 last_vuse_ptr = NULL;
2458 /* Keep looking for the adjusted *REF / VR pair. */
2459 return NULL;
2462 /* Bail out and stop walking. */
2463 return (void *)-1;
2466 /* Return a reference op vector from OP that can be used for
2467 vn_reference_lookup_pieces. The caller is responsible for releasing
2468 the vector. */
2470 vec<vn_reference_op_s>
2471 vn_reference_operands_for_lookup (tree op)
2473 bool valueized;
2474 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2477 /* Lookup a reference operation by its parts, in the current hash table.
2478 Returns the resulting value number if it exists in the hash table,
2479 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2480 vn_reference_t stored in the hashtable if something is found. */
2482 tree
2483 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2484 vec<vn_reference_op_s> operands,
2485 vn_reference_t *vnresult, vn_lookup_kind kind)
2487 struct vn_reference_s vr1;
2488 vn_reference_t tmp;
2489 tree cst;
2491 if (!vnresult)
2492 vnresult = &tmp;
2493 *vnresult = NULL;
2495 vr1.vuse = vuse_ssa_val (vuse);
2496 shared_lookup_references.truncate (0);
2497 shared_lookup_references.safe_grow (operands.length ());
2498 memcpy (shared_lookup_references.address (),
2499 operands.address (),
2500 sizeof (vn_reference_op_s)
2501 * operands.length ());
2502 vr1.operands = operands = shared_lookup_references
2503 = valueize_refs (shared_lookup_references);
2504 vr1.type = type;
2505 vr1.set = set;
2506 vr1.hashcode = vn_reference_compute_hash (&vr1);
2507 if ((cst = fully_constant_vn_reference_p (&vr1)))
2508 return cst;
2510 vn_reference_lookup_1 (&vr1, vnresult);
2511 if (!*vnresult
2512 && kind != VN_NOWALK
2513 && vr1.vuse)
2515 ao_ref r;
2516 vn_walk_kind = kind;
2517 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2518 *vnresult =
2519 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2520 vn_reference_lookup_2,
2521 vn_reference_lookup_3,
2522 vuse_ssa_val, &vr1);
2523 gcc_checking_assert (vr1.operands == shared_lookup_references);
2526 if (*vnresult)
2527 return (*vnresult)->result;
2529 return NULL_TREE;
2532 /* Lookup OP in the current hash table, and return the resulting value
2533 number if it exists in the hash table. Return NULL_TREE if it does
2534 not exist in the hash table or if the result field of the structure
2535 was NULL. VNRESULT will be filled in with the vn_reference_t
2536 stored in the hashtable if one exists. When TBAA_P is false assume
2537 we are looking up a store and treat it as having alias-set zero. */
2539 tree
2540 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2541 vn_reference_t *vnresult, bool tbaa_p)
2543 vec<vn_reference_op_s> operands;
2544 struct vn_reference_s vr1;
2545 tree cst;
2546 bool valueized_anything;
2548 if (vnresult)
2549 *vnresult = NULL;
2551 vr1.vuse = vuse_ssa_val (vuse);
2552 vr1.operands = operands
2553 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2554 vr1.type = TREE_TYPE (op);
2555 vr1.set = tbaa_p ? get_alias_set (op) : 0;
2556 vr1.hashcode = vn_reference_compute_hash (&vr1);
2557 if ((cst = fully_constant_vn_reference_p (&vr1)))
2558 return cst;
2560 if (kind != VN_NOWALK
2561 && vr1.vuse)
2563 vn_reference_t wvnresult;
2564 ao_ref r;
2565 /* Make sure to use a valueized reference if we valueized anything.
2566 Otherwise preserve the full reference for advanced TBAA. */
2567 if (!valueized_anything
2568 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2569 vr1.operands))
2570 ao_ref_init (&r, op);
2571 if (! tbaa_p)
2572 r.ref_alias_set = r.base_alias_set = 0;
2573 vn_walk_kind = kind;
2574 wvnresult =
2575 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2576 vn_reference_lookup_2,
2577 vn_reference_lookup_3,
2578 vuse_ssa_val, &vr1);
2579 gcc_checking_assert (vr1.operands == shared_lookup_references);
2580 if (wvnresult)
2582 if (vnresult)
2583 *vnresult = wvnresult;
2584 return wvnresult->result;
2587 return NULL_TREE;
2590 return vn_reference_lookup_1 (&vr1, vnresult);
2593 /* Lookup CALL in the current hash table and return the entry in
2594 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2596 void
2597 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2598 vn_reference_t vr)
2600 if (vnresult)
2601 *vnresult = NULL;
2603 tree vuse = gimple_vuse (call);
2605 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2606 vr->operands = valueize_shared_reference_ops_from_call (call);
2607 vr->type = gimple_expr_type (call);
2608 vr->set = 0;
2609 vr->hashcode = vn_reference_compute_hash (vr);
2610 vn_reference_lookup_1 (vr, vnresult);
2613 /* Insert OP into the current hash table with a value number of
2614 RESULT, and return the resulting reference structure we created. */
2616 static vn_reference_t
2617 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2619 vn_reference_s **slot;
2620 vn_reference_t vr1;
2621 bool tem;
2623 vr1 = current_info->references_pool->allocate ();
2624 if (TREE_CODE (result) == SSA_NAME)
2625 vr1->value_id = VN_INFO (result)->value_id;
2626 else
2627 vr1->value_id = get_or_alloc_constant_value_id (result);
2628 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2629 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2630 vr1->type = TREE_TYPE (op);
2631 vr1->set = get_alias_set (op);
2632 vr1->hashcode = vn_reference_compute_hash (vr1);
2633 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2634 vr1->result_vdef = vdef;
2636 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2637 INSERT);
2639 /* Because we lookup stores using vuses, and value number failures
2640 using the vdefs (see visit_reference_op_store for how and why),
2641 it's possible that on failure we may try to insert an already
2642 inserted store. This is not wrong; there is no SSA name for a
2643 store that we could use as a differentiator anyway. Thus, unlike
2644 the other lookup functions, you cannot gcc_assert (!*slot)
2645 here. */
2647 /* But free the old slot in case of a collision. */
2648 if (*slot)
2649 free_reference (*slot);
2651 *slot = vr1;
2652 return vr1;
2655 /* Insert a reference by its pieces into the current hash table with
2656 a value number of RESULT. Return the resulting reference
2657 structure we created. */
2659 vn_reference_t
2660 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2661 vec<vn_reference_op_s> operands,
2662 tree result, unsigned int value_id)
2665 vn_reference_s **slot;
2666 vn_reference_t vr1;
2668 vr1 = current_info->references_pool->allocate ();
2669 vr1->value_id = value_id;
2670 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2671 vr1->operands = valueize_refs (operands);
2672 vr1->type = type;
2673 vr1->set = set;
2674 vr1->hashcode = vn_reference_compute_hash (vr1);
2675 if (result && TREE_CODE (result) == SSA_NAME)
2676 result = SSA_VAL (result);
2677 vr1->result = result;
2679 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2680 INSERT);
2682 /* At this point we should have all the things inserted that we have
2683 seen before, and we should never try inserting something that
2684 already exists. */
2685 gcc_assert (!*slot);
2686 if (*slot)
2687 free_reference (*slot);
2689 *slot = vr1;
2690 return vr1;
2693 /* Compute and return the hash value for nary operation VNO1. */
2695 static hashval_t
2696 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2698 inchash::hash hstate;
2699 unsigned i;
2701 for (i = 0; i < vno1->length; ++i)
2702 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2703 vno1->op[i] = SSA_VAL (vno1->op[i]);
2705 if (((vno1->length == 2
2706 && commutative_tree_code (vno1->opcode))
2707 || (vno1->length == 3
2708 && commutative_ternary_tree_code (vno1->opcode)))
2709 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2710 std::swap (vno1->op[0], vno1->op[1]);
2711 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2712 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2714 std::swap (vno1->op[0], vno1->op[1]);
2715 vno1->opcode = swap_tree_comparison (vno1->opcode);
2718 hstate.add_int (vno1->opcode);
2719 for (i = 0; i < vno1->length; ++i)
2720 inchash::add_expr (vno1->op[i], hstate);
2722 return hstate.end ();
2725 /* Compare nary operations VNO1 and VNO2 and return true if they are
2726 equivalent. */
2728 bool
2729 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2731 unsigned i;
2733 if (vno1->hashcode != vno2->hashcode)
2734 return false;
2736 if (vno1->length != vno2->length)
2737 return false;
2739 if (vno1->opcode != vno2->opcode
2740 || !types_compatible_p (vno1->type, vno2->type))
2741 return false;
2743 for (i = 0; i < vno1->length; ++i)
2744 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2745 return false;
2747 /* BIT_INSERT_EXPR has an implicit operand as the type precision
2748 of op1. Need to check to make sure they are the same. */
2749 if (vno1->opcode == BIT_INSERT_EXPR
2750 && TREE_CODE (vno1->op[1]) == INTEGER_CST
2751 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
2752 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
2753 return false;
2755 return true;
2758 /* Initialize VNO from the pieces provided. */
2760 static void
2761 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2762 enum tree_code code, tree type, tree *ops)
2764 vno->opcode = code;
2765 vno->length = length;
2766 vno->type = type;
2767 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2770 /* Initialize VNO from OP. */
2772 static void
2773 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2775 unsigned i;
2777 vno->opcode = TREE_CODE (op);
2778 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2779 vno->type = TREE_TYPE (op);
2780 for (i = 0; i < vno->length; ++i)
2781 vno->op[i] = TREE_OPERAND (op, i);
2784 /* Return the number of operands for a vn_nary ops structure from STMT. */
2786 static unsigned int
2787 vn_nary_length_from_stmt (gimple *stmt)
2789 switch (gimple_assign_rhs_code (stmt))
2791 case REALPART_EXPR:
2792 case IMAGPART_EXPR:
2793 case VIEW_CONVERT_EXPR:
2794 return 1;
2796 case BIT_FIELD_REF:
2797 return 3;
2799 case CONSTRUCTOR:
2800 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2802 default:
2803 return gimple_num_ops (stmt) - 1;
2807 /* Initialize VNO from STMT. */
2809 static void
2810 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2812 unsigned i;
2814 vno->opcode = gimple_assign_rhs_code (stmt);
2815 vno->type = gimple_expr_type (stmt);
2816 switch (vno->opcode)
2818 case REALPART_EXPR:
2819 case IMAGPART_EXPR:
2820 case VIEW_CONVERT_EXPR:
2821 vno->length = 1;
2822 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2823 break;
2825 case BIT_FIELD_REF:
2826 vno->length = 3;
2827 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2828 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2829 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2830 break;
2832 case CONSTRUCTOR:
2833 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2834 for (i = 0; i < vno->length; ++i)
2835 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2836 break;
2838 default:
2839 gcc_checking_assert (!gimple_assign_single_p (stmt));
2840 vno->length = gimple_num_ops (stmt) - 1;
2841 for (i = 0; i < vno->length; ++i)
2842 vno->op[i] = gimple_op (stmt, i + 1);
2846 /* Compute the hashcode for VNO and look for it in the hash table;
2847 return the resulting value number if it exists in the hash table.
2848 Return NULL_TREE if it does not exist in the hash table or if the
2849 result field of the operation is NULL. VNRESULT will contain the
2850 vn_nary_op_t from the hashtable if it exists. */
2852 static tree
2853 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2855 vn_nary_op_s **slot;
2857 if (vnresult)
2858 *vnresult = NULL;
2860 vno->hashcode = vn_nary_op_compute_hash (vno);
2861 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2862 NO_INSERT);
2863 if (!slot && current_info == optimistic_info)
2864 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2865 NO_INSERT);
2866 if (!slot)
2867 return NULL_TREE;
2868 if (vnresult)
2869 *vnresult = *slot;
2870 return (*slot)->result;
2873 /* Lookup a n-ary operation by its pieces and return the resulting value
2874 number if it exists in the hash table. Return NULL_TREE if it does
2875 not exist in the hash table or if the result field of the operation
2876 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2877 if it exists. */
2879 tree
2880 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2881 tree type, tree *ops, vn_nary_op_t *vnresult)
2883 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2884 sizeof_vn_nary_op (length));
2885 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2886 return vn_nary_op_lookup_1 (vno1, vnresult);
2889 /* Lookup OP in the current hash table, and return the resulting value
2890 number if it exists in the hash table. Return NULL_TREE if it does
2891 not exist in the hash table or if the result field of the operation
2892 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2893 if it exists. */
2895 tree
2896 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2898 vn_nary_op_t vno1
2899 = XALLOCAVAR (struct vn_nary_op_s,
2900 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2901 init_vn_nary_op_from_op (vno1, op);
2902 return vn_nary_op_lookup_1 (vno1, vnresult);
2905 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2906 value number if it exists in the hash table. Return NULL_TREE if
2907 it does not exist in the hash table. VNRESULT will contain the
2908 vn_nary_op_t from the hashtable if it exists. */
2910 tree
2911 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2913 vn_nary_op_t vno1
2914 = XALLOCAVAR (struct vn_nary_op_s,
2915 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2916 init_vn_nary_op_from_stmt (vno1, stmt);
2917 return vn_nary_op_lookup_1 (vno1, vnresult);
2920 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2922 static vn_nary_op_t
2923 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2925 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2928 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2929 obstack. */
2931 static vn_nary_op_t
2932 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2934 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2935 &current_info->nary_obstack);
2937 vno1->value_id = value_id;
2938 vno1->length = length;
2939 vno1->result = result;
2941 return vno1;
2944 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2945 VNO->HASHCODE first. */
2947 static vn_nary_op_t
2948 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2949 bool compute_hash)
2951 vn_nary_op_s **slot;
2953 if (compute_hash)
2954 vno->hashcode = vn_nary_op_compute_hash (vno);
2956 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2957 /* While we do not want to insert things twice it's awkward to
2958 avoid it in the case where visit_nary_op pattern-matches stuff
2959 and ends up simplifying the replacement to itself. We then
2960 get two inserts, one from visit_nary_op and one from
2961 vn_nary_build_or_lookup.
2962 So allow inserts with the same value number. */
2963 if (*slot && (*slot)->result == vno->result)
2964 return *slot;
2966 gcc_assert (!*slot);
2968 *slot = vno;
2969 return vno;
2972 /* Insert an n-ary operation into the current hash table using its
2973 pieces. Return the vn_nary_op_t structure we created and put in
2974 the hashtable. */
2976 vn_nary_op_t
2977 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2978 tree type, tree *ops,
2979 tree result, unsigned int value_id)
2981 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2982 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2983 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2986 /* Insert OP into the current hash table with a value number of
2987 RESULT. Return the vn_nary_op_t structure we created and put in
2988 the hashtable. */
2990 vn_nary_op_t
2991 vn_nary_op_insert (tree op, tree result)
2993 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2994 vn_nary_op_t vno1;
2996 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2997 init_vn_nary_op_from_op (vno1, op);
2998 return vn_nary_op_insert_into (vno1, current_info->nary, true);
3001 /* Insert the rhs of STMT into the current hash table with a value number of
3002 RESULT. */
3004 static vn_nary_op_t
3005 vn_nary_op_insert_stmt (gimple *stmt, tree result)
3007 vn_nary_op_t vno1
3008 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
3009 result, VN_INFO (result)->value_id);
3010 init_vn_nary_op_from_stmt (vno1, stmt);
3011 return vn_nary_op_insert_into (vno1, current_info->nary, true);
3014 /* Compute a hashcode for PHI operation VP1 and return it. */
3016 static inline hashval_t
3017 vn_phi_compute_hash (vn_phi_t vp1)
3019 inchash::hash hstate (vp1->phiargs.length () > 2
3020 ? vp1->block->index : vp1->phiargs.length ());
3021 tree phi1op;
3022 tree type;
3023 edge e;
3024 edge_iterator ei;
3026 /* If all PHI arguments are constants we need to distinguish
3027 the PHI node via its type. */
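/* For example (illustrative only), x_1 = PHI <0, 1> of type int and
   y_2 = PHI <0, 1> of type long would otherwise hash identically
   even though they must not get the same value number.  */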
3028 type = vp1->type;
3029 hstate.merge_hash (vn_hash_type (type));
3031 FOR_EACH_EDGE (e, ei, vp1->block->preds)
3033 /* Don't hash backedge values; they need to be handled as VN_TOP
3034 for optimistic value-numbering. */
3035 if (e->flags & EDGE_DFS_BACK)
3036 continue;
3038 phi1op = vp1->phiargs[e->dest_idx];
3039 if (phi1op == VN_TOP)
3040 continue;
3041 inchash::add_expr (phi1op, hstate);
3044 return hstate.end ();
3048 /* Return true if COND1 and COND2 represent the same condition, set
3049 *INVERTED_P if one needs to be inverted to make it the same as
3050 the other. */
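/* For example (illustrative only), a_1 > b_2 and b_2 < a_1 are the same
   condition, while a_1 > b_2 and a_1 <= b_2 are inverses of each other
   (modulo NaN handling for floating-point types).  */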
3052 static bool
3053 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
3054 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
3056 enum tree_code code1 = gimple_cond_code (cond1);
3057 enum tree_code code2 = gimple_cond_code (cond2);
3059 *inverted_p = false;
3060 if (code1 == code2)
3062 else if (code1 == swap_tree_comparison (code2))
3063 std::swap (lhs2, rhs2);
3064 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
3065 *inverted_p = true;
3066 else if (code1 == invert_tree_comparison
3067 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
3069 std::swap (lhs2, rhs2);
3070 *inverted_p = true;
3072 else
3073 return false;
3075 return ((expressions_equal_p (lhs1, lhs2)
3076 && expressions_equal_p (rhs1, rhs2))
3077 || (commutative_tree_code (code1)
3078 && expressions_equal_p (lhs1, rhs2)
3079 && expressions_equal_p (rhs1, lhs2)));
3082 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
3084 static int
3085 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
3087 if (vp1->hashcode != vp2->hashcode)
3088 return false;
3090 if (vp1->block != vp2->block)
3092 if (vp1->phiargs.length () != vp2->phiargs.length ())
3093 return false;
3095 switch (vp1->phiargs.length ())
3097 case 1:
3098 /* Single-arg PHIs are just copies. */
3099 break;
3101 case 2:
3103 /* Rule out backedges into the PHI. */
3104 if (vp1->block->loop_father->header == vp1->block
3105 || vp2->block->loop_father->header == vp2->block)
3106 return false;
3108 /* If the PHI nodes do not have compatible types
3109 they are not the same. */
3110 if (!types_compatible_p (vp1->type, vp2->type))
3111 return false;
3113 basic_block idom1
3114 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3115 basic_block idom2
3116 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
3117 /* If the immediate dominators end in switch stmts, multiple
3118 values may end up in the same PHI arg via intermediate
3119 CFG merges. */
3120 if (EDGE_COUNT (idom1->succs) != 2
3121 || EDGE_COUNT (idom2->succs) != 2)
3122 return false;
3124 /* Verify the controlling stmt is the same. */
3125 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3126 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3127 if (! last1 || ! last2)
3128 return false;
3129 bool inverted_p;
3130 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3131 last2, vp2->cclhs, vp2->ccrhs,
3132 &inverted_p))
3133 return false;
3135 /* Get at true/false controlled edges into the PHI. */
3136 edge te1, te2, fe1, fe2;
3137 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3138 &te1, &fe1)
3139 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3140 &te2, &fe2))
3141 return false;
3143 /* Swap edges if the second condition is the inverted of the
3144 first. */
3145 if (inverted_p)
3146 std::swap (te2, fe2);
3148 /* ??? Handle VN_TOP specially. */
3149 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3150 vp2->phiargs[te2->dest_idx])
3151 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3152 vp2->phiargs[fe2->dest_idx]))
3153 return false;
3155 return true;
3158 default:
3159 return false;
3163 /* If the PHI nodes do not have compatible types
3164 they are not the same. */
3165 if (!types_compatible_p (vp1->type, vp2->type))
3166 return false;
3168 /* Any phi in the same block will have its arguments in the
3169 same edge order, because of how we store phi nodes. */
3170 int i;
3171 tree phi1op;
3172 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
3174 tree phi2op = vp2->phiargs[i];
3175 if (phi1op == VN_TOP || phi2op == VN_TOP)
3176 continue;
3177 if (!expressions_equal_p (phi1op, phi2op))
3178 return false;
3181 return true;
3184 static vec<tree> shared_lookup_phiargs;
3186 /* Lookup PHI in the current hash table, and return the resulting
3187 value number if it exists in the hash table. Return NULL_TREE if
3188 it does not exist in the hash table. */
3190 static tree
3191 vn_phi_lookup (gimple *phi)
3193 vn_phi_s **slot;
3194 struct vn_phi_s vp1;
3195 edge e;
3196 edge_iterator ei;
3198 shared_lookup_phiargs.truncate (0);
3199 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
3201 /* Canonicalize the SSA_NAME's to their value number. */
3202 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3204 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3205 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3206 shared_lookup_phiargs[e->dest_idx] = def;
3208 vp1.type = TREE_TYPE (gimple_phi_result (phi));
3209 vp1.phiargs = shared_lookup_phiargs;
3210 vp1.block = gimple_bb (phi);
3211 /* Extract values of the controlling condition. */
3212 vp1.cclhs = NULL_TREE;
3213 vp1.ccrhs = NULL_TREE;
3214 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1.block);
3215 if (EDGE_COUNT (idom1->succs) == 2)
3216 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3218 vp1.cclhs = vn_valueize (gimple_cond_lhs (last1));
3219 vp1.ccrhs = vn_valueize (gimple_cond_rhs (last1));
3221 vp1.hashcode = vn_phi_compute_hash (&vp1);
3222 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3223 NO_INSERT);
3224 if (!slot && current_info == optimistic_info)
3225 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3226 NO_INSERT);
3227 if (!slot)
3228 return NULL_TREE;
3229 return (*slot)->result;
3232 /* Insert PHI into the current hash table with a value number of
3233 RESULT. */
3235 static vn_phi_t
3236 vn_phi_insert (gimple *phi, tree result)
3238 vn_phi_s **slot;
3239 vn_phi_t vp1 = current_info->phis_pool->allocate ();
3240 vec<tree> args = vNULL;
3241 edge e;
3242 edge_iterator ei;
3244 args.safe_grow (gimple_phi_num_args (phi));
3246 /* Canonicalize the SSA_NAME's to their value number. */
3247 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3249 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3250 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3251 args[e->dest_idx] = def;
3253 vp1->value_id = VN_INFO (result)->value_id;
3254 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3255 vp1->phiargs = args;
3256 vp1->block = gimple_bb (phi);
3257 /* Extract values of the controlling condition. */
3258 vp1->cclhs = NULL_TREE;
3259 vp1->ccrhs = NULL_TREE;
3260 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3261 if (EDGE_COUNT (idom1->succs) == 2)
3262 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3264 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3265 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3267 vp1->result = result;
3268 vp1->hashcode = vn_phi_compute_hash (vp1);
3270 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3272 /* Because we iterate over phi operations more than once, it's
3273 possible the slot might already exist here, hence no assert.  */
3274 *slot = vp1;
3275 return vp1;
3279 /* Print set of components in strongly connected component SCC to OUT. */
3281 static void
3282 print_scc (FILE *out, vec<tree> scc)
3284 tree var;
3285 unsigned int i;
3287 fprintf (out, "SCC consists of %u:", scc.length ());
3288 FOR_EACH_VEC_ELT (scc, i, var)
3290 fprintf (out, " ");
3291 print_generic_expr (out, var);
3293 fprintf (out, "\n");
3296 /* Return true if BB1 is dominated by BB2 taking into account edges
3297 that are not executable. */
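/* For example (illustrative only), if all but one predecessor edge of BB1
   has been marked not executable, it suffices that the remaining
   predecessor is dominated by BB2, even if the dominator tree, which
   ignores edge executability, disagrees.  */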
3299 static bool
3300 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3302 edge_iterator ei;
3303 edge e;
3305 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3306 return true;
3308 /* Before iterating we'd like to know if there exists an
3309 (executable) path from bb2 to bb1 at all; if not, we can
3310 directly return false. For now simply iterate once. */
3312 /* Iterate to the single executable bb1 predecessor. */
3313 if (EDGE_COUNT (bb1->preds) > 1)
3315 edge prede = NULL;
3316 FOR_EACH_EDGE (e, ei, bb1->preds)
3317 if (e->flags & EDGE_EXECUTABLE)
3319 if (prede)
3321 prede = NULL;
3322 break;
3324 prede = e;
3326 if (prede)
3328 bb1 = prede->src;
3330 /* Re-do the dominance check with changed bb1. */
3331 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3332 return true;
3336 /* Iterate to the single executable bb2 successor. */
3337 edge succe = NULL;
3338 FOR_EACH_EDGE (e, ei, bb2->succs)
3339 if (e->flags & EDGE_EXECUTABLE)
3341 if (succe)
3343 succe = NULL;
3344 break;
3346 succe = e;
3348 if (succe)
3350 /* Verify the reached block is only reached through succe.
3351 If there is only one edge we can spare us the dominator
3352 check and iterate directly. */
3353 if (EDGE_COUNT (succe->dest->preds) > 1)
3355 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3356 if (e != succe
3357 && (e->flags & EDGE_EXECUTABLE))
3359 succe = NULL;
3360 break;
3363 if (succe)
3365 bb2 = succe->dest;
3367 /* Re-do the dominance check with changed bb2. */
3368 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3369 return true;
3373 /* We could now iterate updating bb1 / bb2. */
3374 return false;
3377 /* Set the value number of FROM to TO, return true if it has changed
3378 as a result. */
3380 static inline bool
3381 set_ssa_val_to (tree from, tree to)
3383 tree currval = SSA_VAL (from);
3384 poly_int64 toff, coff;
3386 /* The only thing we allow as value numbers are ssa_names
3387 and invariants. So assert that here. We don't allow VN_TOP
3388 as visiting a stmt should produce a value-number other than
3389 that.
3390 ??? Still VN_TOP can happen for unreachable code, so force
3391 it to varying in that case. Not all code is prepared to
3392 get VN_TOP on valueization. */
3393 if (to == VN_TOP)
3395 if (dump_file && (dump_flags & TDF_DETAILS))
3396 fprintf (dump_file, "Forcing value number to varying on "
3397 "receiving VN_TOP\n");
3398 to = from;
3401 gcc_assert (to != NULL_TREE
3402 && ((TREE_CODE (to) == SSA_NAME
3403 && (to == from || SSA_VAL (to) == to))
3404 || is_gimple_min_invariant (to)));
3406 if (from != to)
3408 if (currval == from)
3410 if (dump_file && (dump_flags & TDF_DETAILS))
3412 fprintf (dump_file, "Not changing value number of ");
3413 print_generic_expr (dump_file, from);
3414 fprintf (dump_file, " from VARYING to ");
3415 print_generic_expr (dump_file, to);
3416 fprintf (dump_file, "\n");
3418 return false;
3420 else if (currval != VN_TOP
3421 && ! is_gimple_min_invariant (currval)
3422 && is_gimple_min_invariant (to))
3424 if (dump_file && (dump_flags & TDF_DETAILS))
3426 fprintf (dump_file, "Forcing VARYING instead of changing "
3427 "value number of ");
3428 print_generic_expr (dump_file, from);
3429 fprintf (dump_file, " from ");
3430 print_generic_expr (dump_file, currval);
3431 fprintf (dump_file, " (non-constant) to ");
3432 print_generic_expr (dump_file, to);
3433 fprintf (dump_file, " (constant)\n");
3435 to = from;
3437 else if (TREE_CODE (to) == SSA_NAME
3438 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3439 to = from;
3442 if (dump_file && (dump_flags & TDF_DETAILS))
3444 fprintf (dump_file, "Setting value number of ");
3445 print_generic_expr (dump_file, from);
3446 fprintf (dump_file, " to ");
3447 print_generic_expr (dump_file, to);
3450 if (currval != to
3451 && !operand_equal_p (currval, to, 0)
3452 /* Different undefined SSA names are not actually different. See
3453 PR82320 for a testcase where we'd otherwise not terminate iteration. */
3454 && !(TREE_CODE (currval) == SSA_NAME
3455 && TREE_CODE (to) == SSA_NAME
3456 && ssa_undefined_value_p (currval, false)
3457 && ssa_undefined_value_p (to, false))
3458 /* ??? For addresses involving volatile objects or types operand_equal_p
3459 does not reliably detect ADDR_EXPRs as equal. We know we are only
3460 getting invariant gimple addresses here, so can use
3461 get_addr_base_and_unit_offset to do this comparison. */
3462 && !(TREE_CODE (currval) == ADDR_EXPR
3463 && TREE_CODE (to) == ADDR_EXPR
3464 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3465 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3466 && known_eq (coff, toff)))
3468 if (dump_file && (dump_flags & TDF_DETAILS))
3469 fprintf (dump_file, " (changed)\n");
3471 /* If we equate two SSA names we have to make the side-band info
3472 of the leader conservative (and remember whatever original value
3473 was present). */
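/* For example (illustrative only), if TO has a recorded value range
   but its definition does not dominate FROM's, that range need not
   hold at all places where TO now also stands for FROM, so the
   range info is dropped.  */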
3474 if (TREE_CODE (to) == SSA_NAME)
3476 if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3477 && SSA_NAME_RANGE_INFO (to))
3479 if (SSA_NAME_IS_DEFAULT_DEF (to)
3480 || dominated_by_p_w_unex
3481 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3482 gimple_bb (SSA_NAME_DEF_STMT (to))))
3483 /* Keep the info from the dominator. */
3485 else
3487 /* Save old info. */
3488 if (! VN_INFO (to)->info.range_info)
3490 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3491 VN_INFO (to)->range_info_anti_range_p
3492 = SSA_NAME_ANTI_RANGE_P (to);
3494 /* Rather than allocating memory and unioning the info
3495 just clear it. */
3496 if (dump_file && (dump_flags & TDF_DETAILS))
3498 fprintf (dump_file, "clearing range info of ");
3499 print_generic_expr (dump_file, to);
3500 fprintf (dump_file, "\n");
3502 SSA_NAME_RANGE_INFO (to) = NULL;
3505 else if (POINTER_TYPE_P (TREE_TYPE (to))
3506 && SSA_NAME_PTR_INFO (to))
3508 if (SSA_NAME_IS_DEFAULT_DEF (to)
3509 || dominated_by_p_w_unex
3510 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3511 gimple_bb (SSA_NAME_DEF_STMT (to))))
3512 /* Keep the info from the dominator. */
3514 else if (! SSA_NAME_PTR_INFO (from)
3515 /* Handle the case of trivially equivalent info. */
3516 || memcmp (SSA_NAME_PTR_INFO (to),
3517 SSA_NAME_PTR_INFO (from),
3518 sizeof (ptr_info_def)) != 0)
3520 /* Save old info. */
3521 if (! VN_INFO (to)->info.ptr_info)
3522 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3523 /* Rather than allocating memory and unioning the info
3524 just clear it. */
3525 if (dump_file && (dump_flags & TDF_DETAILS))
3527 fprintf (dump_file, "clearing points-to info of ");
3528 print_generic_expr (dump_file, to);
3529 fprintf (dump_file, "\n");
3531 SSA_NAME_PTR_INFO (to) = NULL;
3536 VN_INFO (from)->valnum = to;
3537 return true;
3539 if (dump_file && (dump_flags & TDF_DETAILS))
3540 fprintf (dump_file, "\n");
3541 return false;
3544 /* Mark as processed all the definitions in the defining stmt of USE, or
3545 the USE itself. */
3547 static void
3548 mark_use_processed (tree use)
3550 ssa_op_iter iter;
3551 def_operand_p defp;
3552 gimple *stmt = SSA_NAME_DEF_STMT (use);
3554 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3556 VN_INFO (use)->use_processed = true;
3557 return;
3560 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3562 tree def = DEF_FROM_PTR (defp);
3564 VN_INFO (def)->use_processed = true;
3568 /* Set all definitions in STMT to value number to themselves.
3569 Return true if a value number changed. */
3571 static bool
3572 defs_to_varying (gimple *stmt)
3574 bool changed = false;
3575 ssa_op_iter iter;
3576 def_operand_p defp;
3578 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3580 tree def = DEF_FROM_PTR (defp);
3581 changed |= set_ssa_val_to (def, def);
3583 return changed;
3586 /* Visit a copy between LHS and RHS, return true if the value number
3587 changed. */
3589 static bool
3590 visit_copy (tree lhs, tree rhs)
3592 /* Valueize. */
3593 rhs = SSA_VAL (rhs);
3595 return set_ssa_val_to (lhs, rhs);
3598 /* Lookup a value for OP in type WIDE_TYPE whose value in the type
3599 of OP is the same as OP. */
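/* For example (illustrative only), when widening from int to long this
   may find an existing (long) OP value number, the long value OP was
   itself truncated from, or simply the constant OP extended to long.  */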
3601 static tree
3602 valueized_wider_op (tree wide_type, tree op)
3604 if (TREE_CODE (op) == SSA_NAME)
3605 op = SSA_VAL (op);
3607 /* Either we have the op widened available. */
3608 tree ops[3] = {};
3609 ops[0] = op;
3610 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
3611 wide_type, ops, NULL);
3612 if (tem)
3613 return tem;
3615 /* Or the op is truncated from some existing value. */
3616 if (TREE_CODE (op) == SSA_NAME)
3618 gimple *def = SSA_NAME_DEF_STMT (op);
3619 if (is_gimple_assign (def)
3620 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3622 tem = gimple_assign_rhs1 (def);
3623 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
3625 if (TREE_CODE (tem) == SSA_NAME)
3626 tem = SSA_VAL (tem);
3627 return tem;
3632 /* For a constant simply extend it. */
3633 if (TREE_CODE (op) == INTEGER_CST)
3634 return wide_int_to_tree (wide_type, wi::to_wide (op));
3636 return NULL_TREE;
3639 /* Visit a nary operator RHS, value number it, and return true if the
3640 value number of LHS has changed as a result. */
3642 static bool
3643 visit_nary_op (tree lhs, gassign *stmt)
3645 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3646 if (result)
3647 return set_ssa_val_to (lhs, result);
3649 /* Do some special pattern matching for redundancies of operations
3650 in different types. */
3651 enum tree_code code = gimple_assign_rhs_code (stmt);
3652 tree type = TREE_TYPE (lhs);
3653 tree rhs1 = gimple_assign_rhs1 (stmt);
3654 switch (code)
3656 CASE_CONVERT:
3657 /* Match arithmetic done in a different type where we can easily
3658 substitute the result from some earlier sign-changed or widened
3659 operation. */
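/* For example (illustrative only), for
     short unsigned a_1 = b_2 + c_3;
     unsigned int x_4 = (unsigned int) a_1;
   an existing unsigned int b + c can supply the value of x_4 after
   masking it to the precision of a_1.  */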
3660 if (INTEGRAL_TYPE_P (type)
3661 && TREE_CODE (rhs1) == SSA_NAME
3662 /* We only handle sign-changes or zero-extension -> & mask. */
3663 && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
3664 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
3665 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
3667 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
3668 if (def
3669 && (gimple_assign_rhs_code (def) == PLUS_EXPR
3670 || gimple_assign_rhs_code (def) == MINUS_EXPR
3671 || gimple_assign_rhs_code (def) == MULT_EXPR))
3673 tree ops[3] = {};
3674 /* Either we have the op widened available. */
3675 ops[0] = valueized_wider_op (type,
3676 gimple_assign_rhs1 (def));
3677 if (ops[0])
3678 ops[1] = valueized_wider_op (type,
3679 gimple_assign_rhs2 (def));
3680 if (ops[0] && ops[1])
3682 ops[0] = vn_nary_op_lookup_pieces
3683 (2, gimple_assign_rhs_code (def), type, ops, NULL);
3684 /* We have the wider operation available. */
3685 if (ops[0])
3687 unsigned lhs_prec = TYPE_PRECISION (type);
3688 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
3689 if (lhs_prec == rhs_prec)
3691 gimple_match_op match_op (gimple_match_cond::UNCOND,
3692 NOP_EXPR, type, ops[0]);
3693 result = vn_nary_build_or_lookup (&match_op);
3694 if (result)
3696 bool changed = set_ssa_val_to (lhs, result);
3697 vn_nary_op_insert_stmt (stmt, result);
3698 return changed;
3701 else
3703 tree mask = wide_int_to_tree
3704 (type, wi::mask (rhs_prec, false, lhs_prec));
3705 gimple_match_op match_op (gimple_match_cond::UNCOND,
3706 BIT_AND_EXPR,
3707 TREE_TYPE (lhs),
3708 ops[0], mask);
3709 result = vn_nary_build_or_lookup (&match_op);
3710 if (result)
3712 bool changed = set_ssa_val_to (lhs, result);
3713 vn_nary_op_insert_stmt (stmt, result);
3714 return changed;
3721 default:;
3724 bool changed = set_ssa_val_to (lhs, lhs);
3725 vn_nary_op_insert_stmt (stmt, lhs);
3726 return changed;
3729 /* Visit a call STMT storing into LHS. Return true if the value number
3730 of the LHS has changed as a result. */
3732 static bool
3733 visit_reference_op_call (tree lhs, gcall *stmt)
3735 bool changed = false;
3736 struct vn_reference_s vr1;
3737 vn_reference_t vnresult = NULL;
3738 tree vdef = gimple_vdef (stmt);
3740 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3741 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3742 lhs = NULL_TREE;
3744 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3745 if (vnresult)
3747 if (vnresult->result_vdef && vdef)
3748 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3749 else if (vdef)
3750 /* If the call was discovered to be pure or const reflect
3751 that as far as possible. */
3752 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
3754 if (!vnresult->result && lhs)
3755 vnresult->result = lhs;
3757 if (vnresult->result && lhs)
3758 changed |= set_ssa_val_to (lhs, vnresult->result);
3760 else
3762 vn_reference_t vr2;
3763 vn_reference_s **slot;
3764 tree vdef_val = vdef;
3765 if (vdef)
3767 /* If we value numbered an indirect call's function to
3768 one not clobbering memory, value number its VDEF to its
3769 VUSE. */
3770 tree fn = gimple_call_fn (stmt);
3771 if (fn && TREE_CODE (fn) == SSA_NAME)
3773 fn = SSA_VAL (fn);
3774 if (TREE_CODE (fn) == ADDR_EXPR
3775 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3776 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
3777 & (ECF_CONST | ECF_PURE)))
3778 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
3780 changed |= set_ssa_val_to (vdef, vdef_val);
3782 if (lhs)
3783 changed |= set_ssa_val_to (lhs, lhs);
3784 vr2 = current_info->references_pool->allocate ();
3785 vr2->vuse = vr1.vuse;
3786 /* As we are not walking the virtual operand chain we know the
3787 shared_lookup_references are still original so we can re-use
3788 them here. */
3789 vr2->operands = vr1.operands.copy ();
3790 vr2->type = vr1.type;
3791 vr2->set = vr1.set;
3792 vr2->hashcode = vr1.hashcode;
3793 vr2->result = lhs;
3794 vr2->result_vdef = vdef_val;
3795 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3796 INSERT);
3797 gcc_assert (!*slot);
3798 *slot = vr2;
3801 return changed;
3804 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3805 and return true if the value number of the LHS has changed as a result. */
3807 static bool
3808 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3810 bool changed = false;
3811 tree last_vuse;
3812 tree result;
3814 last_vuse = gimple_vuse (stmt);
3815 last_vuse_ptr = &last_vuse;
3816 result = vn_reference_lookup (op, gimple_vuse (stmt),
3817 default_vn_walk_kind, NULL, true);
3818 last_vuse_ptr = NULL;
3820 /* We handle type-punning through unions by value-numbering based
3821 on offset and size of the access. Be prepared to handle a
3822 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
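/* For example (illustrative only), when the load reads a union member
   of type float but the stored value found has type int, the result
   is expressed as VIEW_CONVERT_EXPR <float> (result).  */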
3823 if (result
3824 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3826 /* We will be setting the value number of lhs to the value number
3827 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3828 So first simplify and lookup this expression to see if it
3829 is already available. */
3830 gimple_match_op res_op (gimple_match_cond::UNCOND,
3831 VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
3832 result = vn_nary_build_or_lookup (&res_op);
3835 if (result)
3836 changed = set_ssa_val_to (lhs, result);
3837 else
3839 changed = set_ssa_val_to (lhs, lhs);
3840 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3843 return changed;
3847 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3848 and return true if the value number of the LHS has changed as a result. */
3850 static bool
3851 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3853 bool changed = false;
3854 vn_reference_t vnresult = NULL;
3855 tree assign;
3856 bool resultsame = false;
3857 tree vuse = gimple_vuse (stmt);
3858 tree vdef = gimple_vdef (stmt);
3860 if (TREE_CODE (op) == SSA_NAME)
3861 op = SSA_VAL (op);
3863 /* First we want to lookup using the *vuses* from the store and see
3864 whether the last store to this location with the same address
3865 had the same value.
3867 The vuses represent the memory state before the store. If the
3868 memory state, address, and value of the store are the same as
3869 those of the last store to this location, then this store will
3870 produce the same memory state as that store.
3872 In this case the vdef versions for this store are value numbered to those
3873 vuse versions, since they represent the same memory state after
3874 this store.
3876 Otherwise, the vdefs for the store are used when inserting into
3877 the table, since the store generates a new memory state. */
3879 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
3880 if (vnresult
3881 && vnresult->result)
3883 tree result = vnresult->result;
3884 if (TREE_CODE (result) == SSA_NAME)
3885 result = SSA_VAL (result);
3886 resultsame = expressions_equal_p (result, op);
3887 if (resultsame)
3889 /* If the TBAA state isn't compatible for downstream reads
3890 we cannot value-number the VDEFs the same. */
3891 alias_set_type set = get_alias_set (lhs);
3892 if (vnresult->set != set
3893 && ! alias_set_subset_of (set, vnresult->set))
3894 resultsame = false;
3898 if (!resultsame)
3900 /* Only perform the following when being called from PRE
3901 which embeds tail merging. */
3902 if (default_vn_walk_kind == VN_WALK)
3904 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3905 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3906 if (vnresult)
3908 VN_INFO (vdef)->use_processed = true;
3909 return set_ssa_val_to (vdef, vnresult->result_vdef);
3913 if (dump_file && (dump_flags & TDF_DETAILS))
3915 fprintf (dump_file, "No store match\n");
3916 fprintf (dump_file, "Value numbering store ");
3917 print_generic_expr (dump_file, lhs);
3918 fprintf (dump_file, " to ");
3919 print_generic_expr (dump_file, op);
3920 fprintf (dump_file, "\n");
3922 /* Have to set value numbers before insert, since insert is
3923 going to valueize the references in-place. */
3924 if (vdef)
3925 changed |= set_ssa_val_to (vdef, vdef);
3927 /* Do not insert structure copies into the tables. */
3928 if (is_gimple_min_invariant (op)
3929 || is_gimple_reg (op))
3930 vn_reference_insert (lhs, op, vdef, NULL);
3932 /* Only perform the following when being called from PRE
3933 which embeds tail merging. */
3934 if (default_vn_walk_kind == VN_WALK)
3936 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3937 vn_reference_insert (assign, lhs, vuse, vdef);
3940 else
3942 /* We had a match, so value number the vdef to have the value
3943 number of the vuse it came from. */
3945 if (dump_file && (dump_flags & TDF_DETAILS))
3946 fprintf (dump_file, "Store matched earlier value, "
3947 "value numbering store vdefs to matching vuses.\n");
3949 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3952 return changed;
3955 /* Visit and value number PHI, return true if the value number
3956 changed. */
3958 static bool
3959 visit_phi (gimple *phi)
3961 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
3962 unsigned n_executable = 0;
3963 bool allsame = true;
3964 edge_iterator ei;
3965 edge e;
3967 /* TODO: We could check for this in init_scc_vn, and replace this
3968 with a gcc_assert. */
3969 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3970 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3972 /* See if all non-TOP arguments have the same value. TOP is
3973 equivalent to everything, so we can ignore it. */
3974 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3975 if (e->flags & EDGE_EXECUTABLE)
3977 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3979 ++n_executable;
3980 if (TREE_CODE (def) == SSA_NAME)
3981 def = SSA_VAL (def);
3982 if (def == VN_TOP)
3984 /* Ignore undefined defs for sameval but record one. */
3985 else if (TREE_CODE (def) == SSA_NAME
3986 && ssa_undefined_value_p (def, false))
3987 seen_undef = def;
3988 else if (sameval == VN_TOP)
3989 sameval = def;
3990 else if (!expressions_equal_p (def, sameval))
3992 allsame = false;
3993 break;
3998 /* If none of the edges was executable keep the value-number at VN_TOP,
3999 if only a single edge is executable use its value. */
4000 if (n_executable <= 1)
4001 result = seen_undef ? seen_undef : sameval;
4002 /* If we saw only undefined values and VN_TOP use one of the
4003 undefined values. */
4004 else if (sameval == VN_TOP)
4005 result = seen_undef ? seen_undef : sameval;
4006 /* First see if it is equivalent to a phi node in this block. We prefer
4007 this as it allows IV elimination - see PRs 66502 and 67167. */
4008 else if ((result = vn_phi_lookup (phi)))
4010 /* If all values are the same use that, unless we've seen undefined
4011 values as well and the value isn't constant.
4012 CCP/copyprop have the same restriction to not remove uninit warnings. */
4013 else if (allsame
4014 && (! seen_undef || is_gimple_min_invariant (sameval)))
4015 result = sameval;
4016 else
4018 result = PHI_RESULT (phi);
4019 /* Only insert PHIs that are varying, for constant value numbers
4020 we mess up equivalences otherwise as we are only comparing
4021 the immediate controlling predicates. */
4022 vn_phi_insert (phi, result);
4025 return set_ssa_val_to (PHI_RESULT (phi), result);
4028 /* Try to simplify RHS using equivalences and constant folding. */
4030 static tree
4031 try_to_simplify (gassign *stmt)
4033 enum tree_code code = gimple_assign_rhs_code (stmt);
4034 tree tem;
4036 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
4037 in this case, there is no point in doing extra work. */
4038 if (code == SSA_NAME)
4039 return NULL_TREE;
4041 /* First try constant folding based on our current lattice. */
4042 mprts_hook = vn_lookup_simplify_result;
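  /* The hook lets the folder look up intermediate simplification results
     in the value-number tables, so values already recorded there are
     reused instead of new statements being built.  */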
4043 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
4044 mprts_hook = NULL;
4045 if (tem
4046 && (TREE_CODE (tem) == SSA_NAME
4047 || is_gimple_min_invariant (tem)))
4048 return tem;
4050 return NULL_TREE;
4053 /* Visit and value number USE, return true if the value number
4054 changed. */
4056 static bool
4057 visit_use (tree use)
4059 bool changed = false;
4060 gimple *stmt = SSA_NAME_DEF_STMT (use);
4062 mark_use_processed (use);
4064 gcc_assert (!SSA_NAME_IN_FREE_LIST (use)
4065 && !SSA_NAME_IS_DEFAULT_DEF (use));
4067 if (dump_file && (dump_flags & TDF_DETAILS))
4069 fprintf (dump_file, "Value numbering ");
4070 print_generic_expr (dump_file, use);
4071 fprintf (dump_file, " stmt = ");
4072 print_gimple_stmt (dump_file, stmt, 0);
4075 if (gimple_code (stmt) == GIMPLE_PHI)
4076 changed = visit_phi (stmt);
4077 else if (gimple_has_volatile_ops (stmt))
4078 changed = defs_to_varying (stmt);
4079 else if (gassign *ass = dyn_cast <gassign *> (stmt))
4081 enum tree_code code = gimple_assign_rhs_code (ass);
4082 tree lhs = gimple_assign_lhs (ass);
4083 tree rhs1 = gimple_assign_rhs1 (ass);
4084 tree simplified;
4086 /* Shortcut for copies. Simplifying copies is pointless,
4087 since we copy the expression and value they represent. */
4088 if (code == SSA_NAME
4089 && TREE_CODE (lhs) == SSA_NAME)
4091 changed = visit_copy (lhs, rhs1);
4092 goto done;
4094 simplified = try_to_simplify (ass);
4095 if (simplified)
4097 if (dump_file && (dump_flags & TDF_DETAILS))
4099 fprintf (dump_file, "RHS ");
4100 print_gimple_expr (dump_file, ass, 0);
4101 fprintf (dump_file, " simplified to ");
4102 print_generic_expr (dump_file, simplified);
4103 fprintf (dump_file, "\n");
4106 /* Setting value numbers to constants will occasionally
4107 screw up phi congruence because constants are not
4108 uniquely associated with a single ssa name that can be
4109 looked up. */
4110 if (simplified
4111 && is_gimple_min_invariant (simplified)
4112 && TREE_CODE (lhs) == SSA_NAME)
4114 changed = set_ssa_val_to (lhs, simplified);
4115 goto done;
4117 else if (simplified
4118 && TREE_CODE (simplified) == SSA_NAME
4119 && TREE_CODE (lhs) == SSA_NAME)
4121 changed = visit_copy (lhs, simplified);
4122 goto done;
4125 if ((TREE_CODE (lhs) == SSA_NAME
4126 /* We can substitute SSA_NAMEs that are live over
4127 abnormal edges with their constant value. */
4128 && !(gimple_assign_copy_p (ass)
4129 && is_gimple_min_invariant (rhs1))
4130 && !(simplified
4131 && is_gimple_min_invariant (simplified))
4132 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4133 /* Stores or copies from SSA_NAMEs that are live over
4134 abnormal edges are a problem. */
4135 || (code == SSA_NAME
4136 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4137 changed = defs_to_varying (ass);
4138 else if (REFERENCE_CLASS_P (lhs)
4139 || DECL_P (lhs))
4140 changed = visit_reference_op_store (lhs, rhs1, ass);
4141 else if (TREE_CODE (lhs) == SSA_NAME)
4143 if ((gimple_assign_copy_p (ass)
4144 && is_gimple_min_invariant (rhs1))
4145 || (simplified
4146 && is_gimple_min_invariant (simplified)))
4148 if (simplified)
4149 changed = set_ssa_val_to (lhs, simplified);
4150 else
4151 changed = set_ssa_val_to (lhs, rhs1);
4153 else
4155 /* Visit the original statement. */
4156 switch (vn_get_stmt_kind (ass))
4158 case VN_NARY:
4159 changed = visit_nary_op (lhs, ass);
4160 break;
4161 case VN_REFERENCE:
4162 changed = visit_reference_op_load (lhs, rhs1, ass);
4163 break;
4164 default:
4165 changed = defs_to_varying (ass);
4166 break;
4170 else
4171 changed = defs_to_varying (ass);
4173 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4175 tree lhs = gimple_call_lhs (call_stmt);
4176 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4178 /* Try constant folding based on our current lattice. */
4179 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4180 vn_valueize);
4181 if (simplified)
4183 if (dump_file && (dump_flags & TDF_DETAILS))
4185 fprintf (dump_file, "call ");
4186 print_gimple_expr (dump_file, call_stmt, 0);
4187 fprintf (dump_file, " simplified to ");
4188 print_generic_expr (dump_file, simplified);
4189 fprintf (dump_file, "\n");
4192 /* Setting value numbers to constants will occasionally
4193 screw up phi congruence because constants are not
4194 uniquely associated with a single ssa name that can be
4195 looked up. */
4196 if (simplified
4197 && is_gimple_min_invariant (simplified))
4199 changed = set_ssa_val_to (lhs, simplified);
4200 if (gimple_vdef (call_stmt))
4201 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4202 SSA_VAL (gimple_vuse (call_stmt)));
4203 goto done;
4205 else if (simplified
4206 && TREE_CODE (simplified) == SSA_NAME)
4208 changed = visit_copy (lhs, simplified);
4209 if (gimple_vdef (call_stmt))
4210 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4211 SSA_VAL (gimple_vuse (call_stmt)));
4212 goto done;
4214 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4216 changed = defs_to_varying (call_stmt);
4217 goto done;
4221 /* Pick up flags from a devirtualization target. */
4222 tree fn = gimple_call_fn (stmt);
4223 int extra_fnflags = 0;
4224 if (fn && TREE_CODE (fn) == SSA_NAME)
4226 fn = SSA_VAL (fn);
4227 if (TREE_CODE (fn) == ADDR_EXPR
4228 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4229 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4231 if (!gimple_call_internal_p (call_stmt)
4232 && (/* Calls to the same function with the same vuse
4233 and the same operands do not necessarily return the same
4234 value, unless they're pure or const. */
4235 ((gimple_call_flags (call_stmt) | extra_fnflags)
4236 & (ECF_PURE | ECF_CONST))
4237 /* If calls have a vdef, subsequent calls won't have
4238 the same incoming vuse. So, if 2 calls with vdef have the
4239 same vuse, we know they're not subsequent.
4240 We can value number 2 calls to the same function with the
4241 same vuse and the same operands that are not subsequent as
4242 the same, because there is no code in the program that can
4243 compare the 2 values...
4244 || (gimple_vdef (call_stmt)
4245 /* ... unless the call returns a pointer which does
4246 not alias with anything else. In which case the
4247 information that the values are distinct is encoded
4248 in the IL. */
4249 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4250 /* Only perform the following when being called from PRE
4251 which embeds tail merging. */
4252 && default_vn_walk_kind == VN_WALK)))
4253 changed = visit_reference_op_call (lhs, call_stmt);
4254 else
4255 changed = defs_to_varying (call_stmt);
4257 else
4258 changed = defs_to_varying (stmt);
4259 done:
4260 return changed;
4263 /* Compare two operands by reverse postorder index */
4265 static int
4266 compare_ops (const void *pa, const void *pb)
4268 const tree opa = *((const tree *)pa);
4269 const tree opb = *((const tree *)pb);
4270 gimple *opstmta = SSA_NAME_DEF_STMT (opa);
4271 gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
4272 basic_block bba;
4273 basic_block bbb;
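  /* Order names without a defining statement first, then by the RPO
     number of the defining block; within a block PHIs come before other
     statements, which are ordered by statement uid, with the SSA name
     version as the final tie breaker.  */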
4275 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
4276 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4277 else if (gimple_nop_p (opstmta))
4278 return -1;
4279 else if (gimple_nop_p (opstmtb))
4280 return 1;
4282 bba = gimple_bb (opstmta);
4283 bbb = gimple_bb (opstmtb);
4285 if (!bba && !bbb)
4286 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4287 else if (!bba)
4288 return -1;
4289 else if (!bbb)
4290 return 1;
4292 if (bba == bbb)
4294 if (gimple_code (opstmta) == GIMPLE_PHI
4295 && gimple_code (opstmtb) == GIMPLE_PHI)
4296 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4297 else if (gimple_code (opstmta) == GIMPLE_PHI)
4298 return -1;
4299 else if (gimple_code (opstmtb) == GIMPLE_PHI)
4300 return 1;
4301 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
4302 return gimple_uid (opstmta) - gimple_uid (opstmtb);
4303 else
4304 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4306 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
4309 /* Sort an array containing members of a strongly connected component
4310 SCC so that the members are ordered by RPO number.
4311 This means that when the sort is complete, iterating through the
4312 array will give you the members in RPO order. */
4314 static void
4315 sort_scc (vec<tree> scc)
4317 scc.qsort (compare_ops);
4320 /* Insert the no longer used nary ONARY to the hash INFO. */
4322 static void
4323 copy_nary (vn_nary_op_t onary, vn_tables_t info)
4325 size_t size = sizeof_vn_nary_op (onary->length);
4326 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
4327 &info->nary_obstack);
4328 memcpy (nary, onary, size);
4329 vn_nary_op_insert_into (nary, info->nary, false);
4332 /* Insert the no longer used phi OPHI to the hash INFO. */
4334 static void
4335 copy_phi (vn_phi_t ophi, vn_tables_t info)
4337 vn_phi_t phi = info->phis_pool->allocate ();
4338 vn_phi_s **slot;
4339 memcpy (phi, ophi, sizeof (*phi));
4340 ophi->phiargs.create (0);
4341 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
4342 gcc_assert (!*slot);
4343 *slot = phi;
4346 /* Insert the no longer used reference OREF to the hash INFO. */
4348 static void
4349 copy_reference (vn_reference_t oref, vn_tables_t info)
4351 vn_reference_t ref;
4352 vn_reference_s **slot;
4353 ref = info->references_pool->allocate ();
4354 memcpy (ref, oref, sizeof (*ref));
4355 oref->operands.create (0);
4356 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
4357 if (*slot)
4358 free_reference (*slot);
4359 *slot = ref;
4362 /* Process a strongly connected component in the SSA graph. */
4364 static void
4365 process_scc (vec<tree> scc)
4367 tree var;
4368 unsigned int i;
4369 unsigned int iterations = 0;
4370 bool changed = true;
4371 vn_nary_op_iterator_type hin;
4372 vn_phi_iterator_type hip;
4373 vn_reference_iterator_type hir;
4374 vn_nary_op_t nary;
4375 vn_phi_t phi;
4376 vn_reference_t ref;
4378 /* If the SCC has a single member, just visit it. */
4379 if (scc.length () == 1)
4381 tree use = scc[0];
4382 if (VN_INFO (use)->use_processed)
4383 return;
4384 /* We need to make sure it doesn't form a cycle itself, which can
4385 happen for self-referential PHI nodes. In that case we would
4386 end up inserting an expression with VN_TOP operands into the
4387 valid table which makes us derive bogus equivalences later.
4388 The cheapest way to check this is to assume it for all PHI nodes. */
4389 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
4390 /* Fallthru to iteration. */ ;
4391 else
4393 visit_use (use);
4394 return;
4398 if (dump_file && (dump_flags & TDF_DETAILS))
4399 print_scc (dump_file, scc);
4401 /* Iterate over the SCC with the optimistic table until it stops
4402 changing. */
4403 current_info = optimistic_info;
4404 while (changed)
4406 changed = false;
4407 iterations++;
4408 if (dump_file && (dump_flags & TDF_DETAILS))
4409 fprintf (dump_file, "Starting iteration %d\n", iterations);
4410 /* As we are value-numbering optimistically we have to
4411 clear the expression tables and the simplified expressions
4412 in each iteration until we converge. */
4413 optimistic_info->nary->empty ();
4414 optimistic_info->phis->empty ();
4415 optimistic_info->references->empty ();
4416 obstack_free (&optimistic_info->nary_obstack, NULL);
4417 gcc_obstack_init (&optimistic_info->nary_obstack);
4418 optimistic_info->phis_pool->release ();
4419 optimistic_info->references_pool->release ();
4420 FOR_EACH_VEC_ELT (scc, i, var)
4421 gcc_assert (!VN_INFO (var)->needs_insertion
4422 && VN_INFO (var)->expr == NULL);
4423 FOR_EACH_VEC_ELT (scc, i, var)
4424 changed |= visit_use (var);
4427 if (dump_file && (dump_flags & TDF_DETAILS))
4428 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4429 statistics_histogram_event (cfun, "SCC iterations", iterations);
4431 /* Finally, copy the contents of the no longer used optimistic
4432 table to the valid table. */
4433 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
4434 copy_nary (nary, valid_info);
4435 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
4436 copy_phi (phi, valid_info);
4437 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
4438 ref, vn_reference_t, hir)
4439 copy_reference (ref, valid_info);
4441 current_info = valid_info;
4445 /* Pop the components of the found SCC for NAME off the SCC stack
4446 and process them. SCCs exceeding the size limit are not
4447 value-numbered but have their members dropped to varying. */
4449 static void
4450 extract_and_process_scc_for_name (tree name)
4452 auto_vec<tree> scc;
4453 tree x;
4455 /* Found an SCC, pop the components off the SCC stack and
4456 process them. */
4459 x = sccstack.pop ();
4461 VN_INFO (x)->on_sccstack = false;
4462 scc.safe_push (x);
4463 } while (x != name);
4465 /* Drop all defs in the SCC to varying in case a SCC turns out to be
4466 incredibly large.
4467 ??? Just switch to a non-optimistic mode that avoids any iteration. */
4468 if (scc.length () > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4470 if (dump_file)
4472 print_scc (dump_file, scc);
4473 fprintf (dump_file, "WARNING: Giving up value-numbering SCC due to "
4474 "size %u exceeding %u\n", scc.length (),
4475 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4477 tree var;
4478 unsigned i;
4479 FOR_EACH_VEC_ELT (scc, i, var)
4481 gimple *def = SSA_NAME_DEF_STMT (var);
4482 mark_use_processed (var);
4483 if (SSA_NAME_IS_DEFAULT_DEF (var)
4484 || gimple_code (def) == GIMPLE_PHI)
4485 set_ssa_val_to (var, var);
4486 else
4487 defs_to_varying (def);
4489 return;
4492 if (scc.length () > 1)
4493 sort_scc (scc);
4495 process_scc (scc);
4498 /* Depth first search on NAME to discover and process SCC's in the SSA
4499 graph.
4500 Execution of this algorithm relies on the fact that the SCC's are
4501 popped off the stack in topological order. */
4505 static void
4506 DFS (tree name)
4508 auto_vec<ssa_op_iter> itervec;
4509 auto_vec<tree> namevec;
4510 use_operand_p usep = NULL;
4511 gimple *defstmt;
4512 tree use;
4513 ssa_op_iter iter;
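  /* This is an iterative version of Tarjan's SCC algorithm: instead of
     recursing on each SSA use, the current NAME and its use iterator are
     pushed onto NAMEVEC/ITERVEC and we jump back to start_over, popping
     them again once all uses of a name have been processed.  */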
4515 start_over:
4516 /* SCC info */
4517 VN_INFO (name)->dfsnum = next_dfs_num++;
4518 VN_INFO (name)->visited = true;
4519 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4521 sccstack.safe_push (name);
4522 VN_INFO (name)->on_sccstack = true;
4523 defstmt = SSA_NAME_DEF_STMT (name);
4525 /* Recursively DFS on our operands, looking for SCC's. */
4526 if (!gimple_nop_p (defstmt))
4528 /* Push a new iterator. */
4529 if (gphi *phi = dyn_cast <gphi *> (defstmt))
4530 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4531 else
4532 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4534 else
4535 clear_and_done_ssa_iter (&iter);
4537 while (1)
4539 /* If we are done processing uses of a name, go up the stack
4540 of iterators and process SCCs as we found them. */
4541 if (op_iter_done (&iter))
4543 /* See if we found an SCC. */
4544 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4545 extract_and_process_scc_for_name (name);
4547 /* Check if we are done. */
4548 if (namevec.is_empty ())
4549 return;
4551 /* Restore the last use walker and continue walking there. */
4552 use = name;
4553 name = namevec.pop ();
4554 memcpy (&iter, &itervec.last (),
4555 sizeof (ssa_op_iter));
4556 itervec.pop ();
4557 goto continue_walking;
4560 use = USE_FROM_PTR (usep);
4562 /* Since we handle phi nodes, we will sometimes get
4563 invariants in the use expression. */
4564 if (TREE_CODE (use) == SSA_NAME)
4566 if (! (VN_INFO (use)->visited))
4568 /* Recurse by pushing the current use walking state on
4569 the stack and starting over. */
4570 itervec.safe_push (iter);
4571 namevec.safe_push (name);
4572 name = use;
4573 goto start_over;
4575 continue_walking:
4576 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4577 VN_INFO (use)->low);
4579 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4580 && VN_INFO (use)->on_sccstack)
4582 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4583 VN_INFO (name)->low);
4587 usep = op_iter_next_use (&iter);
4591 /* Allocate a value number table. */
4593 static void
4594 allocate_vn_table (vn_tables_t table)
4596 table->phis = new vn_phi_table_type (23);
4597 table->nary = new vn_nary_op_table_type (23);
4598 table->references = new vn_reference_table_type (23);
4600 gcc_obstack_init (&table->nary_obstack);
4601 table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4602 table->references_pool = new object_allocator<vn_reference_s>
4603 ("VN references");
4606 /* Free a value number table. */
4608 static void
4609 free_vn_table (vn_tables_t table)
4611 delete table->phis;
4612 table->phis = NULL;
4613 delete table->nary;
4614 table->nary = NULL;
4615 delete table->references;
4616 table->references = NULL;
4617 obstack_free (&table->nary_obstack, NULL);
4618 delete table->phis_pool;
4619 delete table->references_pool;
4622 static void
4623 init_scc_vn (void)
4625 int j;
4626 int *rpo_numbers_temp;
4628 calculate_dominance_info (CDI_DOMINATORS);
4629 mark_dfs_back_edges ();
4631 sccstack.create (0);
4632 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4634 constant_value_ids = BITMAP_ALLOC (NULL);
4636 next_dfs_num = 1;
4637 next_value_id = 1;
4639 vn_ssa_aux_table.create (num_ssa_names + 1);
4640 /* The create above doesn't actually grow the vector to the right
4641 size, it just preallocates the space to do so. */
4642 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4643 gcc_obstack_init (&vn_ssa_aux_obstack);
4645 shared_lookup_phiargs.create (0);
4646 shared_lookup_references.create (0);
4647 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4648 rpo_numbers_temp =
4649 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4650 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4652 /* rpo_numbers_temp is the RPO ordering: rpo[i] = bb means that
4653 the i'th block in RPO order is bb. We want to map bb's to their
4654 RPO numbers, so we need to invert this array. */
4655 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4656 rpo_numbers[rpo_numbers_temp[j]] = j;
4658 XDELETE (rpo_numbers_temp);
4660 VN_TOP = build_decl (UNKNOWN_LOCATION, VAR_DECL,
4661 get_identifier ("VN_TOP"), error_mark_node);
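  /* VN_TOP is an artificial decl used as the lattice top ("value not yet
     computed"); expressions_equal_p considers it equal to everything.  */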
4663 renumber_gimple_stmt_uids ();
4665 /* Create the valid and optimistic value numbering tables. */
4666 valid_info = XCNEW (struct vn_tables_s);
4667 allocate_vn_table (valid_info);
4668 optimistic_info = XCNEW (struct vn_tables_s);
4669 allocate_vn_table (optimistic_info);
4670 current_info = valid_info;
4672 /* Create the VN_INFO structures, and initialize value numbers to
4673 TOP or VARYING for parameters. */
4674 size_t i;
4675 tree name;
4677 FOR_EACH_SSA_NAME (i, name, cfun)
4679 VN_INFO_GET (name)->valnum = VN_TOP;
4680 VN_INFO (name)->needs_insertion = false;
4681 VN_INFO (name)->expr = NULL;
4682 VN_INFO (name)->value_id = 0;
4684 if (!SSA_NAME_IS_DEFAULT_DEF (name))
4685 continue;
4687 switch (TREE_CODE (SSA_NAME_VAR (name)))
4689 case VAR_DECL:
4690 /* All undefined vars are VARYING. */
4691 VN_INFO (name)->valnum = name;
4692 VN_INFO (name)->visited = true;
4693 break;
4695 case PARM_DECL:
4696 /* Parameters are VARYING but we can record a condition
4697 if we know it is a non-NULL pointer. */
4698 VN_INFO (name)->visited = true;
4699 VN_INFO (name)->valnum = name;
4700 if (POINTER_TYPE_P (TREE_TYPE (name))
4701 && nonnull_arg_p (SSA_NAME_VAR (name)))
4703 tree ops[2];
4704 ops[0] = name;
4705 ops[1] = build_int_cst (TREE_TYPE (name), 0);
4706 vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4707 boolean_true_node, 0);
4708 if (dump_file && (dump_flags & TDF_DETAILS))
4710 fprintf (dump_file, "Recording ");
4711 print_generic_expr (dump_file, name, TDF_SLIM);
4712 fprintf (dump_file, " != 0\n");
4715 break;
4717 case RESULT_DECL:
4718 /* If the result is passed by invisible reference the default
4719 def is initialized, otherwise it's uninitialized. Either way,
4720 an undefined value is treated as varying. */
4721 VN_INFO (name)->visited = true;
4722 VN_INFO (name)->valnum = name;
4723 break;
4725 default:
4726 gcc_unreachable ();
4731 /* Restore SSA info that has been reset on value leaders. */
4733 void
4734 scc_vn_restore_ssa_info (void)
4736 unsigned i;
4737 tree name;
4739 FOR_EACH_SSA_NAME (i, name, cfun)
4741 if (has_VN_INFO (name))
4743 if (VN_INFO (name)->needs_insertion)
4745 else if (POINTER_TYPE_P (TREE_TYPE (name))
4746 && VN_INFO (name)->info.ptr_info)
4747 SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4748 else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4749 && VN_INFO (name)->info.range_info)
4751 SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4752 SSA_NAME_ANTI_RANGE_P (name)
4753 = VN_INFO (name)->range_info_anti_range_p;
4759 void
4760 free_scc_vn (void)
4762 size_t i;
4763 tree name;
4765 delete constant_to_value_id;
4766 constant_to_value_id = NULL;
4767 BITMAP_FREE (constant_value_ids);
4768 shared_lookup_phiargs.release ();
4769 shared_lookup_references.release ();
4770 XDELETEVEC (rpo_numbers);
4772 FOR_EACH_SSA_NAME (i, name, cfun)
4774 if (has_VN_INFO (name)
4775 && VN_INFO (name)->needs_insertion)
4776 release_ssa_name (name);
4778 obstack_free (&vn_ssa_aux_obstack, NULL);
4779 vn_ssa_aux_table.release ();
4781 sccstack.release ();
4782 free_vn_table (valid_info);
4783 XDELETE (valid_info);
4784 free_vn_table (optimistic_info);
4785 XDELETE (optimistic_info);
4787 BITMAP_FREE (const_parms);
4790 /* Set *ID according to RESULT. */
4792 static void
4793 set_value_id_for_result (tree result, unsigned int *id)
4795 if (result && TREE_CODE (result) == SSA_NAME)
4796 *id = VN_INFO (result)->value_id;
4797 else if (result && is_gimple_min_invariant (result))
4798 *id = get_or_alloc_constant_value_id (result);
4799 else
4800 *id = get_next_value_id ();
4803 /* Set the value ids in the valid hash tables. */
4805 static void
4806 set_hashtable_value_ids (void)
4808 vn_nary_op_iterator_type hin;
4809 vn_phi_iterator_type hip;
4810 vn_reference_iterator_type hir;
4811 vn_nary_op_t vno;
4812 vn_reference_t vr;
4813 vn_phi_t vp;
4815 /* Now set the value ids of the things we had put in the hash
4816 table. */
4818 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4819 set_value_id_for_result (vno->result, &vno->value_id);
4821 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4822 set_value_id_for_result (vp->result, &vp->value_id);
4824 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4825 hir)
4826 set_value_id_for_result (vr->result, &vr->value_id);
4829 class sccvn_dom_walker : public dom_walker
4831 public:
4832 sccvn_dom_walker ()
4833 : dom_walker (CDI_DOMINATORS, REACHABLE_BLOCKS), cond_stack (0) {}
4835 virtual edge before_dom_children (basic_block);
4836 virtual void after_dom_children (basic_block);
4838 void record_cond (basic_block,
4839 enum tree_code code, tree lhs, tree rhs, bool value);
4840 void record_conds (basic_block,
4841 enum tree_code code, tree lhs, tree rhs, bool value);
4843 auto_vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4844 cond_stack;
4847 /* Record a temporary condition for the BB and its dominated blocks. */
4849 void
4850 sccvn_dom_walker::record_cond (basic_block bb,
4851 enum tree_code code, tree lhs, tree rhs,
4852 bool value)
4854 tree ops[2] = { lhs, rhs };
4855 vn_nary_op_t old = NULL;
4856 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4857 current_info->nary->remove_elt_with_hash (old, old->hashcode);
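  /* Any existing entry is removed and remembered below so that
     after_dom_children can restore it once we leave the region dominated
     by BB.  */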
4858 vn_nary_op_t cond
4859 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4860 value
4861 ? boolean_true_node
4862 : boolean_false_node, 0);
4863 if (dump_file && (dump_flags & TDF_DETAILS))
4865 fprintf (dump_file, "Recording temporarily ");
4866 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4867 fprintf (dump_file, " %s ", get_tree_code_name (code));
4868 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4869 fprintf (dump_file, " == %s%s\n",
4870 value ? "true" : "false",
4871 old ? " (old entry saved)" : "");
4873 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4876 /* Record temporary conditions for the BB and its dominated blocks
4877 according to LHS CODE RHS == VALUE and its dominated conditions. */
4879 void
4880 sccvn_dom_walker::record_conds (basic_block bb,
4881 enum tree_code code, tree lhs, tree rhs,
4882 bool value)
4884 /* Record the original condition. */
4885 record_cond (bb, code, lhs, rhs, value);
4887 if (!value)
4888 return;
4890 /* Record dominated conditions if the condition is true. Note that
4891 the inversion is already recorded. */
4892 switch (code)
4894 case LT_EXPR:
4895 case GT_EXPR:
4896 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4897 record_cond (bb, NE_EXPR, lhs, rhs, true);
4898 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4899 break;
4901 case EQ_EXPR:
4902 record_cond (bb, LE_EXPR, lhs, rhs, true);
4903 record_cond (bb, GE_EXPR, lhs, rhs, true);
4904 record_cond (bb, LT_EXPR, lhs, rhs, false);
4905 record_cond (bb, GT_EXPR, lhs, rhs, false);
4906 break;
4908 default:
4909 break;
4913 /* Restore expressions and values derived from conditionals. */
4915 void
4916 sccvn_dom_walker::after_dom_children (basic_block bb)
4918 while (!cond_stack.is_empty ()
4919 && cond_stack.last ().first == bb)
4921 vn_nary_op_t cond = cond_stack.last ().second.first;
4922 vn_nary_op_t old = cond_stack.last ().second.second;
4923 current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4924 if (old)
4925 vn_nary_op_insert_into (old, current_info->nary, false);
4926 cond_stack.pop ();
4930 /* Value number all statements in BB. */
4932 edge
4933 sccvn_dom_walker::before_dom_children (basic_block bb)
4935 if (dump_file && (dump_flags & TDF_DETAILS))
4936 fprintf (dump_file, "Visiting BB %d\n", bb->index);
4938 /* If we have a single predecessor record the equivalence from a
4939 possible condition on the predecessor edge. */
4940 edge pred_e = single_pred_edge_ignoring_loop_edges (bb, false);
4941 if (pred_e)
4943 /* Check if there are multiple executable successor edges in
4944 the source block. Otherwise there is no additional info
4945 to be recorded. */
4946 edge_iterator ei;
4947 edge e2;
4948 FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4949 if (e2 != pred_e
4950 && e2->flags & EDGE_EXECUTABLE)
4951 break;
4952 if (e2 && (e2->flags & EDGE_EXECUTABLE))
4954 gimple *stmt = last_stmt (pred_e->src);
4955 if (stmt
4956 && gimple_code (stmt) == GIMPLE_COND)
4958 enum tree_code code = gimple_cond_code (stmt);
4959 tree lhs = gimple_cond_lhs (stmt);
4960 tree rhs = gimple_cond_rhs (stmt);
4961 record_conds (bb, code, lhs, rhs,
4962 (pred_e->flags & EDGE_TRUE_VALUE) != 0);
4963 code = invert_tree_comparison (code, HONOR_NANS (lhs));
4964 if (code != ERROR_MARK)
4965 record_conds (bb, code, lhs, rhs,
4966 (pred_e->flags & EDGE_TRUE_VALUE) == 0);
4971 /* Value-number all defs in the basic-block. */
4972 for (gphi_iterator gsi = gsi_start_phis (bb);
4973 !gsi_end_p (gsi); gsi_next (&gsi))
4975 gphi *phi = gsi.phi ();
4976 tree res = PHI_RESULT (phi);
4977 if (!VN_INFO (res)->visited)
4978 DFS (res);
4980 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4981 !gsi_end_p (gsi); gsi_next (&gsi))
4983 ssa_op_iter i;
4984 tree op;
4985 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4986 if (!VN_INFO (op)->visited)
4987 DFS (op);
4990 /* Finally look at the last stmt. */
4991 gimple *stmt = last_stmt (bb);
4992 if (!stmt)
4993 return NULL;
4995 enum gimple_code code = gimple_code (stmt);
4996 if (code != GIMPLE_COND
4997 && code != GIMPLE_SWITCH
4998 && code != GIMPLE_GOTO)
4999 return NULL;
5001 if (dump_file && (dump_flags & TDF_DETAILS))
5003 fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
5004 print_gimple_stmt (dump_file, stmt, 0);
5007 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
5008 if value-numbering can prove they are not reachable. Handling
5009 computed gotos is also possible. */
5010 tree val;
5011 switch (code)
5013 case GIMPLE_COND:
5015 tree lhs = vn_valueize (gimple_cond_lhs (stmt));
5016 tree rhs = vn_valueize (gimple_cond_rhs (stmt));
5017 val = gimple_simplify (gimple_cond_code (stmt),
5018 boolean_type_node, lhs, rhs,
5019 NULL, vn_valueize);
5020 /* If that didn't simplify to a constant see if we have recorded
5021 temporary expressions from taken edges. */
5022 if (!val || TREE_CODE (val) != INTEGER_CST)
5024 tree ops[2];
5025 ops[0] = lhs;
5026 ops[1] = rhs;
5027 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
5028 boolean_type_node, ops, NULL);
5030 break;
5032 case GIMPLE_SWITCH:
5033 val = gimple_switch_index (as_a <gswitch *> (stmt));
5034 break;
5035 case GIMPLE_GOTO:
5036 val = gimple_goto_dest (stmt);
5037 break;
5038 default:
5039 gcc_unreachable ();
5041 if (!val)
5042 return NULL;
5044 edge taken = find_taken_edge (bb, vn_valueize (val));
5045 if (!taken)
5046 return NULL;
5048 if (dump_file && (dump_flags & TDF_DETAILS))
5049 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
5050 "not executable\n", bb->index, bb->index, taken->dest->index);
5052 return taken;
5055 /* Do SCCVN. DEFAULT_VN_WALK_KIND_ specifies how we use the alias
5056 oracle walking during the VN process. */
5059 void
5060 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
5062 size_t i;
5064 default_vn_walk_kind = default_vn_walk_kind_;
5066 init_scc_vn ();
5068 /* Collect pointers we know point to readonly memory. */
5069 const_parms = BITMAP_ALLOC (NULL);
5070 tree fnspec = lookup_attribute ("fn spec",
5071 TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
5072 if (fnspec)
5074 fnspec = TREE_VALUE (TREE_VALUE (fnspec));
5075 i = 1;
5076 for (tree arg = DECL_ARGUMENTS (cfun->decl);
5077 arg; arg = DECL_CHAIN (arg), ++i)
5079 if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
5080 break;
5081 if (TREE_STRING_POINTER (fnspec)[i] == 'R'
5082 || TREE_STRING_POINTER (fnspec)[i] == 'r')
5084 tree name = ssa_default_def (cfun, arg);
5085 if (name)
5086 bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
5091 /* Walk all blocks in dominator order, value-numbering stmts
5092 SSA defs and decide whether outgoing edges are not executable. */
5093 sccvn_dom_walker walker;
5094 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5096 /* Initialize the value ids and prune out remaining VN_TOPs
5097 from dead code. */
5098 tree name;
5099 FOR_EACH_SSA_NAME (i, name, cfun)
5101 vn_ssa_aux_t info = VN_INFO (name);
5102 if (!info->visited
5103 || info->valnum == VN_TOP)
5104 info->valnum = name;
5105 if (info->valnum == name)
5106 info->value_id = get_next_value_id ();
5107 else if (is_gimple_min_invariant (info->valnum))
5108 info->value_id = get_or_alloc_constant_value_id (info->valnum);
5111 /* Propagate value ids: a name value-numbered to another name gets that name's value id. */
5112 FOR_EACH_SSA_NAME (i, name, cfun)
5114 vn_ssa_aux_t info = VN_INFO (name);
5115 if (TREE_CODE (info->valnum) == SSA_NAME
5116 && info->valnum != name
5117 && info->value_id != VN_INFO (info->valnum)->value_id)
5118 info->value_id = VN_INFO (info->valnum)->value_id;
5121 set_hashtable_value_ids ();
5123 if (dump_file && (dump_flags & TDF_DETAILS))
5125 fprintf (dump_file, "Value numbers:\n");
5126 FOR_EACH_SSA_NAME (i, name, cfun)
5128 if (VN_INFO (name)->visited
5129 && SSA_VAL (name) != name)
5131 print_generic_expr (dump_file, name);
5132 fprintf (dump_file, " = ");
5133 print_generic_expr (dump_file, SSA_VAL (name));
5134 fprintf (dump_file, "\n");
5140 /* Return the maximum value id we have ever seen. */
5142 unsigned int
5143 get_max_value_id (void)
5145 return next_value_id;
5148 /* Return the next unique value id. */
5150 unsigned int
5151 get_next_value_id (void)
5153 return next_value_id++;
5157 /* Compare two expressions E1 and E2 and return true if they are equal. */
5159 bool
5160 expressions_equal_p (tree e1, tree e2)
5162 /* The obvious case. */
5163 if (e1 == e2)
5164 return true;
5166 /* If either one is VN_TOP consider them equal. */
5167 if (e1 == VN_TOP || e2 == VN_TOP)
5168 return true;
5170 /* If only one of them is null, they cannot be equal. */
5171 if (!e1 || !e2)
5172 return false;
5174 /* Now perform the actual comparison. */
5175 if (TREE_CODE (e1) == TREE_CODE (e2)
5176 && operand_equal_p (e1, e2, OEP_PURE_SAME))
5177 return true;
5179 return false;
5183 /* Return true if the nary operation NARY may trap. This is a copy
5184 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
5186 bool
5187 vn_nary_may_trap (vn_nary_op_t nary)
5189 tree type;
5190 tree rhs2 = NULL_TREE;
5191 bool honor_nans = false;
5192 bool honor_snans = false;
5193 bool fp_operation = false;
5194 bool honor_trapv = false;
5195 bool handled, ret;
5196 unsigned i;
5198 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5199 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5200 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5202 type = nary->type;
5203 fp_operation = FLOAT_TYPE_P (type);
5204 if (fp_operation)
5206 honor_nans = flag_trapping_math && !flag_finite_math_only;
5207 honor_snans = flag_signaling_nans != 0;
5209 else if (INTEGRAL_TYPE_P (type)
5210 && TYPE_OVERFLOW_TRAPS (type))
5211 honor_trapv = true;
5213 if (nary->length >= 2)
5214 rhs2 = nary->op[1];
5215 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5216 honor_trapv,
5217 honor_nans, honor_snans, rhs2,
5218 &handled);
5219 if (handled
5220 && ret)
5221 return true;
5223 for (i = 0; i < nary->length; ++i)
5224 if (tree_could_trap_p (nary->op[i]))
5225 return true;
5227 return false;
5231 class eliminate_dom_walker : public dom_walker
5233 public:
5234 eliminate_dom_walker (cdi_direction, bitmap);
5235 ~eliminate_dom_walker ();
5237 virtual edge before_dom_children (basic_block);
5238 virtual void after_dom_children (basic_block);
5240 tree eliminate_avail (tree op);
5241 void eliminate_push_avail (tree op);
5242 tree eliminate_insert (gimple_stmt_iterator *gsi, tree val);
5244 bool do_pre;
5245 unsigned int el_todo;
5246 unsigned int eliminations;
5247 unsigned int insertions;
5249 /* SSA names that had their defs inserted by PRE if do_pre. */
5250 bitmap inserted_exprs;
5252 /* Blocks with statements that have had their EH properties changed. */
5253 bitmap need_eh_cleanup;
5255 /* Blocks with statements that have had their AB properties changed. */
5256 bitmap need_ab_cleanup;
5258 auto_vec<gimple *> to_remove;
5259 auto_vec<gimple *> to_fixup;
5260 auto_vec<tree> avail;
5261 auto_vec<tree> avail_stack;
5264 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
5265 bitmap inserted_exprs_)
5266 : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
5267 el_todo (0), eliminations (0), insertions (0),
5268 inserted_exprs (inserted_exprs_)
5270 need_eh_cleanup = BITMAP_ALLOC (NULL);
5271 need_ab_cleanup = BITMAP_ALLOC (NULL);
5274 eliminate_dom_walker::~eliminate_dom_walker ()
5276 BITMAP_FREE (need_eh_cleanup);
5277 BITMAP_FREE (need_ab_cleanup);
5280 /* Return a leader for OP that is available at the current point of the
5281 eliminate domwalk. */
5283 tree
5284 eliminate_dom_walker::eliminate_avail (tree op)
5286 tree valnum = VN_INFO (op)->valnum;
5287 if (TREE_CODE (valnum) == SSA_NAME)
5289 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5290 return valnum;
5291 if (avail.length () > SSA_NAME_VERSION (valnum))
5292 return avail[SSA_NAME_VERSION (valnum)];
5294 else if (is_gimple_min_invariant (valnum))
5295 return valnum;
5296 return NULL_TREE;
5299 /* At the current point of the eliminate domwalk make OP available. */
5301 void
5302 eliminate_dom_walker::eliminate_push_avail (tree op)
5304 tree valnum = VN_INFO (op)->valnum;
5305 if (TREE_CODE (valnum) == SSA_NAME)
5307 if (avail.length () <= SSA_NAME_VERSION (valnum))
5308 avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
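      /* Remember what the slot held before (or OP itself if it was empty)
         so the previous leader can be restored when the domwalk leaves
         this block.  */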
5309 tree pushop = op;
5310 if (avail[SSA_NAME_VERSION (valnum)])
5311 pushop = avail[SSA_NAME_VERSION (valnum)];
5312 avail_stack.safe_push (pushop);
5313 avail[SSA_NAME_VERSION (valnum)] = op;
5317 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
5318 the leader for the expression if insertion was successful. */
5320 tree
5321 eliminate_dom_walker::eliminate_insert (gimple_stmt_iterator *gsi, tree val)
5323 /* We can insert a sequence with a single assignment only. */
5324 gimple_seq stmts = VN_INFO (val)->expr;
5325 if (!gimple_seq_singleton_p (stmts))
5326 return NULL_TREE;
5327 gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
5328 if (!stmt
5329 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
5330 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
5331 && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
5332 && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
5333 || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
5334 return NULL_TREE;
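  /* Only cheap value-preserving operations are re-materialized here:
     conversions, VIEW_CONVERT_EXPR, BIT_FIELD_REF and masking with an
     INTEGER_CST via BIT_AND_EXPR.  */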
5336 tree op = gimple_assign_rhs1 (stmt);
5337 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
5338 || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5339 op = TREE_OPERAND (op, 0);
5340 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
5341 if (!leader)
5342 return NULL_TREE;
5344 tree res;
5345 stmts = NULL;
5346 if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5347 res = gimple_build (&stmts, BIT_FIELD_REF,
5348 TREE_TYPE (val), leader,
5349 TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
5350 TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
5351 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
5352 res = gimple_build (&stmts, BIT_AND_EXPR,
5353 TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
5354 else
5355 res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
5356 TREE_TYPE (val), leader);
5357 if (TREE_CODE (res) != SSA_NAME
5358 || SSA_NAME_IS_DEFAULT_DEF (res)
5359 || gimple_bb (SSA_NAME_DEF_STMT (res)))
5361 gimple_seq_discard (stmts);
5363 /* During propagation we have to treat SSA info conservatively
5364 and thus we can end up simplifying the inserted expression
5365 at elimination time to something not defined in stmts. */
5366 /* But then this is a redundancy we failed to detect. Which means
5367 res now has two values. That doesn't play well with how
5368 we track availability here, so give up. */
5369 if (dump_file && (dump_flags & TDF_DETAILS))
5371 if (TREE_CODE (res) == SSA_NAME)
5372 res = eliminate_avail (res);
5373 if (res)
5375 fprintf (dump_file, "Failed to insert expression for value ");
5376 print_generic_expr (dump_file, val);
5377 fprintf (dump_file, " which is really fully redundant to ");
5378 print_generic_expr (dump_file, res);
5379 fprintf (dump_file, "\n");
5383 return NULL_TREE;
5385 else
5387 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
5388 VN_INFO_GET (res)->valnum = val;
5391 insertions++;
5392 if (dump_file && (dump_flags & TDF_DETAILS))
5394 fprintf (dump_file, "Inserted ");
5395 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
5398 return res;
5403 /* Perform elimination for the basic-block B during the domwalk. */
5405 edge
5406 eliminate_dom_walker::before_dom_children (basic_block b)
5408 /* Mark new bb. */
5409 avail_stack.safe_push (NULL_TREE);
5411 /* Skip unreachable blocks marked unreachable during the SCCVN domwalk. */
5412 edge_iterator ei;
5413 edge e;
5414 FOR_EACH_EDGE (e, ei, b->preds)
5415 if (e->flags & EDGE_EXECUTABLE)
5416 break;
5417 if (! e)
5418 return NULL;
5420 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
5422 gphi *phi = gsi.phi ();
5423 tree res = PHI_RESULT (phi);
5425 if (virtual_operand_p (res))
5427 gsi_next (&gsi);
5428 continue;
5431 tree sprime = eliminate_avail (res);
5432 if (sprime
5433 && sprime != res)
5435 if (dump_file && (dump_flags & TDF_DETAILS))
5437 fprintf (dump_file, "Replaced redundant PHI node defining ");
5438 print_generic_expr (dump_file, res);
5439 fprintf (dump_file, " with ");
5440 print_generic_expr (dump_file, sprime);
5441 fprintf (dump_file, "\n");
5444 /* If we inserted this PHI node ourself, it's not an elimination. */
5445 if (! inserted_exprs
5446 || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
5447 eliminations++;
5449 /* If we will propagate into all uses don't bother to do
5450 anything. */
5451 if (may_propagate_copy (res, sprime))
5453 /* Mark the PHI for removal. */
5454 to_remove.safe_push (phi);
5455 gsi_next (&gsi);
5456 continue;
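      /* The value cannot be propagated into all uses, so replace the PHI
         with an explicit assignment from the leader at the start of the
         block.  */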
5459 remove_phi_node (&gsi, false);
5461 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
5462 sprime = fold_convert (TREE_TYPE (res), sprime);
5463 gimple *stmt = gimple_build_assign (res, sprime);
5464 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
5465 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
5466 continue;
5469 eliminate_push_avail (res);
5470 gsi_next (&gsi);
5473 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
5474 !gsi_end_p (gsi);
5475 gsi_next (&gsi))
5477 tree sprime = NULL_TREE;
5478 gimple *stmt = gsi_stmt (gsi);
5479 tree lhs = gimple_get_lhs (stmt);
5480 if (lhs && TREE_CODE (lhs) == SSA_NAME
5481 && !gimple_has_volatile_ops (stmt)
5482 /* See PR43491. Do not replace a global register variable when
5483 it is the RHS of an assignment. Do replace local register
5484 variables since gcc does not guarantee a local variable will
5485 be allocated in a register.
5486 ??? The fix isn't effective here. This should instead
5487 be ensured by not value-numbering them the same but treating
5488 them like volatiles? */
5489 && !(gimple_assign_single_p (stmt)
5490 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
5491 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
5492 && is_global_var (gimple_assign_rhs1 (stmt)))))
5494 sprime = eliminate_avail (lhs);
5495 if (!sprime)
5497 /* If there is no existing usable leader but SCCVN thinks
5498 it has an expression it wants to use as replacement,
5499 insert that. */
5500 tree val = VN_INFO (lhs)->valnum;
5501 if (val != VN_TOP
5502 && TREE_CODE (val) == SSA_NAME
5503 && VN_INFO (val)->needs_insertion
5504 && VN_INFO (val)->expr != NULL
5505 && (sprime = eliminate_insert (&gsi, val)) != NULL_TREE)
5506 eliminate_push_avail (sprime);
5509 /* If this now constitutes a copy, duplicate points-to
5510 and range info appropriately. This is especially
5511 important for inserted code. See tree-ssa-copy.c
5512 for similar code. */
5513 if (sprime
5514 && TREE_CODE (sprime) == SSA_NAME)
5516 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
5517 if (POINTER_TYPE_P (TREE_TYPE (lhs))
5518 && VN_INFO_PTR_INFO (lhs)
5519 && ! VN_INFO_PTR_INFO (sprime))
5521 duplicate_ssa_name_ptr_info (sprime,
5522 VN_INFO_PTR_INFO (lhs));
5523 if (b != sprime_b)
5524 mark_ptr_info_alignment_unknown
5525 (SSA_NAME_PTR_INFO (sprime));
5527 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5528 && VN_INFO_RANGE_INFO (lhs)
5529 && ! VN_INFO_RANGE_INFO (sprime)
5530 && b == sprime_b)
5531 duplicate_ssa_name_range_info (sprime,
5532 VN_INFO_RANGE_TYPE (lhs),
5533 VN_INFO_RANGE_INFO (lhs));
5536 /* Inhibit the use of an inserted PHI on a loop header when
5537 the address of the memory reference is a simple induction
5538 variable. In other cases the vectorizer won't do anything
5539 anyway (either it's loop invariant or a complicated
5540 expression). */
5541 if (sprime
5542 && TREE_CODE (sprime) == SSA_NAME
5543 && do_pre
5544 && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
5545 && loop_outer (b->loop_father)
5546 && has_zero_uses (sprime)
5547 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
5548 && gimple_assign_load_p (stmt))
5550 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
5551 basic_block def_bb = gimple_bb (def_stmt);
5552 if (gimple_code (def_stmt) == GIMPLE_PHI
5553 && def_bb->loop_father->header == def_bb)
5555 loop_p loop = def_bb->loop_father;
5556 ssa_op_iter iter;
5557 tree op;
5558 bool found = false;
5559 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5561 affine_iv iv;
5562 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
5563 if (def_bb
5564 && flow_bb_inside_loop_p (loop, def_bb)
5565 && simple_iv (loop, loop, op, &iv, true))
5567 found = true;
5568 break;
5571 if (found)
5573 if (dump_file && (dump_flags & TDF_DETAILS))
5575 fprintf (dump_file, "Not replacing ");
5576 print_gimple_expr (dump_file, stmt, 0);
5577 fprintf (dump_file, " with ");
5578 print_generic_expr (dump_file, sprime);
5579 fprintf (dump_file, " which would add a loop"
5580 " carried dependence to loop %d\n",
5581 loop->num);
5583 /* Don't keep sprime available. */
5584 sprime = NULL_TREE;
5589 if (sprime)
5591 /* If we can propagate the value computed for LHS into
5592 all uses don't bother doing anything with this stmt. */
5593 if (may_propagate_copy (lhs, sprime))
5595 /* Mark it for removal. */
5596 to_remove.safe_push (stmt);
5598 /* ??? Don't count copy/constant propagations. */
5599 if (gimple_assign_single_p (stmt)
5600 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5601 || gimple_assign_rhs1 (stmt) == sprime))
5602 continue;
5604 if (dump_file && (dump_flags & TDF_DETAILS))
5606 fprintf (dump_file, "Replaced ");
5607 print_gimple_expr (dump_file, stmt, 0);
5608 fprintf (dump_file, " with ");
5609 print_generic_expr (dump_file, sprime);
5610 fprintf (dump_file, " in all uses of ");
5611 print_gimple_stmt (dump_file, stmt, 0);
5614 eliminations++;
5615 continue;
5618 /* If this is an assignment from our leader (which
5619 happens when the value-number is a constant)
5620 then there is nothing to do. */
5621 if (gimple_assign_single_p (stmt)
5622 && sprime == gimple_assign_rhs1 (stmt))
5623 continue;
5625 /* Else replace its RHS. */
5626 bool can_make_abnormal_goto
5627 = is_gimple_call (stmt)
5628 && stmt_can_make_abnormal_goto (stmt);
5630 if (dump_file && (dump_flags & TDF_DETAILS))
5632 fprintf (dump_file, "Replaced ");
5633 print_gimple_expr (dump_file, stmt, 0);
5634 fprintf (dump_file, " with ");
5635 print_generic_expr (dump_file, sprime);
5636 fprintf (dump_file, " in ");
5637 print_gimple_stmt (dump_file, stmt, 0);
5640 eliminations++;
5641 gimple *orig_stmt = stmt;
5642 if (!useless_type_conversion_p (TREE_TYPE (lhs),
5643 TREE_TYPE (sprime)))
5644 sprime = fold_convert (TREE_TYPE (lhs), sprime);
5645 tree vdef = gimple_vdef (stmt);
5646 tree vuse = gimple_vuse (stmt);
5647 propagate_tree_value_into_stmt (&gsi, sprime);
5648 stmt = gsi_stmt (gsi);
5649 update_stmt (stmt);
5650 if (vdef != gimple_vdef (stmt))
5651 VN_INFO (vdef)->valnum = vuse;
5653 /* If we removed EH side-effects from the statement, clean
5654 its EH information. */
5655 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
5657 bitmap_set_bit (need_eh_cleanup,
5658 gimple_bb (stmt)->index);
5659 if (dump_file && (dump_flags & TDF_DETAILS))
5660 fprintf (dump_file, " Removed EH side-effects.\n");
5663 /* Likewise for AB side-effects. */
5664 if (can_make_abnormal_goto
5665 && !stmt_can_make_abnormal_goto (stmt))
5667 bitmap_set_bit (need_ab_cleanup,
5668 gimple_bb (stmt)->index);
5669 if (dump_file && (dump_flags & TDF_DETAILS))
5670 fprintf (dump_file, " Removed AB side-effects.\n");
5673 continue;
5677 /* If the statement is a scalar store, see if the expression
5678 has the same value number as its rhs. If so, the store is
5679 dead. */
5680 if (gimple_assign_single_p (stmt)
5681 && !gimple_has_volatile_ops (stmt)
5682 && !is_gimple_reg (gimple_assign_lhs (stmt))
5683 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5684 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
5686 tree val;
5687 tree rhs = gimple_assign_rhs1 (stmt);
5688 vn_reference_t vnresult;
5689 val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
5690 &vnresult, false);
5691 if (TREE_CODE (rhs) == SSA_NAME)
5692 rhs = VN_INFO (rhs)->valnum;
5693 if (val
5694 && operand_equal_p (val, rhs, 0))
5696 /* We can only remove the later store if the former aliases
5697 at least all accesses the later one does or if the store
5698 was to readonly memory storing the same value. */
5699 alias_set_type set = get_alias_set (lhs);
5700 if (! vnresult
5701 || vnresult->set == set
5702 || alias_set_subset_of (set, vnresult->set))
5704 if (dump_file && (dump_flags & TDF_DETAILS))
5706 fprintf (dump_file, "Deleted redundant store ");
5707 print_gimple_stmt (dump_file, stmt, 0);
5710 /* Queue stmt for removal. */
5711 to_remove.safe_push (stmt);
5712 continue;
5717 /* If value numbering left some outgoing edges of this control
5718 statement not executable, force the condition in a way
5719 consistent with that. */
5720 if (gcond *cond = dyn_cast <gcond *> (stmt))
5722 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
5723 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
5725 if (dump_file && (dump_flags & TDF_DETAILS))
5727 fprintf (dump_file, "Removing unexecutable edge from ");
5728 print_gimple_stmt (dump_file, stmt, 0);
5730 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
5731 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
5732 gimple_cond_make_true (cond);
5733 else
5734 gimple_cond_make_false (cond);
5735 update_stmt (cond);
5736 el_todo |= TODO_cleanup_cfg;
5737 continue;
5741 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
5742 bool was_noreturn = (is_gimple_call (stmt)
5743 && gimple_call_noreturn_p (stmt));
5744 tree vdef = gimple_vdef (stmt);
5745 tree vuse = gimple_vuse (stmt);
5747 /* If we didn't replace the whole stmt (or propagate the result
5748 into all uses), replace all uses on this stmt with their
5749 leaders. */
5750 bool modified = false;
5751 use_operand_p use_p;
5752 ssa_op_iter iter;
5753 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5755 tree use = USE_FROM_PTR (use_p);
5756 /* ??? The call code above leaves stmt operands un-updated. */
5757 if (TREE_CODE (use) != SSA_NAME)
5758 continue;
5759 tree sprime = eliminate_avail (use);
5760 if (sprime && sprime != use
5761 && may_propagate_copy (use, sprime)
5762 /* We substitute into debug stmts to avoid excessive
5763 debug temporaries created by removed stmts, but we need
5764 to avoid doing so for inserted sprimes as we never want
5765 to create debug temporaries for them. */
5766 && (!inserted_exprs
5767 || TREE_CODE (sprime) != SSA_NAME
5768 || !is_gimple_debug (stmt)
5769 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
5771 propagate_value (use_p, sprime);
5772 modified = true;
5776 /* Fold the stmt if modified; this canonicalizes MEM_REFs we propagated
5777 into, which is a requirement for the IPA devirt machinery. */
5778 gimple *old_stmt = stmt;
5779 if (modified)
5781 /* If a formerly non-invariant ADDR_EXPR is turned into an
5782 invariant one it was on a separate stmt. */
5783 if (gimple_assign_single_p (stmt)
5784 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
5785 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
5786 gimple_stmt_iterator prev = gsi;
5787 gsi_prev (&prev);
5788 if (fold_stmt (&gsi))
5790 /* fold_stmt may have created new stmts in between
5791 the previous stmt and the folded stmt.  Mark
5792 all defs created there as varying so as not to confuse
5793 the SCCVN machinery, which we use even during
5794 elimination.  */
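/* E.g. folding may gimplify part of the result onto a new stmt with
   a fresh SSA def (say _7); that name has no VN info yet, so the
   loop below gives it a self-valued entry instead of letting later
   lookups trip over it.  _7 is a placeholder.  */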
5795 if (gsi_end_p (prev))
5796 prev = gsi_start_bb (b);
5797 else
5798 gsi_next (&prev);
5799 if (gsi_stmt (prev) != gsi_stmt (gsi))
5802 tree def;
5803 ssa_op_iter dit;
5804 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
5805 dit, SSA_OP_ALL_DEFS)
5806 /* As existing DEFs may move between stmts
5807 we have to guard VN_INFO_GET. */
5808 if (! has_VN_INFO (def))
5809 VN_INFO_GET (def)->valnum = def;
5810 if (gsi_stmt (prev) == gsi_stmt (gsi))
5811 break;
5812 gsi_next (&prev);
5814 while (1);
5816 stmt = gsi_stmt (gsi);
5817 /* In case we folded the stmt away, schedule the NOP for removal.  */
5818 if (gimple_nop_p (stmt))
5819 to_remove.safe_push (stmt);
5822 /* Visit indirect calls and turn them into direct calls if
5823 possible using the devirtualization machinery.  Do this before
5824 checking for required EH/abnormal/noreturn cleanup as devirt
5825 may expose more of those.  */
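/* E.g. for a C++ virtual call that appears in GIMPLE roughly as

     OBJ_TYPE_REF (_5; (struct S) p_2->0) (p_2);

   where the polymorphic call context proves the dynamic type of *p_2
   and leaves exactly one possible target, the call is rewritten to a
   direct call to that method; an empty target set turns it into a
   call to __builtin_unreachable.  S, p_2 and _5 are placeholders.  */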
5826 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5828 tree fn = gimple_call_fn (call_stmt);
5829 if (fn
5830 && flag_devirtualize
5831 && virtual_method_call_p (fn))
5833 tree otr_type = obj_type_ref_class (fn);
5834 unsigned HOST_WIDE_INT otr_tok
5835 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
5836 tree instance;
5837 ipa_polymorphic_call_context context (current_function_decl,
5838 fn, stmt, &instance);
5839 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
5840 otr_type, stmt);
5841 bool final;
5842 vec <cgraph_node *> targets
5843 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
5844 otr_tok, context, &final);
5845 if (dump_file)
5846 dump_possible_polymorphic_call_targets (dump_file,
5847 obj_type_ref_class (fn),
5848 otr_tok, context);
5849 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5851 tree fn;
5852 if (targets.length () == 1)
5853 fn = targets[0]->decl;
5854 else
5855 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5856 if (dump_enabled_p ())
5858 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
5859 "converting indirect call to "
5860 "function %s\n",
5861 lang_hooks.decl_printable_name (fn, 2));
5863 gimple_call_set_fndecl (call_stmt, fn);
5864 /* If changing the call to __builtin_unreachable
5865 or similar noreturn function, adjust gimple_call_fntype
5866 too. */
5867 if (gimple_call_noreturn_p (call_stmt)
5868 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
5869 && TYPE_ARG_TYPES (TREE_TYPE (fn))
5870 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
5871 == void_type_node))
5872 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
5873 maybe_remove_unused_call_args (cfun, call_stmt);
5874 modified = true;
5879 if (modified)
5881 /* When changing a call into a noreturn call, cfg cleanup
5882 is needed to fix up the noreturn call. */
5883 if (!was_noreturn
5884 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
5885 to_fixup.safe_push (stmt);
5886 /* When changing a condition or switch into one for which we know
5887 which edge will be executed, schedule a cfg cleanup.  */
5888 if ((gimple_code (stmt) == GIMPLE_COND
5889 && (gimple_cond_true_p (as_a <gcond *> (stmt))
5890 || gimple_cond_false_p (as_a <gcond *> (stmt))))
5891 || (gimple_code (stmt) == GIMPLE_SWITCH
5892 && TREE_CODE (gimple_switch_index
5893 (as_a <gswitch *> (stmt))) == INTEGER_CST))
5894 el_todo |= TODO_cleanup_cfg;
5895 /* If we removed EH side-effects from the statement, clean
5896 its EH information. */
5897 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
5899 bitmap_set_bit (need_eh_cleanup,
5900 gimple_bb (stmt)->index);
5901 if (dump_file && (dump_flags & TDF_DETAILS))
5902 fprintf (dump_file, " Removed EH side-effects.\n");
5904 /* Likewise for AB side-effects. */
5905 if (can_make_abnormal_goto
5906 && !stmt_can_make_abnormal_goto (stmt))
5908 bitmap_set_bit (need_ab_cleanup,
5909 gimple_bb (stmt)->index);
5910 if (dump_file && (dump_flags & TDF_DETAILS))
5911 fprintf (dump_file, " Removed AB side-effects.\n");
5913 update_stmt (stmt);
5914 if (vdef != gimple_vdef (stmt))
5915 VN_INFO (vdef)->valnum = vuse;
5918 /* Make new values available - for a fully redundant LHS we
5919 already continued to the next stmt above and skip this.  */
5920 def_operand_p defp;
5921 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
5922 eliminate_push_avail (DEF_FROM_PTR (defp));
5925 /* Replace destination PHI arguments. */
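/* E.g. for an executable outgoing edge into a block with

     x_6 = PHI <x_3 (this bb), ...>

   the argument x_3 is replaced by its available leader (say x_1)
   when copy propagation of it is allowed.  Placeholder names.  */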
5926 FOR_EACH_EDGE (e, ei, b->succs)
5927 if (e->flags & EDGE_EXECUTABLE)
5928 for (gphi_iterator gsi = gsi_start_phis (e->dest);
5929 !gsi_end_p (gsi);
5930 gsi_next (&gsi))
5932 gphi *phi = gsi.phi ();
5933 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
5934 tree arg = USE_FROM_PTR (use_p);
5935 if (TREE_CODE (arg) != SSA_NAME
5936 || virtual_operand_p (arg))
5937 continue;
5938 tree sprime = eliminate_avail (arg);
5939 if (sprime && may_propagate_copy (arg, sprime))
5940 propagate_value (use_p, sprime);
5942 return NULL;
5945 /* Make no longer available leaders no longer available. */
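/* Roughly: if the just-finished dominator subtree made x_5 the leader
   for some value that x_1 led before, x_1 was pushed on avail_stack
   at that point and popping it here reinstates x_1; a leader that was
   the first one for its value is popped as itself and its slot is
   cleared.  Entries for one block end at a NULL_TREE marker.
   x_1 and x_5 are placeholders.  */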
5947 void
5948 eliminate_dom_walker::after_dom_children (basic_block)
5950 tree entry;
5951 while ((entry = avail_stack.pop ()) != NULL_TREE)
5953 tree valnum = VN_INFO (entry)->valnum;
5954 tree old = avail[SSA_NAME_VERSION (valnum)];
5955 if (old == entry)
5956 avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
5957 else
5958 avail[SSA_NAME_VERSION (valnum)] = entry;
5962 /* Eliminate fully redundant computations. */
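/* Callers are expected to have run value numbering first.  Plain FRE
   below passes NULL for INSERTED_EXPRS, while PRE is expected to pass
   the bitmap of expressions it inserted so that no debug temporaries
   are created for them (see the use propagation above).  */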
5964 unsigned int
5965 vn_eliminate (bitmap inserted_exprs)
5967 eliminate_dom_walker el (CDI_DOMINATORS, inserted_exprs);
5968 el.avail.reserve (num_ssa_names);
5970 el.walk (cfun->cfg->x_entry_block_ptr);
5972 /* We cannot remove stmts during the BB walk, especially not release
5973 SSA names there, as this confuses the VN machinery.  The stmts ending
5974 up in to_remove are either stores or simple copies.
5975 Remove stmts in reverse order to make debug stmt creation possible.  */
5976 while (!el.to_remove.is_empty ())
5978 gimple *stmt = el.to_remove.pop ();
5980 if (dump_file && (dump_flags & TDF_DETAILS))
5982 fprintf (dump_file, "Removing dead stmt ");
5983 print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
5986 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5987 if (gimple_code (stmt) == GIMPLE_PHI)
5988 remove_phi_node (&gsi, true);
5989 else
5991 basic_block bb = gimple_bb (stmt);
5992 unlink_stmt_vdef (stmt);
5993 if (gsi_remove (&gsi, true))
5994 bitmap_set_bit (el.need_eh_cleanup, bb->index);
5995 if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
5996 bitmap_set_bit (el.need_ab_cleanup, bb->index);
5997 release_defs (stmt);
6000 /* Removing a stmt may expose a forwarder block. */
6001 el.el_todo |= TODO_cleanup_cfg;
6004 /* Fixup stmts that became noreturn calls.  This may require splitting
6005 blocks and thus isn't possible during the dominator walk.  Do this
6006 in reverse order so we don't inadvertently remove a stmt we want to
6007 fix up by visiting a dominating now-noreturn call first.  */
6008 while (!el.to_fixup.is_empty ())
6010 gimple *stmt = el.to_fixup.pop ();
6012 if (dump_file && (dump_flags & TDF_DETAILS))
6014 fprintf (dump_file, "Fixing up noreturn call ");
6015 print_gimple_stmt (dump_file, stmt, 0);
6018 if (fixup_noreturn_call (stmt))
6019 el.el_todo |= TODO_cleanup_cfg;
6022 bool do_eh_cleanup = !bitmap_empty_p (el.need_eh_cleanup);
6023 bool do_ab_cleanup = !bitmap_empty_p (el.need_ab_cleanup);
6025 if (do_eh_cleanup)
6026 gimple_purge_all_dead_eh_edges (el.need_eh_cleanup);
6028 if (do_ab_cleanup)
6029 gimple_purge_all_dead_abnormal_call_edges (el.need_ab_cleanup);
6031 if (do_eh_cleanup || do_ab_cleanup)
6032 el.el_todo |= TODO_cleanup_cfg;
6034 statistics_counter_event (cfun, "Eliminated", el.eliminations);
6035 statistics_counter_event (cfun, "Insertions", el.insertions);
6037 return el.el_todo;
6041 namespace {
6043 const pass_data pass_data_fre =
6045 GIMPLE_PASS, /* type */
6046 "fre", /* name */
6047 OPTGROUP_NONE, /* optinfo_flags */
6048 TV_TREE_FRE, /* tv_id */
6049 ( PROP_cfg | PROP_ssa ), /* properties_required */
6050 0, /* properties_provided */
6051 0, /* properties_destroyed */
6052 0, /* todo_flags_start */
6053 0, /* todo_flags_finish */
6056 class pass_fre : public gimple_opt_pass
6058 public:
6059 pass_fre (gcc::context *ctxt)
6060 : gimple_opt_pass (pass_data_fre, ctxt)
6063 /* opt_pass methods: */
6064 opt_pass * clone () { return new pass_fre (m_ctxt); }
6065 virtual bool gate (function *) { return flag_tree_fre != 0; }
6066 virtual unsigned int execute (function *);
6068 }; // class pass_fre
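/* The clone method above allows FRE to be scheduled more than once
   in the pass pipeline; each instance runs full value numbering
   followed by elimination, as in execute below.  */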
6070 unsigned int
6071 pass_fre::execute (function *)
6073 unsigned int todo = 0;
6075 run_scc_vn (VN_WALKREWRITE);
6077 /* Remove all the redundant expressions. */
6078 todo |= vn_eliminate (NULL);
6080 scc_vn_restore_ssa_info ();
6081 free_scc_vn ();
6083 return todo;
6086 } // anon namespace
6088 gimple_opt_pass *
6089 make_pass_fre (gcc::context *ctxt)
6091 return new pass_fre (ctxt);