gcc/tree-ssa-sccvn.c
/* SCC value numbering for trees
   Copyright (C) 2006-2018 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "ssa.h"
#include "expmed.h"
#include "insn-config.h"
#include "memmodel.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "cfganal.h"
#include "tree-inline.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "flags.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "dumpfile.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-cfg.h"
#include "domwalk.h"
#include "gimple-iterator.h"
#include "gimple-match.h"
#include "stringpool.h"
#include "attribs.h"
#include "tree-pass.h"
#include "statistics.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "tree-cfgcleanup.h"
#include "tree-ssa-loop.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight-line code, it
   is equivalent to a regular hash-based value numbering performed in
   reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.  (An example of this optimistic iteration
   follows this comment.)

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming
   from *outside of that SCC*, so we do not need to do anything special
   to ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
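/* An editorial illustration (not itself part of the pass) of why the
   optimistic iteration over an SCC matters.  For a loop whose body only
   rewrites a value into itself, the SSA cycle

     x_1 = PHI <5 (preheader), x_2 (latch)>
     x_2 = x_1 + 0;

   forms a single SCC.  On the first optimistic round the backedge
   argument x_2 still has the value VN_TOP, which the PHI comparison
   ignores, so x_1 is tentatively value numbered to 5; x_2 = 5 + 0 then
   simplifies to 5, and the next round confirms PHI <5, 5> == 5, giving
   a fixpoint in which both names have the constant value 5.  A
   pessimistic scheme that starts every name as "equal only to itself"
   could never lower x_1 to the constant afterwards.  */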
132 static tree *last_vuse_ptr;
133 static vn_lookup_kind vn_walk_kind;
134 static vn_lookup_kind default_vn_walk_kind;
135 bitmap const_parms;
137 /* vn_nary_op hashtable helpers. */
139 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
141 typedef vn_nary_op_s *compare_type;
142 static inline hashval_t hash (const vn_nary_op_s *);
143 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
146 /* Return the computed hashcode for nary operation P1. */
148 inline hashval_t
149 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
151 return vno1->hashcode;
154 /* Compare nary operations P1 and P2 and return true if they are
155 equivalent. */
157 inline bool
158 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
160 return vn_nary_op_eq (vno1, vno2);
163 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
164 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
167 /* vn_phi hashtable helpers. */
169 static int
170 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
172 struct vn_phi_hasher : pointer_hash <vn_phi_s>
174 static inline hashval_t hash (const vn_phi_s *);
175 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
176 static inline void remove (vn_phi_s *);
179 /* Return the computed hashcode for phi operation P1. */
181 inline hashval_t
182 vn_phi_hasher::hash (const vn_phi_s *vp1)
184 return vp1->hashcode;
187 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
189 inline bool
190 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
192 return vn_phi_eq (vp1, vp2);
195 /* Free a phi operation structure VP. */
197 inline void
198 vn_phi_hasher::remove (vn_phi_s *phi)
200 phi->phiargs.release ();
203 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
204 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
207 /* Compare two reference operands P1 and P2 for equality. Return true if
208 they are equal, and false otherwise. */
210 static int
211 vn_reference_op_eq (const void *p1, const void *p2)
213 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
214 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
216 return (vro1->opcode == vro2->opcode
217 /* We do not care for differences in type qualification. */
218 && (vro1->type == vro2->type
219 || (vro1->type && vro2->type
220 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
221 TYPE_MAIN_VARIANT (vro2->type))))
222 && expressions_equal_p (vro1->op0, vro2->op0)
223 && expressions_equal_p (vro1->op1, vro2->op1)
224 && expressions_equal_p (vro1->op2, vro2->op2));
227 /* Free a reference operation structure VP. */
229 static inline void
230 free_reference (vn_reference_s *vr)
232 vr->operands.release ();
236 /* vn_reference hashtable helpers. */
238 struct vn_reference_hasher : pointer_hash <vn_reference_s>
240 static inline hashval_t hash (const vn_reference_s *);
241 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
242 static inline void remove (vn_reference_s *);
245 /* Return the hashcode for a given reference operation P1. */
247 inline hashval_t
248 vn_reference_hasher::hash (const vn_reference_s *vr1)
250 return vr1->hashcode;
253 inline bool
254 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
256 return vn_reference_eq (v, c);
259 inline void
260 vn_reference_hasher::remove (vn_reference_s *v)
262 free_reference (v);
265 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
266 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
269 /* The set of hashtables and alloc_pool's for their items. */
271 typedef struct vn_tables_s
273 vn_nary_op_table_type *nary;
274 vn_phi_table_type *phis;
275 vn_reference_table_type *references;
276 struct obstack nary_obstack;
277 object_allocator<vn_phi_s> *phis_pool;
278 object_allocator<vn_reference_s> *references_pool;
279 } *vn_tables_t;
282 /* vn_constant hashtable helpers. */
284 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
286 static inline hashval_t hash (const vn_constant_s *);
287 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
290 /* Hash table hash function for vn_constant_t. */
292 inline hashval_t
293 vn_constant_hasher::hash (const vn_constant_s *vc1)
295 return vc1->hashcode;
298 /* Hash table equality function for vn_constant_t. */
300 inline bool
301 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
303 if (vc1->hashcode != vc2->hashcode)
304 return false;
306 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
309 static hash_table<vn_constant_hasher> *constant_to_value_id;
310 static bitmap constant_value_ids;
313 /* Valid hashtables storing information we have proven to be
314 correct. */
316 static vn_tables_t valid_info;
318 /* Optimistic hashtables storing information we are making assumptions about
319 during iterations. */
321 static vn_tables_t optimistic_info;
323 /* Pointer to the set of hashtables that is currently being used.
324 Should always point to either the optimistic_info, or the
325 valid_info. */
327 static vn_tables_t current_info;
330 /* Reverse post order index for each basic block. */
332 static int *rpo_numbers;
334 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
336 /* Return the SSA value of the VUSE x, supporting released VDEFs
337 during elimination which will value-number the VDEF to the
338 associated VUSE (but not substitute in the whole lattice). */
340 static inline tree
341 vuse_ssa_val (tree x)
343 if (!x)
344 return NULL_TREE;
348 tree tem = SSA_VAL (x);
349 /* stmt walking can walk over a backedge and reach code we didn't
350 value-number yet. */
351 if (tem == VN_TOP)
352 return x;
353 x = tem;
355 while (SSA_NAME_IN_FREE_LIST (x));
357 return x;
360 /* This represents the top of the VN lattice, which is the universal
361 value. */
363 tree VN_TOP;
365 /* Unique counter for our value ids. */
367 static unsigned int next_value_id;
369 /* Next DFS number and the stack for strongly connected component
370 detection. */
372 static unsigned int next_dfs_num;
373 static vec<tree> sccstack;
377 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
378 are allocated on an obstack for locality reasons, and to free them
379 without looping over the vec. */
381 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
382 static struct obstack vn_ssa_aux_obstack;
384 /* Return whether there is value numbering information for a given SSA name. */
386 bool
387 has_VN_INFO (tree name)
389 if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
390 return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
391 return false;
394 /* Return the value numbering information for a given SSA name. */
396 vn_ssa_aux_t
397 VN_INFO (tree name)
399 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
400 gcc_checking_assert (res);
401 return res;
404 /* Set the value numbering info for a given SSA name to a given
405 value. */
407 static inline void
408 VN_INFO_SET (tree name, vn_ssa_aux_t value)
410 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
413 /* Initialize the value numbering info for a given SSA name.
414 This should be called just once for every SSA name. */
416 vn_ssa_aux_t
417 VN_INFO_GET (tree name)
419 vn_ssa_aux_t newinfo;
421 gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
422 || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
423 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
424 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
425 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
426 vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
427 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
428 return newinfo;
432 /* Return the vn_kind the expression computed by the stmt should be
433 associated with. */
435 enum vn_kind
436 vn_get_stmt_kind (gimple *stmt)
438 switch (gimple_code (stmt))
440 case GIMPLE_CALL:
441 return VN_REFERENCE;
442 case GIMPLE_PHI:
443 return VN_PHI;
444 case GIMPLE_ASSIGN:
446 enum tree_code code = gimple_assign_rhs_code (stmt);
447 tree rhs1 = gimple_assign_rhs1 (stmt);
448 switch (get_gimple_rhs_class (code))
450 case GIMPLE_UNARY_RHS:
451 case GIMPLE_BINARY_RHS:
452 case GIMPLE_TERNARY_RHS:
453 return VN_NARY;
454 case GIMPLE_SINGLE_RHS:
455 switch (TREE_CODE_CLASS (code))
457 case tcc_reference:
458 /* VOP-less references can go through unary case. */
459 if ((code == REALPART_EXPR
460 || code == IMAGPART_EXPR
461 || code == VIEW_CONVERT_EXPR
462 || code == BIT_FIELD_REF)
463 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
464 return VN_NARY;
466 /* Fallthrough. */
467 case tcc_declaration:
468 return VN_REFERENCE;
470 case tcc_constant:
471 return VN_CONSTANT;
473 default:
474 if (code == ADDR_EXPR)
475 return (is_gimple_min_invariant (rhs1)
476 ? VN_CONSTANT : VN_REFERENCE);
477 else if (code == CONSTRUCTOR)
478 return VN_NARY;
479 return VN_NONE;
481 default:
482 return VN_NONE;
485 default:
486 return VN_NONE;
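/* Editorial examples of the classification above (the GIMPLE statements
   are assumed for illustration, not produced by this file):

     x_1 = y_2 + 1;         -> VN_NARY       (unary/binary/ternary RHS)
     x_1 = a.f;             -> VN_REFERENCE  (memory reference)
     x_1 = foo (y_2);       -> VN_REFERENCE  (calls are references)
     x_1 = PHI <y_2, z_3>;  -> VN_PHI
     x_1 = 42;              -> VN_CONSTANT   (constant RHS)  */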
490 /* Lookup a value id for CONSTANT and return it.  If it does not
491    exist, return 0.  */
493 unsigned int
494 get_constant_value_id (tree constant)
496 vn_constant_s **slot;
497 struct vn_constant_s vc;
499 vc.hashcode = vn_hash_constant_with_type (constant);
500 vc.constant = constant;
501 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
502 if (slot)
503 return (*slot)->value_id;
504 return 0;
507 /* Lookup a value id for CONSTANT, and if it does not exist, create a
508 new one and return it. If it does exist, return it. */
510 unsigned int
511 get_or_alloc_constant_value_id (tree constant)
513 vn_constant_s **slot;
514 struct vn_constant_s vc;
515 vn_constant_t vcp;
517 vc.hashcode = vn_hash_constant_with_type (constant);
518 vc.constant = constant;
519 slot = constant_to_value_id->find_slot (&vc, INSERT);
520 if (*slot)
521 return (*slot)->value_id;
523 vcp = XNEW (struct vn_constant_s);
524 vcp->hashcode = vc.hashcode;
525 vcp->constant = constant;
526 vcp->value_id = get_next_value_id ();
527 *slot = vcp;
528 bitmap_set_bit (constant_value_ids, vcp->value_id);
529 return vcp->value_id;
532 /* Return true if V is a value id for a constant. */
534 bool
535 value_id_constant_p (unsigned int v)
537 return bitmap_bit_p (constant_value_ids, v);
540 /* Compute the hash for a reference operand VRO1. */
542 static void
543 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
545 hstate.add_int (vro1->opcode);
546 if (vro1->op0)
547 inchash::add_expr (vro1->op0, hstate);
548 if (vro1->op1)
549 inchash::add_expr (vro1->op1, hstate);
550 if (vro1->op2)
551 inchash::add_expr (vro1->op2, hstate);
554 /* Compute a hash for the reference operation VR1 and return it. */
556 static hashval_t
557 vn_reference_compute_hash (const vn_reference_t vr1)
559 inchash::hash hstate;
560 hashval_t result;
561 int i;
562 vn_reference_op_t vro;
563 poly_int64 off = -1;
564 bool deref = false;
566 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
568 if (vro->opcode == MEM_REF)
569 deref = true;
570 else if (vro->opcode != ADDR_EXPR)
571 deref = false;
572 if (maybe_ne (vro->off, -1))
574 if (known_eq (off, -1))
575 off = 0;
576 off += vro->off;
578 else
580 if (maybe_ne (off, -1)
581 && maybe_ne (off, 0))
582 hstate.add_poly_int (off);
583 off = -1;
584 if (deref
585 && vro->opcode == ADDR_EXPR)
587 if (vro->op0)
589 tree op = TREE_OPERAND (vro->op0, 0);
590 hstate.add_int (TREE_CODE (op));
591 inchash::add_expr (op, hstate);
594 else
595 vn_reference_op_compute_hash (vro, hstate);
598 result = hstate.end ();
599 /* ??? We would ICE later if we hash instead of adding that in. */
600 if (vr1->vuse)
601 result += SSA_NAME_VERSION (vr1->vuse);
603 return result;
606 /* Return true if reference operations VR1 and VR2 are equivalent. This
607 means they have the same set of operands and vuses. */
609 bool
610 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
612 unsigned i, j;
614 /* Early out if this is not a hash collision. */
615 if (vr1->hashcode != vr2->hashcode)
616 return false;
618 /* The VOP needs to be the same. */
619 if (vr1->vuse != vr2->vuse)
620 return false;
622 /* If the operands are the same we are done. */
623 if (vr1->operands == vr2->operands)
624 return true;
626 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
627 return false;
629 if (INTEGRAL_TYPE_P (vr1->type)
630 && INTEGRAL_TYPE_P (vr2->type))
632 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
633 return false;
635 else if (INTEGRAL_TYPE_P (vr1->type)
636 && (TYPE_PRECISION (vr1->type)
637 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
638 return false;
639 else if (INTEGRAL_TYPE_P (vr2->type)
640 && (TYPE_PRECISION (vr2->type)
641 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
642 return false;
644 i = 0;
645 j = 0;
648 poly_int64 off1 = 0, off2 = 0;
649 vn_reference_op_t vro1, vro2;
650 vn_reference_op_s tem1, tem2;
651 bool deref1 = false, deref2 = false;
652 for (; vr1->operands.iterate (i, &vro1); i++)
654 if (vro1->opcode == MEM_REF)
655 deref1 = true;
656 /* Do not look through a storage order barrier. */
657 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
658 return false;
659 if (known_eq (vro1->off, -1))
660 break;
661 off1 += vro1->off;
663 for (; vr2->operands.iterate (j, &vro2); j++)
665 if (vro2->opcode == MEM_REF)
666 deref2 = true;
667 /* Do not look through a storage order barrier. */
668 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
669 return false;
670 if (known_eq (vro2->off, -1))
671 break;
672 off2 += vro2->off;
674 if (maybe_ne (off1, off2))
675 return false;
676 if (deref1 && vro1->opcode == ADDR_EXPR)
678 memset (&tem1, 0, sizeof (tem1));
679 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
680 tem1.type = TREE_TYPE (tem1.op0);
681 tem1.opcode = TREE_CODE (tem1.op0);
682 vro1 = &tem1;
683 deref1 = false;
685 if (deref2 && vro2->opcode == ADDR_EXPR)
687 memset (&tem2, 0, sizeof (tem2));
688 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
689 tem2.type = TREE_TYPE (tem2.op0);
690 tem2.opcode = TREE_CODE (tem2.op0);
691 vro2 = &tem2;
692 deref2 = false;
694 if (deref1 != deref2)
695 return false;
696 if (!vn_reference_op_eq (vro1, vro2))
697 return false;
698 ++j;
699 ++i;
701 while (vr1->operands.length () != i
702 || vr2->operands.length () != j);
704 return true;
707 /* Copy the operations present in load/store REF into RESULT, a vector of
708 vn_reference_op_s's. */
710 static void
711 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
713 if (TREE_CODE (ref) == TARGET_MEM_REF)
715 vn_reference_op_s temp;
717 result->reserve (3);
719 memset (&temp, 0, sizeof (temp));
720 temp.type = TREE_TYPE (ref);
721 temp.opcode = TREE_CODE (ref);
722 temp.op0 = TMR_INDEX (ref);
723 temp.op1 = TMR_STEP (ref);
724 temp.op2 = TMR_OFFSET (ref);
725 temp.off = -1;
726 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
727 temp.base = MR_DEPENDENCE_BASE (ref);
728 result->quick_push (temp);
730 memset (&temp, 0, sizeof (temp));
731 temp.type = NULL_TREE;
732 temp.opcode = ERROR_MARK;
733 temp.op0 = TMR_INDEX2 (ref);
734 temp.off = -1;
735 result->quick_push (temp);
737 memset (&temp, 0, sizeof (temp));
738 temp.type = NULL_TREE;
739 temp.opcode = TREE_CODE (TMR_BASE (ref));
740 temp.op0 = TMR_BASE (ref);
741 temp.off = -1;
742 result->quick_push (temp);
743 return;
746 /* For non-calls, store the information that makes up the address. */
747 tree orig = ref;
748 while (ref)
750 vn_reference_op_s temp;
752 memset (&temp, 0, sizeof (temp));
753 temp.type = TREE_TYPE (ref);
754 temp.opcode = TREE_CODE (ref);
755 temp.off = -1;
757 switch (temp.opcode)
759 case MODIFY_EXPR:
760 temp.op0 = TREE_OPERAND (ref, 1);
761 break;
762 case WITH_SIZE_EXPR:
763 temp.op0 = TREE_OPERAND (ref, 1);
764 temp.off = 0;
765 break;
766 case MEM_REF:
767 /* The base address gets its own vn_reference_op_s structure. */
768 temp.op0 = TREE_OPERAND (ref, 1);
769 if (!mem_ref_offset (ref).to_shwi (&temp.off))
770 temp.off = -1;
771 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
772 temp.base = MR_DEPENDENCE_BASE (ref);
773 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
774 break;
775 case BIT_FIELD_REF:
776 /* Record bits, position and storage order. */
777 temp.op0 = TREE_OPERAND (ref, 1);
778 temp.op1 = TREE_OPERAND (ref, 2);
779 if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
780 temp.off = -1;
781 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
782 break;
783 case COMPONENT_REF:
784 /* The field decl is enough to unambiguously specify the field,
785 a matching type is not necessary and a mismatching type
786 is always a spurious difference. */
787 temp.type = NULL_TREE;
788 temp.op0 = TREE_OPERAND (ref, 1);
789 temp.op1 = TREE_OPERAND (ref, 2);
791 tree this_offset = component_ref_field_offset (ref);
792 if (this_offset
793 && poly_int_tree_p (this_offset))
795 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
796 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
798 poly_offset_int off
799 = (wi::to_poly_offset (this_offset)
800 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
801 /* Prohibit value-numbering zero offset components
802 of addresses the same before the pass folding
803 __builtin_object_size had a chance to run
804 (checking cfun->after_inlining does the
805 trick here). */
806 if (TREE_CODE (orig) != ADDR_EXPR
807 || maybe_ne (off, 0)
808 || cfun->after_inlining)
809 off.to_shwi (&temp.off);
813 break;
814 case ARRAY_RANGE_REF:
815 case ARRAY_REF:
817 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
818 /* Record index as operand. */
819 temp.op0 = TREE_OPERAND (ref, 1);
820 /* Always record lower bounds and element size. */
821 temp.op1 = array_ref_low_bound (ref);
822 /* But record element size in units of the type alignment. */
823 temp.op2 = TREE_OPERAND (ref, 3);
824 temp.align = eltype->type_common.align;
825 if (! temp.op2)
826 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
827 size_int (TYPE_ALIGN_UNIT (eltype)));
828 if (poly_int_tree_p (temp.op0)
829 && poly_int_tree_p (temp.op1)
830 && TREE_CODE (temp.op2) == INTEGER_CST)
832 poly_offset_int off = ((wi::to_poly_offset (temp.op0)
833 - wi::to_poly_offset (temp.op1))
834 * wi::to_offset (temp.op2)
835 * vn_ref_op_align_unit (&temp));
836 off.to_shwi (&temp.off);
839 break;
840 case VAR_DECL:
841 if (DECL_HARD_REGISTER (ref))
843 temp.op0 = ref;
844 break;
846 /* Fallthru. */
847 case PARM_DECL:
848 case CONST_DECL:
849 case RESULT_DECL:
850 /* Canonicalize decls to MEM[&decl] which is what we end up with
851 when valueizing MEM[ptr] with ptr = &decl. */
852 temp.opcode = MEM_REF;
853 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
854 temp.off = 0;
855 result->safe_push (temp);
856 temp.opcode = ADDR_EXPR;
857 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
858 temp.type = TREE_TYPE (temp.op0);
859 temp.off = -1;
860 break;
861 case STRING_CST:
862 case INTEGER_CST:
863 case COMPLEX_CST:
864 case VECTOR_CST:
865 case REAL_CST:
866 case FIXED_CST:
867 case CONSTRUCTOR:
868 case SSA_NAME:
869 temp.op0 = ref;
870 break;
871 case ADDR_EXPR:
872 if (is_gimple_min_invariant (ref))
874 temp.op0 = ref;
875 break;
877 break;
878 /* These are only interesting for their operands, their
879 existence, and their type. They will never be the last
880 ref in the chain of references (i.e. they require an
881 operand), so we don't have to put anything
882 for op* as it will be handled by the iteration.  */
883 case REALPART_EXPR:
884 temp.off = 0;
885 break;
886 case VIEW_CONVERT_EXPR:
887 temp.off = 0;
888 temp.reverse = storage_order_barrier_p (ref);
889 break;
890 case IMAGPART_EXPR:
891 /* This is only interesting for its constant offset. */
892 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
893 break;
894 default:
895 gcc_unreachable ();
897 result->safe_push (temp);
899 if (REFERENCE_CLASS_P (ref)
900 || TREE_CODE (ref) == MODIFY_EXPR
901 || TREE_CODE (ref) == WITH_SIZE_EXPR
902 || (TREE_CODE (ref) == ADDR_EXPR
903 && !is_gimple_min_invariant (ref)))
904 ref = TREE_OPERAND (ref, 0);
905 else
906 ref = NULL_TREE;
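/* Editorial example of the decomposition above, assuming a global
   'int a[10];' and the load  x = a[i_1]:  the ops vector is filled
   outermost piece first,

     { ARRAY_REF, op0 = i_1, op1 = 0, op2 = 1 (elt size in units of
                                               its alignment), off = -1 }
     { MEM_REF,   op0 = 0,                                      off =  0 }
     { ADDR_EXPR, op0 = &a,                                     off = -1 }

   i.e. the decl 'a' is canonicalized to MEM[&a] exactly as the VAR_DECL
   case above documents, and the variable index keeps off at -1.  */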
910 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
911 operands in *OPS, the reference alias set SET and the reference type TYPE.
912 Return true if something useful was produced. */
914 bool
915 ao_ref_init_from_vn_reference (ao_ref *ref,
916 alias_set_type set, tree type,
917 vec<vn_reference_op_s> ops)
919 vn_reference_op_t op;
920 unsigned i;
921 tree base = NULL_TREE;
922 tree *op0_p = &base;
923 poly_offset_int offset = 0;
924 poly_offset_int max_size;
925 poly_offset_int size = -1;
926 tree size_tree = NULL_TREE;
927 alias_set_type base_alias_set = -1;
929 /* First get the final access size from just the outermost expression. */
930 op = &ops[0];
931 if (op->opcode == COMPONENT_REF)
932 size_tree = DECL_SIZE (op->op0);
933 else if (op->opcode == BIT_FIELD_REF)
934 size_tree = op->op0;
935 else
937 machine_mode mode = TYPE_MODE (type);
938 if (mode == BLKmode)
939 size_tree = TYPE_SIZE (type);
940 else
941 size = GET_MODE_BITSIZE (mode);
943 if (size_tree != NULL_TREE
944 && poly_int_tree_p (size_tree))
945 size = wi::to_poly_offset (size_tree);
947 /* Initially, maxsize is the same as the accessed element size.
948 In the following it will only grow (or become -1). */
949 max_size = size;
951 /* Compute cumulative bit-offset for nested component-refs and array-refs,
952 and find the ultimate containing object. */
953 FOR_EACH_VEC_ELT (ops, i, op)
955 switch (op->opcode)
957 /* These may be in the reference ops, but we cannot do anything
958 sensible with them here. */
959 case ADDR_EXPR:
960 /* Apart from ADDR_EXPR arguments to MEM_REF. */
961 if (base != NULL_TREE
962 && TREE_CODE (base) == MEM_REF
963 && op->op0
964 && DECL_P (TREE_OPERAND (op->op0, 0)))
966 vn_reference_op_t pop = &ops[i-1];
967 base = TREE_OPERAND (op->op0, 0);
968 if (known_eq (pop->off, -1))
970 max_size = -1;
971 offset = 0;
973 else
974 offset += pop->off * BITS_PER_UNIT;
975 op0_p = NULL;
976 break;
978 /* Fallthru. */
979 case CALL_EXPR:
980 return false;
982 /* Record the base objects. */
983 case MEM_REF:
984 base_alias_set = get_deref_alias_set (op->op0);
985 *op0_p = build2 (MEM_REF, op->type,
986 NULL_TREE, op->op0);
987 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
988 MR_DEPENDENCE_BASE (*op0_p) = op->base;
989 op0_p = &TREE_OPERAND (*op0_p, 0);
990 break;
992 case VAR_DECL:
993 case PARM_DECL:
994 case RESULT_DECL:
995 case SSA_NAME:
996 *op0_p = op->op0;
997 op0_p = NULL;
998 break;
1000 /* And now the usual component-reference style ops. */
1001 case BIT_FIELD_REF:
1002 offset += wi::to_offset (op->op1);
1003 break;
1005 case COMPONENT_REF:
1007 tree field = op->op0;
1008 /* We do not have a complete COMPONENT_REF tree here so we
1009 cannot use component_ref_field_offset. Do the interesting
1010 parts manually. */
1011 tree this_offset = DECL_FIELD_OFFSET (field);
1013 if (op->op1 || !poly_int_tree_p (this_offset))
1014 max_size = -1;
1015 else
1017 poly_offset_int woffset = (wi::to_poly_offset (this_offset)
1018 << LOG2_BITS_PER_UNIT);
1019 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1020 offset += woffset;
1022 break;
1025 case ARRAY_RANGE_REF:
1026 case ARRAY_REF:
1027 /* We recorded the lower bound and the element size. */
1028 if (!poly_int_tree_p (op->op0)
1029 || !poly_int_tree_p (op->op1)
1030 || TREE_CODE (op->op2) != INTEGER_CST)
1031 max_size = -1;
1032 else
1034 poly_offset_int woffset
1035 = wi::sext (wi::to_poly_offset (op->op0)
1036 - wi::to_poly_offset (op->op1),
1037 TYPE_PRECISION (TREE_TYPE (op->op0)));
1038 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1039 woffset <<= LOG2_BITS_PER_UNIT;
1040 offset += woffset;
1042 break;
1044 case REALPART_EXPR:
1045 break;
1047 case IMAGPART_EXPR:
1048 offset += size;
1049 break;
1051 case VIEW_CONVERT_EXPR:
1052 break;
1054 case STRING_CST:
1055 case INTEGER_CST:
1056 case COMPLEX_CST:
1057 case VECTOR_CST:
1058 case REAL_CST:
1059 case CONSTRUCTOR:
1060 case CONST_DECL:
1061 return false;
1063 default:
1064 return false;
1068 if (base == NULL_TREE)
1069 return false;
1071 ref->ref = NULL_TREE;
1072 ref->base = base;
1073 ref->ref_alias_set = set;
1074 if (base_alias_set != -1)
1075 ref->base_alias_set = base_alias_set;
1076 else
1077 ref->base_alias_set = get_alias_set (base);
1078 /* We discount volatiles from value-numbering elsewhere. */
1079 ref->volatile_p = false;
1081 if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1083 ref->offset = 0;
1084 ref->size = -1;
1085 ref->max_size = -1;
1086 return true;
1089 if (!offset.to_shwi (&ref->offset))
1091 ref->offset = 0;
1092 ref->max_size = -1;
1093 return true;
1096 if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1097 ref->max_size = -1;
1099 return true;
1102 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1103 vn_reference_op_s's. */
1105 static void
1106 copy_reference_ops_from_call (gcall *call,
1107 vec<vn_reference_op_s> *result)
1109 vn_reference_op_s temp;
1110 unsigned i;
1111 tree lhs = gimple_call_lhs (call);
1112 int lr;
1114 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1115 different. By adding the lhs here in the vector, we ensure that the
1116 hashcode is different, guaranteeing a different value number. */
1117 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1119 memset (&temp, 0, sizeof (temp));
1120 temp.opcode = MODIFY_EXPR;
1121 temp.type = TREE_TYPE (lhs);
1122 temp.op0 = lhs;
1123 temp.off = -1;
1124 result->safe_push (temp);
1127 /* Copy the type, opcode, function, static chain and EH region, if any. */
1128 memset (&temp, 0, sizeof (temp));
1129 temp.type = gimple_call_return_type (call);
1130 temp.opcode = CALL_EXPR;
1131 temp.op0 = gimple_call_fn (call);
1132 temp.op1 = gimple_call_chain (call);
1133 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1134 temp.op2 = size_int (lr);
1135 temp.off = -1;
1136 if (gimple_call_with_bounds_p (call))
1137 temp.with_bounds = 1;
1138 result->safe_push (temp);
1140 /* Copy the call arguments. As they can be references as well,
1141 just chain them together. */
1142 for (i = 0; i < gimple_call_num_args (call); ++i)
1144 tree callarg = gimple_call_arg (call, i);
1145 copy_reference_ops_from_ref (callarg, result);
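/* Editorial example for the call decomposition above (the builtin and
   SSA names are assumed for illustration): for
   res_3 = memcpy (p_1, q_2, 16);  with an SSA lhs no MODIFY_EXPR entry
   is added, so the ops vector is

     { CALL_EXPR,   op0 = &memcpy, op1 = static chain (NULL) }
     { SSA_NAME,    op0 = p_1 }
     { SSA_NAME,    op0 = q_2 }
     { INTEGER_CST, op0 = 16 }

   whereas a call with a non-SSA lhs would additionally start with a
   MODIFY_EXPR entry recording that lhs.  */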
1149 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1150 *I_P to point to the last element of the replacement. */
1151 static bool
1152 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1153 unsigned int *i_p)
1155 unsigned int i = *i_p;
1156 vn_reference_op_t op = &(*ops)[i];
1157 vn_reference_op_t mem_op = &(*ops)[i - 1];
1158 tree addr_base;
1159 poly_int64 addr_offset = 0;
1161 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1162 from .foo.bar to the preceding MEM_REF offset and replace the
1163 address with &OBJ. */
1164 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1165 &addr_offset);
1166 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1167 if (addr_base != TREE_OPERAND (op->op0, 0))
1169 poly_offset_int off
1170 = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1171 SIGNED)
1172 + addr_offset);
1173 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1174 op->op0 = build_fold_addr_expr (addr_base);
1175 if (tree_fits_shwi_p (mem_op->op0))
1176 mem_op->off = tree_to_shwi (mem_op->op0);
1177 else
1178 mem_op->off = -1;
1179 return true;
1181 return false;
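/* Editorial example for the folding above, assuming field 'f' lives at
   byte offset 4 within  struct S s:  given the ops of  MEM[&s.f + 0],
   the ADDR_EXPR operand &s.f is rewritten to &s and the preceding
   MEM_REF offset becomes 4, i.e. the pair now describes  MEM[&s + 4],
   with the constant 'off' field updated to match.  */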
1184 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1185 *I_P to point to the last element of the replacement. */
1186 static bool
1187 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1188 unsigned int *i_p)
1190 unsigned int i = *i_p;
1191 vn_reference_op_t op = &(*ops)[i];
1192 vn_reference_op_t mem_op = &(*ops)[i - 1];
1193 gimple *def_stmt;
1194 enum tree_code code;
1195 poly_offset_int off;
1197 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1198 if (!is_gimple_assign (def_stmt))
1199 return false;
1201 code = gimple_assign_rhs_code (def_stmt);
1202 if (code != ADDR_EXPR
1203 && code != POINTER_PLUS_EXPR)
1204 return false;
1206 off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
1208 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1209 from .foo.bar to the preceding MEM_REF offset and replace the
1210 address with &OBJ. */
1211 if (code == ADDR_EXPR)
1213 tree addr, addr_base;
1214 poly_int64 addr_offset;
1216 addr = gimple_assign_rhs1 (def_stmt);
1217 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1218 &addr_offset);
1219 /* If that didn't work because the address isn't invariant, propagate
1220 the reference tree from the address operation in case the current
1221 dereference isn't offsetted.  */
1222 if (!addr_base
1223 && *i_p == ops->length () - 1
1224 && known_eq (off, 0)
1225 /* This makes us disable this transform for PRE where the
1226 reference ops might also be used for code insertion, which
1227 is invalid. */
1228 && default_vn_walk_kind == VN_WALKREWRITE)
1230 auto_vec<vn_reference_op_s, 32> tem;
1231 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1232 /* Make sure to preserve TBAA info. The only objects not
1233 wrapped in MEM_REFs that can have their address taken are
1234 STRING_CSTs. */
1235 if (tem.length () >= 2
1236 && tem[tem.length () - 2].opcode == MEM_REF)
1238 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1239 new_mem_op->op0
1240 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1241 wi::to_poly_wide (new_mem_op->op0));
1243 else
1244 gcc_assert (tem.last ().opcode == STRING_CST);
1245 ops->pop ();
1246 ops->pop ();
1247 ops->safe_splice (tem);
1248 --*i_p;
1249 return true;
1251 if (!addr_base
1252 || TREE_CODE (addr_base) != MEM_REF
1253 || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1254 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base, 0))))
1255 return false;
1257 off += addr_offset;
1258 off += mem_ref_offset (addr_base);
1259 op->op0 = TREE_OPERAND (addr_base, 0);
1261 else
1263 tree ptr, ptroff;
1264 ptr = gimple_assign_rhs1 (def_stmt);
1265 ptroff = gimple_assign_rhs2 (def_stmt);
1266 if (TREE_CODE (ptr) != SSA_NAME
1267 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1268 || TREE_CODE (ptroff) != INTEGER_CST)
1269 return false;
1271 off += wi::to_offset (ptroff);
1272 op->op0 = ptr;
1275 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1276 if (tree_fits_shwi_p (mem_op->op0))
1277 mem_op->off = tree_to_shwi (mem_op->op0);
1278 else
1279 mem_op->off = -1;
1280 if (TREE_CODE (op->op0) == SSA_NAME)
1281 op->op0 = SSA_VAL (op->op0);
1282 if (TREE_CODE (op->op0) != SSA_NAME)
1283 op->opcode = TREE_CODE (op->op0);
1285 /* And recurse. */
1286 if (TREE_CODE (op->op0) == SSA_NAME)
1287 vn_reference_maybe_forwprop_address (ops, i_p);
1288 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1289 vn_reference_fold_indirect (ops, i_p);
1290 return true;
1293 /* Optimize the reference REF to a constant if possible or return
1294 NULL_TREE if not. */
1296 tree
1297 fully_constant_vn_reference_p (vn_reference_t ref)
1299 vec<vn_reference_op_s> operands = ref->operands;
1300 vn_reference_op_t op;
1302 /* Try to simplify the translated expression if it is
1303 a call to a builtin function with at most two arguments. */
1304 op = &operands[0];
1305 if (op->opcode == CALL_EXPR
1306 && TREE_CODE (op->op0) == ADDR_EXPR
1307 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1308 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1309 && operands.length () >= 2
1310 && operands.length () <= 3)
1312 vn_reference_op_t arg0, arg1 = NULL;
1313 bool anyconst = false;
1314 arg0 = &operands[1];
1315 if (operands.length () > 2)
1316 arg1 = &operands[2];
1317 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1318 || (arg0->opcode == ADDR_EXPR
1319 && is_gimple_min_invariant (arg0->op0)))
1320 anyconst = true;
1321 if (arg1
1322 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1323 || (arg1->opcode == ADDR_EXPR
1324 && is_gimple_min_invariant (arg1->op0))))
1325 anyconst = true;
1326 if (anyconst)
1328 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1329 arg1 ? 2 : 1,
1330 arg0->op0,
1331 arg1 ? arg1->op0 : NULL);
1332 if (folded
1333 && TREE_CODE (folded) == NOP_EXPR)
1334 folded = TREE_OPERAND (folded, 0);
1335 if (folded
1336 && is_gimple_min_invariant (folded))
1337 return folded;
1341 /* Simplify reads from constants or constant initializers. */
1342 else if (BITS_PER_UNIT == 8
1343 && is_gimple_reg_type (ref->type)
1344 && (!INTEGRAL_TYPE_P (ref->type)
1345 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1347 poly_int64 off = 0;
1348 HOST_WIDE_INT size;
1349 if (INTEGRAL_TYPE_P (ref->type))
1350 size = TYPE_PRECISION (ref->type);
1351 else
1352 size = tree_to_shwi (TYPE_SIZE (ref->type));
1353 if (size % BITS_PER_UNIT != 0
1354 || size > MAX_BITSIZE_MODE_ANY_MODE)
1355 return NULL_TREE;
1356 size /= BITS_PER_UNIT;
1357 unsigned i;
1358 for (i = 0; i < operands.length (); ++i)
1360 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1362 ++i;
1363 break;
1365 if (known_eq (operands[i].off, -1))
1366 return NULL_TREE;
1367 off += operands[i].off;
1368 if (operands[i].opcode == MEM_REF)
1370 ++i;
1371 break;
1374 vn_reference_op_t base = &operands[--i];
1375 tree ctor = error_mark_node;
1376 tree decl = NULL_TREE;
1377 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1378 ctor = base->op0;
1379 else if (base->opcode == MEM_REF
1380 && base[1].opcode == ADDR_EXPR
1381 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1382 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1383 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1385 decl = TREE_OPERAND (base[1].op0, 0);
1386 if (TREE_CODE (decl) == STRING_CST)
1387 ctor = decl;
1388 else
1389 ctor = ctor_for_folding (decl);
1391 if (ctor == NULL_TREE)
1392 return build_zero_cst (ref->type);
1393 else if (ctor != error_mark_node)
1395 HOST_WIDE_INT const_off;
1396 if (decl)
1398 tree res = fold_ctor_reference (ref->type, ctor,
1399 off * BITS_PER_UNIT,
1400 size * BITS_PER_UNIT, decl);
1401 if (res)
1403 STRIP_USELESS_TYPE_CONVERSION (res);
1404 if (is_gimple_min_invariant (res))
1405 return res;
1408 else if (off.is_constant (&const_off))
1410 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1411 int len = native_encode_expr (ctor, buf, size, const_off);
1412 if (len > 0)
1413 return native_interpret_expr (ref->type, buf, len);
1418 return NULL_TREE;
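/* Editorial example for the constant folding above: given an assumed
   translation unit containing

     static const int tab[4] = { 1, 2, 3, 5 };

   the operands of a load from  tab[2]  all have known constant offsets,
   so the walk above reaches the MEM_REF/ADDR_EXPR base, ctor_for_folding
   yields the initializer of 'tab', and fold_ctor_reference extracts the
   constant 3 (native_encode_expr/native_interpret_expr handle the path
   without a decl, e.g. reads from a STRING_CST).  */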
1421 /* Return true if OPS contain a storage order barrier. */
1423 static bool
1424 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1426 vn_reference_op_t op;
1427 unsigned i;
1429 FOR_EACH_VEC_ELT (ops, i, op)
1430 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1431 return true;
1433 return false;
1436 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1437 structures into their value numbers. This is done in-place, and
1438 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1439 whether any operands were valueized. */
1441 static vec<vn_reference_op_s>
1442 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1444 vn_reference_op_t vro;
1445 unsigned int i;
1447 *valueized_anything = false;
1449 FOR_EACH_VEC_ELT (orig, i, vro)
1451 if (vro->opcode == SSA_NAME
1452 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1454 tree tem = SSA_VAL (vro->op0);
1455 if (tem != vro->op0)
1457 *valueized_anything = true;
1458 vro->op0 = tem;
1460 /* If it transforms from an SSA_NAME to a constant, update
1461 the opcode. */
1462 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1463 vro->opcode = TREE_CODE (vro->op0);
1465 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1467 tree tem = SSA_VAL (vro->op1);
1468 if (tem != vro->op1)
1470 *valueized_anything = true;
1471 vro->op1 = tem;
1474 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1476 tree tem = SSA_VAL (vro->op2);
1477 if (tem != vro->op2)
1479 *valueized_anything = true;
1480 vro->op2 = tem;
1483 /* If it transforms from an SSA_NAME to an address, fold with
1484 a preceding indirect reference. */
1485 if (i > 0
1486 && vro->op0
1487 && TREE_CODE (vro->op0) == ADDR_EXPR
1488 && orig[i - 1].opcode == MEM_REF)
1490 if (vn_reference_fold_indirect (&orig, &i))
1491 *valueized_anything = true;
1493 else if (i > 0
1494 && vro->opcode == SSA_NAME
1495 && orig[i - 1].opcode == MEM_REF)
1497 if (vn_reference_maybe_forwprop_address (&orig, &i))
1498 *valueized_anything = true;
1500 /* If it transforms a non-constant ARRAY_REF into a constant
1501 one, adjust the constant offset. */
1502 else if (vro->opcode == ARRAY_REF
1503 && known_eq (vro->off, -1)
1504 && poly_int_tree_p (vro->op0)
1505 && poly_int_tree_p (vro->op1)
1506 && TREE_CODE (vro->op2) == INTEGER_CST)
1508 poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1509 - wi::to_poly_offset (vro->op1))
1510 * wi::to_offset (vro->op2)
1511 * vn_ref_op_align_unit (vro));
1512 off.to_shwi (&vro->off);
1516 return orig;
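/* Editorial example of the valueization above: if the current value of
   p_2 is the invariant &a, the SSA operand of the ops for  MEM[p_2 + 4]
   is replaced by &a (and, had the value been of the form &s.f,
   vn_reference_fold_indirect would additionally move the component's
   offset into the preceding MEM_REF), so the lookup sees the same
   operands as a direct  MEM[&a + 4]  would.  */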
1519 static vec<vn_reference_op_s>
1520 valueize_refs (vec<vn_reference_op_s> orig)
1522 bool tem;
1523 return valueize_refs_1 (orig, &tem);
1526 static vec<vn_reference_op_s> shared_lookup_references;
1528 /* Create a vector of vn_reference_op_s structures from REF, a
1529 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1530 this function. *VALUEIZED_ANYTHING will specify whether any
1531 operands were valueized. */
1533 static vec<vn_reference_op_s>
1534 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1536 if (!ref)
1537 return vNULL;
1538 shared_lookup_references.truncate (0);
1539 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1540 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1541 valueized_anything);
1542 return shared_lookup_references;
1545 /* Create a vector of vn_reference_op_s structures from CALL, a
1546 call statement. The vector is shared among all callers of
1547 this function. */
1549 static vec<vn_reference_op_s>
1550 valueize_shared_reference_ops_from_call (gcall *call)
1552 if (!call)
1553 return vNULL;
1554 shared_lookup_references.truncate (0);
1555 copy_reference_ops_from_call (call, &shared_lookup_references);
1556 shared_lookup_references = valueize_refs (shared_lookup_references);
1557 return shared_lookup_references;
1560 /* Lookup a SCCVN reference operation VR in the current hash table.
1561 Returns the resulting value number if it exists in the hash table,
1562 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1563 vn_reference_t stored in the hashtable if something is found. */
1565 static tree
1566 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1568 vn_reference_s **slot;
1569 hashval_t hash;
1571 hash = vr->hashcode;
1572 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1573 if (!slot && current_info == optimistic_info)
1574 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1575 if (slot)
1577 if (vnresult)
1578 *vnresult = (vn_reference_t)*slot;
1579 return ((vn_reference_t)*slot)->result;
1582 return NULL_TREE;
1585 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1586 with the current VUSE and performs the expression lookup. */
1588 static void *
1589 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1590 unsigned int cnt, void *vr_)
1592 vn_reference_t vr = (vn_reference_t)vr_;
1593 vn_reference_s **slot;
1594 hashval_t hash;
1596 /* This bounds the stmt walks we perform on reference lookups
1597 to O(1) instead of O(N) where N is the number of dominating
1598 stores. */
1599 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1600 return (void *)-1;
1602 if (last_vuse_ptr)
1603 *last_vuse_ptr = vuse;
1605 /* Fixup vuse and hash. */
1606 if (vr->vuse)
1607 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1608 vr->vuse = vuse_ssa_val (vuse);
1609 if (vr->vuse)
1610 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1612 hash = vr->hashcode;
1613 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1614 if (!slot && current_info == optimistic_info)
1615 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1616 if (slot)
1617 return *slot;
1619 return NULL;
1622 /* Lookup an existing or insert a new vn_reference entry into the
1623 value table for the VUSE, SET, TYPE, OPERANDS reference which
1624 has the value VALUE which is either a constant or an SSA name. */
1626 static vn_reference_t
1627 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1628 alias_set_type set,
1629 tree type,
1630 vec<vn_reference_op_s,
1631 va_heap> operands,
1632 tree value)
1634 vn_reference_s vr1;
1635 vn_reference_t result;
1636 unsigned value_id;
1637 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1638 vr1.operands = operands;
1639 vr1.type = type;
1640 vr1.set = set;
1641 vr1.hashcode = vn_reference_compute_hash (&vr1);
1642 if (vn_reference_lookup_1 (&vr1, &result))
1643 return result;
1644 if (TREE_CODE (value) == SSA_NAME)
1645 value_id = VN_INFO (value)->value_id;
1646 else
1647 value_id = get_or_alloc_constant_value_id (value);
1648 return vn_reference_insert_pieces (vuse, set, type,
1649 operands.copy (), value, value_id);
1652 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *stmt, tree result);
1653 static unsigned mprts_hook_cnt;
1655 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
1657 static tree
1658 vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops_)
1660 if (!rcode.is_tree_code ())
1661 return NULL_TREE;
1662 tree *ops = ops_;
1663 unsigned int length = TREE_CODE_LENGTH ((tree_code) rcode);
1664 if (rcode == CONSTRUCTOR
1665 /* ??? We're arriving here with SCCVNs view, decomposed CONSTRUCTOR
1666 and GIMPLEs / match-and-simplifies, CONSTRUCTOR as GENERIC tree. */
1667 && TREE_CODE (ops_[0]) == CONSTRUCTOR)
1669 length = CONSTRUCTOR_NELTS (ops_[0]);
1670 ops = XALLOCAVEC (tree, length);
1671 for (unsigned i = 0; i < length; ++i)
1672 ops[i] = CONSTRUCTOR_ELT (ops_[0], i)->value;
1674 vn_nary_op_t vnresult = NULL;
1675 tree res = vn_nary_op_lookup_pieces (length, (tree_code) rcode,
1676 type, ops, &vnresult);
1677 /* We can end up endlessly recursing simplifications if the lookup above
1678 presents us with a def-use chain that mirrors the original simplification.
1679 See PR80887 for an example. Limit successful lookup artificially
1680 to 10 times if we are called as mprts_hook. */
1681 if (res
1682 && mprts_hook
1683 && --mprts_hook_cnt == 0)
1685 if (dump_file && (dump_flags & TDF_DETAILS))
1686 fprintf (dump_file, "Resetting mprts_hook after too many "
1687 "invocations.\n");
1688 mprts_hook = NULL;
1690 return res;
1693 /* Return a value-number for RCODE OPS... either by looking up an existing
1694 value-number for the simplified result or by inserting the operation if
1695 INSERT is true. */
1697 static tree
1698 vn_nary_build_or_lookup_1 (code_helper rcode, tree type, tree *ops,
1699 bool insert)
1701 tree result = NULL_TREE;
1702 /* We will be creating a value number for
1703 RCODE (OPS...).
1704 So first simplify and lookup this expression to see if it
1705 is already available. */
1706 mprts_hook = vn_lookup_simplify_result;
1707 mprts_hook_cnt = 9;
1708 bool res = false;
1709 switch (TREE_CODE_LENGTH ((tree_code) rcode))
1711 case 1:
1712 res = gimple_resimplify1 (NULL, &rcode, type, ops, vn_valueize);
1713 break;
1714 case 2:
1715 res = gimple_resimplify2 (NULL, &rcode, type, ops, vn_valueize);
1716 break;
1717 case 3:
1718 res = gimple_resimplify3 (NULL, &rcode, type, ops, vn_valueize);
1719 break;
1721 mprts_hook = NULL;
1722 gimple *new_stmt = NULL;
1723 if (res
1724 && gimple_simplified_result_is_gimple_val (rcode, ops))
1725 /* The expression is already available. */
1726 result = ops[0];
1727 else
1729 tree val = vn_lookup_simplify_result (rcode, type, ops);
1730 if (!val && insert)
1732 gimple_seq stmts = NULL;
1733 result = maybe_push_res_to_seq (rcode, type, ops, &stmts);
1734 if (result)
1736 gcc_assert (gimple_seq_singleton_p (stmts));
1737 new_stmt = gimple_seq_first_stmt (stmts);
1740 else
1741 /* The expression is already available. */
1742 result = val;
1744 if (new_stmt)
1746 /* The expression is not yet available, value-number lhs to
1747 the new SSA_NAME we created. */
1748 /* Initialize value-number information properly. */
1749 VN_INFO_GET (result)->valnum = result;
1750 VN_INFO (result)->value_id = get_next_value_id ();
1751 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1752 new_stmt);
1753 VN_INFO (result)->needs_insertion = true;
1754 /* ??? PRE phi-translation inserts NARYs without corresponding
1755 SSA name result. Re-use those but set their result according
1756 to the stmt we just built. */
1757 vn_nary_op_t nary = NULL;
1758 vn_nary_op_lookup_stmt (new_stmt, &nary);
1759 if (nary)
1761 gcc_assert (nary->result == NULL_TREE);
1762 nary->result = gimple_assign_lhs (new_stmt);
1764 /* As all "inserted" statements are singleton SCCs, insert
1765 to the valid table. This is strictly needed to
1766 avoid re-generating new value SSA_NAMEs for the same
1767 expression during SCC iteration over and over (the
1768 optimistic table gets cleared after each iteration).
1769 We do not need to insert into the optimistic table, as
1770 lookups there will fall back to the valid table. */
1771 else if (current_info == optimistic_info)
1773 current_info = valid_info;
1774 vn_nary_op_insert_stmt (new_stmt, result);
1775 current_info = optimistic_info;
1777 else
1778 vn_nary_op_insert_stmt (new_stmt, result);
1779 if (dump_file && (dump_flags & TDF_DETAILS))
1781 fprintf (dump_file, "Inserting name ");
1782 print_generic_expr (dump_file, result);
1783 fprintf (dump_file, " for expression ");
1784 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1785 fprintf (dump_file, "\n");
1788 return result;
1791 /* Return a value-number for RCODE OPS... either by looking up an existing
1792 value-number for the simplified result or by inserting the operation. */
1794 static tree
1795 vn_nary_build_or_lookup (code_helper rcode, tree type, tree *ops)
1797 return vn_nary_build_or_lookup_1 (rcode, type, ops, true);
1800 /* Try to simplify the expression RCODE OPS... of type TYPE and return
1801 its value if present. */
1803 tree
1804 vn_nary_simplify (vn_nary_op_t nary)
1806 if (nary->length > 3)
1807 return NULL_TREE;
1808 tree ops[3];
1809 memcpy (ops, nary->op, sizeof (tree) * nary->length);
1810 return vn_nary_build_or_lookup_1 (nary->opcode, nary->type, ops, false);
1814 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1815 from the statement defining VUSE and if not successful tries to
1816 translate *REFP and VR_ through an aggregate copy at the definition
1817 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1818 of *REF and *VR. If only disambiguation was performed then
1819 *DISAMBIGUATE_ONLY is set to true. */
1821 static void *
1822 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1823 bool *disambiguate_only)
1825 vn_reference_t vr = (vn_reference_t)vr_;
1826 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1827 tree base = ao_ref_base (ref);
1828 HOST_WIDE_INT offseti, maxsizei;
1829 static vec<vn_reference_op_s> lhs_ops;
1830 ao_ref lhs_ref;
1831 bool lhs_ref_ok = false;
1832 poly_int64 copy_size;
1834 /* If the reference is based on a parameter that was determined as
1835 pointing to readonly memory it doesn't change. */
1836 if (TREE_CODE (base) == MEM_REF
1837 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1838 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1839 && bitmap_bit_p (const_parms,
1840 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1842 *disambiguate_only = true;
1843 return NULL;
1846 /* First try to disambiguate after value-replacing in the definitions LHS. */
1847 if (is_gimple_assign (def_stmt))
1849 tree lhs = gimple_assign_lhs (def_stmt);
1850 bool valueized_anything = false;
1851 /* Avoid re-allocation overhead. */
1852 lhs_ops.truncate (0);
1853 copy_reference_ops_from_ref (lhs, &lhs_ops);
1854 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1855 if (valueized_anything)
1857 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1858 get_alias_set (lhs),
1859 TREE_TYPE (lhs), lhs_ops);
1860 if (lhs_ref_ok
1861 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1863 *disambiguate_only = true;
1864 return NULL;
1867 else
1869 ao_ref_init (&lhs_ref, lhs);
1870 lhs_ref_ok = true;
1873 /* If we reach a clobbering statement try to skip it and see if
1874 we find a VN result with exactly the same value as the
1875 possible clobber. In this case we can ignore the clobber
1876 and return the found value.
1877 Note that we don't need to worry about partial overlapping
1878 accesses as we then can use TBAA to disambiguate against the
1879 clobbering statement when looking up a load (thus the
1880 VN_WALKREWRITE guard). */
1881 if (vn_walk_kind == VN_WALKREWRITE
1882 && is_gimple_reg_type (TREE_TYPE (lhs))
1883 && types_compatible_p (TREE_TYPE (lhs), vr->type))
1885 tree *saved_last_vuse_ptr = last_vuse_ptr;
1886 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
1887 last_vuse_ptr = NULL;
1888 tree saved_vuse = vr->vuse;
1889 hashval_t saved_hashcode = vr->hashcode;
1890 void *res = vn_reference_lookup_2 (ref,
1891 gimple_vuse (def_stmt), 0, vr);
1892 /* Need to restore vr->vuse and vr->hashcode. */
1893 vr->vuse = saved_vuse;
1894 vr->hashcode = saved_hashcode;
1895 last_vuse_ptr = saved_last_vuse_ptr;
1896 if (res && res != (void *)-1)
1898 vn_reference_t vnresult = (vn_reference_t) res;
1899 if (vnresult->result
1900 && operand_equal_p (vnresult->result,
1901 gimple_assign_rhs1 (def_stmt), 0))
1902 return res;
1906 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1907 && gimple_call_num_args (def_stmt) <= 4)
1909 /* For builtin calls valueize its arguments and call the
1910 alias oracle again. Valueization may improve points-to
1911 info of pointers and constify size and position arguments.
1912 Originally this was motivated by PR61034 which has
1913 conditional calls to free falsely clobbering ref because
1914 of imprecise points-to info of the argument. */
1915 tree oldargs[4];
1916 bool valueized_anything = false;
1917 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1919 oldargs[i] = gimple_call_arg (def_stmt, i);
1920 tree val = vn_valueize (oldargs[i]);
1921 if (val != oldargs[i])
1923 gimple_call_set_arg (def_stmt, i, val);
1924 valueized_anything = true;
1927 if (valueized_anything)
1929 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1930 ref);
1931 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1932 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1933 if (!res)
1935 *disambiguate_only = true;
1936 return NULL;
1941 if (*disambiguate_only)
1942 return (void *)-1;
1944 /* If we cannot constrain the size of the reference we cannot
1945 test if anything kills it. */
1946 if (!ref->max_size_known_p ())
1947 return (void *)-1;
1949 poly_int64 offset = ref->offset;
1950 poly_int64 maxsize = ref->max_size;
1952 /* We can't deduce anything useful from clobbers. */
1953 if (gimple_clobber_p (def_stmt))
1954 return (void *)-1;
1956 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1957 from that definition.
1958 1) Memset. */
1959 if (is_gimple_reg_type (vr->type)
1960 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1961 && (integer_zerop (gimple_call_arg (def_stmt, 1))
1962 || (INTEGRAL_TYPE_P (vr->type)
1963 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1964 && known_eq (ref->size, 8)
1965 && known_eq (ref->size, maxsize)
1966 && offset.is_constant (&offseti)
1967 && offseti % BITS_PER_UNIT == 0))
1968 && poly_int_tree_p (gimple_call_arg (def_stmt, 2))
1969 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
1970 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
1972 tree base2;
1973 poly_int64 offset2, size2, maxsize2;
1974 bool reverse;
1975 tree ref2 = gimple_call_arg (def_stmt, 0);
1976 if (TREE_CODE (ref2) == SSA_NAME)
1978 ref2 = SSA_VAL (ref2);
1979 if (TREE_CODE (ref2) == SSA_NAME
1980 && (TREE_CODE (base) != MEM_REF
1981 || TREE_OPERAND (base, 0) != ref2))
1983 gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
1984 if (gimple_assign_single_p (def_stmt)
1985 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
1986 ref2 = gimple_assign_rhs1 (def_stmt);
1989 if (TREE_CODE (ref2) == ADDR_EXPR)
1991 ref2 = TREE_OPERAND (ref2, 0);
1992 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1993 &reverse);
1994 if (!known_size_p (maxsize2)
1995 || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
1996 return (void *)-1;
1998 else if (TREE_CODE (ref2) == SSA_NAME)
2000 poly_int64 soff;
2001 if (TREE_CODE (base) != MEM_REF
2002 || !(mem_ref_offset (base) << LOG2_BITS_PER_UNIT).to_shwi (&soff))
2003 return (void *)-1;
2004 offset += soff;
2005 offset2 = 0;
2006 if (TREE_OPERAND (base, 0) != ref2)
2008 gimple *def = SSA_NAME_DEF_STMT (ref2);
2009 if (is_gimple_assign (def)
2010 && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
2011 && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
2012 && poly_int_tree_p (gimple_assign_rhs2 (def))
2013 && (wi::to_poly_offset (gimple_assign_rhs2 (def))
2014 << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
2016 ref2 = gimple_assign_rhs1 (def);
2017 if (TREE_CODE (ref2) == SSA_NAME)
2018 ref2 = SSA_VAL (ref2);
2020 else
2021 return (void *)-1;
2024 else
2025 return (void *)-1;
2026 tree len = gimple_call_arg (def_stmt, 2);
2027 if (known_subrange_p (offset, maxsize, offset2,
2028 wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
2030 tree val;
2031 if (integer_zerop (gimple_call_arg (def_stmt, 1)))
2032 val = build_zero_cst (vr->type);
2033 else
2035 code_helper rcode = NOP_EXPR;
2036 tree ops[3] = {};
2037 ops[0] = gimple_call_arg (def_stmt, 1);
2038 val = vn_nary_build_or_lookup (rcode, vr->type, ops);
2039 if (!val
2040 || (TREE_CODE (val) == SSA_NAME
2041 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2042 return (void *)-1;
2044 return vn_reference_lookup_or_insert_for_pieces
2045 (vuse, vr->set, vr->type, vr->operands, val);
2049 /* 2) Assignment from an empty CONSTRUCTOR. */
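/* Illustrative sketch:
     a = {};     <-- store of an empty CONSTRUCTOR
     ... = a.f;
   Any scalar read contained in the stored range is zero.  */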
2050 else if (is_gimple_reg_type (vr->type)
2051 && gimple_assign_single_p (def_stmt)
2052 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
2053 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
2055 tree base2;
2056 poly_int64 offset2, size2, maxsize2;
2057 bool reverse;
2058 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2059 &offset2, &size2, &maxsize2, &reverse);
2060 if (known_size_p (maxsize2)
2061 && operand_equal_p (base, base2, 0)
2062 && known_subrange_p (offset, maxsize, offset2, size2))
2064 tree val = build_zero_cst (vr->type);
2065 return vn_reference_lookup_or_insert_for_pieces
2066 (vuse, vr->set, vr->type, vr->operands, val);
2070 /* 3) Assignment from a constant. We can use fold's native encode/interpret
2071 routines to extract the assigned bits. */
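/* Illustrative sketch (layout assumed for the example only):
     x = 0x11223344;          <-- constant store
     ... = ((short *)&x)[0];  <-- byte-aligned sub-access
   The constant is encoded into a byte buffer with native_encode_expr
   and the accessed bytes are re-interpreted in the access type.  */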
2072 else if (known_eq (ref->size, maxsize)
2073 && is_gimple_reg_type (vr->type)
2074 && !contains_storage_order_barrier_p (vr->operands)
2075 && gimple_assign_single_p (def_stmt)
2076 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2077 /* native_encode and native_interpret operate on arrays of bytes
2078 and so fundamentally need a compile-time size and offset. */
2079 && maxsize.is_constant (&maxsizei)
2080 && maxsizei % BITS_PER_UNIT == 0
2081 && offset.is_constant (&offseti)
2082 && offseti % BITS_PER_UNIT == 0
2083 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2084 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2085 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2087 tree base2;
2088 HOST_WIDE_INT offset2, size2;
2089 bool reverse;
2090 base2 = get_ref_base_and_extent_hwi (gimple_assign_lhs (def_stmt),
2091 &offset2, &size2, &reverse);
2092 if (base2
2093 && !reverse
2094 && size2 % BITS_PER_UNIT == 0
2095 && offset2 % BITS_PER_UNIT == 0
2096 && operand_equal_p (base, base2, 0)
2097 && known_subrange_p (offseti, maxsizei, offset2, size2))
2099 /* We support up to 512-bit values (for V8DFmode). */
2100 unsigned char buffer[64];
2101 int len;
2103 tree rhs = gimple_assign_rhs1 (def_stmt);
2104 if (TREE_CODE (rhs) == SSA_NAME)
2105 rhs = SSA_VAL (rhs);
2106 len = native_encode_expr (rhs,
2107 buffer, sizeof (buffer),
2108 (offseti - offset2) / BITS_PER_UNIT);
2109 if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
2111 tree type = vr->type;
2112 /* Make sure to interpret in a type that has a range
2113 covering the whole access size. */
2114 if (INTEGRAL_TYPE_P (vr->type)
2115 && maxsizei != TYPE_PRECISION (vr->type))
2116 type = build_nonstandard_integer_type (maxsizei,
2117 TYPE_UNSIGNED (type));
2118 tree val = native_interpret_expr (type, buffer,
2119 maxsizei / BITS_PER_UNIT);
2120 /* If we chop off bits because the type's precision doesn't
2121 match the memory access size this is ok when optimizing
2122 reads but not when called from the DSE code during
2123 elimination. */
2124 if (val
2125 && type != vr->type)
2127 if (! int_fits_type_p (val, vr->type))
2128 val = NULL_TREE;
2129 else
2130 val = fold_convert (vr->type, val);
2133 if (val)
2134 return vn_reference_lookup_or_insert_for_pieces
2135 (vuse, vr->set, vr->type, vr->operands, val);
2140 /* 4) Assignment from an SSA name whose definition we may be able
2141 to access pieces from. */
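/* Illustrative sketch:
     a = v_1;    <-- store of a register-type SSA name
     ... = a.f;
   The read is rewritten as BIT_FIELD_REF <v_1, size, offset> and
   simplified or looked up via vn_nary_build_or_lookup.  */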
2142 else if (known_eq (ref->size, maxsize)
2143 && is_gimple_reg_type (vr->type)
2144 && !contains_storage_order_barrier_p (vr->operands)
2145 && gimple_assign_single_p (def_stmt)
2146 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2148 tree base2;
2149 poly_int64 offset2, size2, maxsize2;
2150 bool reverse;
2151 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2152 &offset2, &size2, &maxsize2,
2153 &reverse);
2154 if (!reverse
2155 && known_size_p (maxsize2)
2156 && known_eq (maxsize2, size2)
2157 && operand_equal_p (base, base2, 0)
2158 && known_subrange_p (offset, maxsize, offset2, size2)
2159 /* ??? We can't handle bitfield precision extracts without
2160 either using an alternate type for the BIT_FIELD_REF and
2161 then doing a conversion or possibly adjusting the offset
2162 according to endianness. */
2163 && (! INTEGRAL_TYPE_P (vr->type)
2164 || known_eq (ref->size, TYPE_PRECISION (vr->type)))
2165 && multiple_p (ref->size, BITS_PER_UNIT))
2167 code_helper rcode = BIT_FIELD_REF;
2168 tree ops[3];
2169 ops[0] = SSA_VAL (gimple_assign_rhs1 (def_stmt));
2170 ops[1] = bitsize_int (ref->size);
2171 ops[2] = bitsize_int (offset - offset2);
2172 tree val = vn_nary_build_or_lookup (rcode, vr->type, ops);
2173 if (val
2174 && (TREE_CODE (val) != SSA_NAME
2175 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2177 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2178 (vuse, vr->set, vr->type, vr->operands, val);
2179 return res;
2184 /* 5) For aggregate copies translate the reference through them if
2185 the copy kills ref. */
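/* Illustrative sketch:
     a = b;      <-- aggregate copy that kills the reference
     ... = a.f;
   The lookup of a.f is translated into a lookup of b.f by rewriting
   the reference operands to be based on the RHS of the copy.  */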
2186 else if (vn_walk_kind == VN_WALKREWRITE
2187 && gimple_assign_single_p (def_stmt)
2188 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2189 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2190 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2192 tree base2;
2193 int i, j, k;
2194 auto_vec<vn_reference_op_s> rhs;
2195 vn_reference_op_t vro;
2196 ao_ref r;
2198 if (!lhs_ref_ok)
2199 return (void *)-1;
2201 /* See if the assignment kills REF. */
2202 base2 = ao_ref_base (&lhs_ref);
2203 if (!lhs_ref.max_size_known_p ()
2204 || (base != base2
2205 && (TREE_CODE (base) != MEM_REF
2206 || TREE_CODE (base2) != MEM_REF
2207 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2208 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2209 TREE_OPERAND (base2, 1))))
2210 || !stmt_kills_ref_p (def_stmt, ref))
2211 return (void *)-1;
2213 /* Find the common base of ref and the lhs. lhs_ops already
2214 contains valueized operands for the lhs. */
2215 i = vr->operands.length () - 1;
2216 j = lhs_ops.length () - 1;
2217 while (j >= 0 && i >= 0
2218 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2220 i--;
2221 j--;
2224 /* ??? The innermost op should always be a MEM_REF and we already
2225 checked that the assignment to the lhs kills vr. Thus for
2226 aggregate copies using char[] types the vn_reference_op_eq
2227 may fail when comparing types for compatibility. But we really
2228 don't care here - further lookups with the rewritten operands
2229 will simply fail if we messed up types too badly. */
2230 poly_int64 extra_off = 0;
2231 if (j == 0 && i >= 0
2232 && lhs_ops[0].opcode == MEM_REF
2233 && maybe_ne (lhs_ops[0].off, -1))
2235 if (known_eq (lhs_ops[0].off, vr->operands[i].off))
2236 i--, j--;
2237 else if (vr->operands[i].opcode == MEM_REF
2238 && maybe_ne (vr->operands[i].off, -1))
2240 extra_off = vr->operands[i].off - lhs_ops[0].off;
2241 i--, j--;
2245 /* i now points to the first additional op.
2246 ??? LHS may not be completely contained in VR, one or more
2247 VIEW_CONVERT_EXPRs could be in its way. We could at least
2248 try handling outermost VIEW_CONVERT_EXPRs. */
2249 if (j != -1)
2250 return (void *)-1;
2252 /* Punt if the additional ops contain a storage order barrier. */
2253 for (k = i; k >= 0; k--)
2255 vro = &vr->operands[k];
2256 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2257 return (void *)-1;
2260 /* Now re-write REF to be based on the rhs of the assignment. */
2261 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2263 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2264 if (maybe_ne (extra_off, 0))
2266 if (rhs.length () < 2
2267 || rhs[0].opcode != MEM_REF
2268 || known_eq (rhs[0].off, -1))
2269 return (void *)-1;
2270 rhs[0].off += extra_off;
2271 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
2272 build_int_cst (TREE_TYPE (rhs[0].op0),
2273 extra_off));
2276 /* We need to pre-pend vr->operands[0..i] to rhs. */
2277 vec<vn_reference_op_s> old = vr->operands;
2278 if (i + 1 + rhs.length () > vr->operands.length ())
2279 vr->operands.safe_grow (i + 1 + rhs.length ());
2280 else
2281 vr->operands.truncate (i + 1 + rhs.length ());
2282 FOR_EACH_VEC_ELT (rhs, j, vro)
2283 vr->operands[i + 1 + j] = *vro;
2284 vr->operands = valueize_refs (vr->operands);
2285 if (old == shared_lookup_references)
2286 shared_lookup_references = vr->operands;
2287 vr->hashcode = vn_reference_compute_hash (vr);
2289 /* Try folding the new reference to a constant. */
2290 tree val = fully_constant_vn_reference_p (vr);
2291 if (val)
2292 return vn_reference_lookup_or_insert_for_pieces
2293 (vuse, vr->set, vr->type, vr->operands, val);
2295 /* Adjust *ref from the new operands. */
2296 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2297 return (void *)-1;
2298 /* This can happen with bitfields. */
2299 if (maybe_ne (ref->size, r.size))
2300 return (void *)-1;
2301 *ref = r;
2303 /* Do not update last seen VUSE after translating. */
2304 last_vuse_ptr = NULL;
2306 /* Keep looking for the adjusted *REF / VR pair. */
2307 return NULL;
2310 /* 6) For memcpy copies translate the reference through them if
2311 the copy kills ref. */
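/* Illustrative sketch (assuming the copy covers the access):
     memcpy (&a, &b, sizeof (a));
     ... = a.f;
   The access is re-expressed as a MEM_REF based on &b at the
   corresponding offset and the walk continues with that reference.  */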
2312 else if (vn_walk_kind == VN_WALKREWRITE
2313 && is_gimple_reg_type (vr->type)
2314 /* ??? Handle BCOPY as well. */
2315 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2316 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2317 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2318 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2319 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2320 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2321 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2322 && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size))
2324 tree lhs, rhs;
2325 ao_ref r;
2326 poly_int64 rhs_offset, lhs_offset;
2327 vn_reference_op_s op;
2328 poly_uint64 mem_offset;
2329 poly_int64 at, byte_maxsize;
2331 /* Only handle non-variable, addressable refs. */
2332 if (maybe_ne (ref->size, maxsize)
2333 || !multiple_p (offset, BITS_PER_UNIT, &at)
2334 || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
2335 return (void *)-1;
2337 /* Extract a pointer base and an offset for the destination. */
2338 lhs = gimple_call_arg (def_stmt, 0);
2339 lhs_offset = 0;
2340 if (TREE_CODE (lhs) == SSA_NAME)
2342 lhs = SSA_VAL (lhs);
2343 if (TREE_CODE (lhs) == SSA_NAME)
2345 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2346 if (gimple_assign_single_p (def_stmt)
2347 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2348 lhs = gimple_assign_rhs1 (def_stmt);
2351 if (TREE_CODE (lhs) == ADDR_EXPR)
2353 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2354 &lhs_offset);
2355 if (!tem)
2356 return (void *)-1;
2357 if (TREE_CODE (tem) == MEM_REF
2358 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2360 lhs = TREE_OPERAND (tem, 0);
2361 if (TREE_CODE (lhs) == SSA_NAME)
2362 lhs = SSA_VAL (lhs);
2363 lhs_offset += mem_offset;
2365 else if (DECL_P (tem))
2366 lhs = build_fold_addr_expr (tem);
2367 else
2368 return (void *)-1;
2370 if (TREE_CODE (lhs) != SSA_NAME
2371 && TREE_CODE (lhs) != ADDR_EXPR)
2372 return (void *)-1;
2374 /* Extract a pointer base and an offset for the source. */
2375 rhs = gimple_call_arg (def_stmt, 1);
2376 rhs_offset = 0;
2377 if (TREE_CODE (rhs) == SSA_NAME)
2378 rhs = SSA_VAL (rhs);
2379 if (TREE_CODE (rhs) == ADDR_EXPR)
2381 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2382 &rhs_offset);
2383 if (!tem)
2384 return (void *)-1;
2385 if (TREE_CODE (tem) == MEM_REF
2386 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2388 rhs = TREE_OPERAND (tem, 0);
2389 rhs_offset += mem_offset;
2391 else if (DECL_P (tem)
2392 || TREE_CODE (tem) == STRING_CST)
2393 rhs = build_fold_addr_expr (tem);
2394 else
2395 return (void *)-1;
2397 if (TREE_CODE (rhs) != SSA_NAME
2398 && TREE_CODE (rhs) != ADDR_EXPR)
2399 return (void *)-1;
2401 /* The bases of the destination and the references have to agree. */
2402 if (TREE_CODE (base) == MEM_REF)
2404 if (TREE_OPERAND (base, 0) != lhs
2405 || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
2406 return (void *) -1;
2407 at += mem_offset;
2409 else if (!DECL_P (base)
2410 || TREE_CODE (lhs) != ADDR_EXPR
2411 || TREE_OPERAND (lhs, 0) != base)
2412 return (void *)-1;
2414 /* If the access is completely outside of the memcpy destination
2415 area there is no aliasing. */
2416 if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
2417 return NULL;
2418 /* And the access has to be contained within the memcpy destination. */
2419 if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
2420 return (void *)-1;
2422 /* Make room for 2 operands in the new reference. */
2423 if (vr->operands.length () < 2)
2425 vec<vn_reference_op_s> old = vr->operands;
2426 vr->operands.safe_grow_cleared (2);
2427 if (old == shared_lookup_references)
2428 shared_lookup_references = vr->operands;
2430 else
2431 vr->operands.truncate (2);
2433 /* The looked-through reference is a simple MEM_REF. */
2434 memset (&op, 0, sizeof (op));
2435 op.type = vr->type;
2436 op.opcode = MEM_REF;
2437 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2438 op.off = at - lhs_offset + rhs_offset;
2439 vr->operands[0] = op;
2440 op.type = TREE_TYPE (rhs);
2441 op.opcode = TREE_CODE (rhs);
2442 op.op0 = rhs;
2443 op.off = -1;
2444 vr->operands[1] = op;
2445 vr->hashcode = vn_reference_compute_hash (vr);
2447 /* Try folding the new reference to a constant. */
2448 tree val = fully_constant_vn_reference_p (vr);
2449 if (val)
2450 return vn_reference_lookup_or_insert_for_pieces
2451 (vuse, vr->set, vr->type, vr->operands, val);
2453 /* Adjust *ref from the new operands. */
2454 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2455 return (void *)-1;
2456 /* This can happen with bitfields. */
2457 if (maybe_ne (ref->size, r.size))
2458 return (void *)-1;
2459 *ref = r;
2461 /* Do not update last seen VUSE after translating. */
2462 last_vuse_ptr = NULL;
2464 /* Keep looking for the adjusted *REF / VR pair. */
2465 return NULL;
2468 /* Bail out and stop walking. */
2469 return (void *)-1;
2472 /* Return a reference op vector from OP that can be used for
2473 vn_reference_lookup_pieces. The caller is responsible for releasing
2474 the vector. */
2476 vec<vn_reference_op_s>
2477 vn_reference_operands_for_lookup (tree op)
2479 bool valueized;
2480 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2483 /* Lookup a reference operation by its parts, in the current hash table.
2484 Returns the resulting value number if it exists in the hash table,
2485 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2486 vn_reference_t stored in the hashtable if something is found. */
2488 tree
2489 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2490 vec<vn_reference_op_s> operands,
2491 vn_reference_t *vnresult, vn_lookup_kind kind)
2493 struct vn_reference_s vr1;
2494 vn_reference_t tmp;
2495 tree cst;
2497 if (!vnresult)
2498 vnresult = &tmp;
2499 *vnresult = NULL;
2501 vr1.vuse = vuse_ssa_val (vuse);
2502 shared_lookup_references.truncate (0);
2503 shared_lookup_references.safe_grow (operands.length ());
2504 memcpy (shared_lookup_references.address (),
2505 operands.address (),
2506 sizeof (vn_reference_op_s)
2507 * operands.length ());
2508 vr1.operands = operands = shared_lookup_references
2509 = valueize_refs (shared_lookup_references);
2510 vr1.type = type;
2511 vr1.set = set;
2512 vr1.hashcode = vn_reference_compute_hash (&vr1);
2513 if ((cst = fully_constant_vn_reference_p (&vr1)))
2514 return cst;
2516 vn_reference_lookup_1 (&vr1, vnresult);
2517 if (!*vnresult
2518 && kind != VN_NOWALK
2519 && vr1.vuse)
2521 ao_ref r;
2522 vn_walk_kind = kind;
2523 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2524 *vnresult =
2525 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2526 vn_reference_lookup_2,
2527 vn_reference_lookup_3,
2528 vuse_ssa_val, &vr1);
2529 gcc_checking_assert (vr1.operands == shared_lookup_references);
2532 if (*vnresult)
2533 return (*vnresult)->result;
2535 return NULL_TREE;
2538 /* Lookup OP in the current hash table, and return the resulting value
2539 number if it exists in the hash table. Return NULL_TREE if it does
2540 not exist in the hash table or if the result field of the structure
2541 was NULL. VNRESULT will be filled in with the vn_reference_t
2542 stored in the hashtable if one exists. When TBAA_P is false assume
2543 we are looking up a store and treat it as having alias-set zero. */
2545 tree
2546 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2547 vn_reference_t *vnresult, bool tbaa_p)
2549 vec<vn_reference_op_s> operands;
2550 struct vn_reference_s vr1;
2551 tree cst;
2552 bool valueized_anything;
2554 if (vnresult)
2555 *vnresult = NULL;
2557 vr1.vuse = vuse_ssa_val (vuse);
2558 vr1.operands = operands
2559 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2560 vr1.type = TREE_TYPE (op);
2561 vr1.set = tbaa_p ? get_alias_set (op) : 0;
2562 vr1.hashcode = vn_reference_compute_hash (&vr1);
2563 if ((cst = fully_constant_vn_reference_p (&vr1)))
2564 return cst;
2566 if (kind != VN_NOWALK
2567 && vr1.vuse)
2569 vn_reference_t wvnresult;
2570 ao_ref r;
2571 /* Make sure to use a valueized reference if we valueized anything.
2572 Otherwise preserve the full reference for advanced TBAA. */
2573 if (!valueized_anything
2574 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2575 vr1.operands))
2576 ao_ref_init (&r, op);
2577 if (! tbaa_p)
2578 r.ref_alias_set = r.base_alias_set = 0;
2579 vn_walk_kind = kind;
2580 wvnresult =
2581 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2582 vn_reference_lookup_2,
2583 vn_reference_lookup_3,
2584 vuse_ssa_val, &vr1);
2585 gcc_checking_assert (vr1.operands == shared_lookup_references);
2586 if (wvnresult)
2588 if (vnresult)
2589 *vnresult = wvnresult;
2590 return wvnresult->result;
2593 return NULL_TREE;
2596 return vn_reference_lookup_1 (&vr1, vnresult);
2599 /* Lookup CALL in the current hash table and return the entry in
2600 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2602 void
2603 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2604 vn_reference_t vr)
2606 if (vnresult)
2607 *vnresult = NULL;
2609 tree vuse = gimple_vuse (call);
2611 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2612 vr->operands = valueize_shared_reference_ops_from_call (call);
2613 vr->type = gimple_expr_type (call);
2614 vr->set = 0;
2615 vr->hashcode = vn_reference_compute_hash (vr);
2616 vn_reference_lookup_1 (vr, vnresult);
2619 /* Insert OP into the current hash table with a value number of
2620 RESULT, and return the resulting reference structure we created. */
2622 static vn_reference_t
2623 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2625 vn_reference_s **slot;
2626 vn_reference_t vr1;
2627 bool tem;
2629 vr1 = current_info->references_pool->allocate ();
2630 if (TREE_CODE (result) == SSA_NAME)
2631 vr1->value_id = VN_INFO (result)->value_id;
2632 else
2633 vr1->value_id = get_or_alloc_constant_value_id (result);
2634 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2635 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2636 vr1->type = TREE_TYPE (op);
2637 vr1->set = get_alias_set (op);
2638 vr1->hashcode = vn_reference_compute_hash (vr1);
2639 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2640 vr1->result_vdef = vdef;
2642 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2643 INSERT);
2645 /* Because we lookup stores using vuses, and value number failures
2646 using the vdefs (see visit_reference_op_store for how and why),
2647 it's possible that on failure we may try to insert an already
2648 inserted store. This is not wrong; there is no SSA name for a
2649 store that we could use as a differentiator anyway. Thus, unlike
2650 the other lookup functions, you cannot gcc_assert (!*slot)
2651 here. */
2653 /* But free the old slot in case of a collision. */
2654 if (*slot)
2655 free_reference (*slot);
2657 *slot = vr1;
2658 return vr1;
2661 /* Insert a reference by its pieces into the current hash table with
2662 a value number of RESULT. Return the resulting reference
2663 structure we created. */
2665 vn_reference_t
2666 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2667 vec<vn_reference_op_s> operands,
2668 tree result, unsigned int value_id)
2671 vn_reference_s **slot;
2672 vn_reference_t vr1;
2674 vr1 = current_info->references_pool->allocate ();
2675 vr1->value_id = value_id;
2676 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2677 vr1->operands = valueize_refs (operands);
2678 vr1->type = type;
2679 vr1->set = set;
2680 vr1->hashcode = vn_reference_compute_hash (vr1);
2681 if (result && TREE_CODE (result) == SSA_NAME)
2682 result = SSA_VAL (result);
2683 vr1->result = result;
2685 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2686 INSERT);
2688 /* At this point we should have all the things inserted that we have
2689 seen before, and we should never try inserting something that
2690 already exists. */
2691 gcc_assert (!*slot);
2692 if (*slot)
2693 free_reference (*slot);
2695 *slot = vr1;
2696 return vr1;
2699 /* Compute and return the hash value for nary operation VBO1. */
2701 static hashval_t
2702 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2704 inchash::hash hstate;
2705 unsigned i;
2707 for (i = 0; i < vno1->length; ++i)
2708 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2709 vno1->op[i] = SSA_VAL (vno1->op[i]);
2711 if (((vno1->length == 2
2712 && commutative_tree_code (vno1->opcode))
2713 || (vno1->length == 3
2714 && commutative_ternary_tree_code (vno1->opcode)))
2715 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2716 std::swap (vno1->op[0], vno1->op[1]);
2717 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2718 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2720 std::swap (vno1->op[0], vno1->op[1]);
2721 vno1->opcode = swap_tree_comparison (vno1->opcode);
2724 hstate.add_int (vno1->opcode);
2725 for (i = 0; i < vno1->length; ++i)
2726 inchash::add_expr (vno1->op[i], hstate);
2728 return hstate.end ();
2731 /* Compare nary operations VNO1 and VNO2 and return true if they are
2732 equivalent. */
2734 bool
2735 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2737 unsigned i;
2739 if (vno1->hashcode != vno2->hashcode)
2740 return false;
2742 if (vno1->length != vno2->length)
2743 return false;
2745 if (vno1->opcode != vno2->opcode
2746 || !types_compatible_p (vno1->type, vno2->type))
2747 return false;
2749 for (i = 0; i < vno1->length; ++i)
2750 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2751 return false;
2753 /* BIT_INSERT_EXPR has an implicit operand as the type precision
2754 of op1. Need to check to make sure they are the same. */
2755 if (vno1->opcode == BIT_INSERT_EXPR
2756 && TREE_CODE (vno1->op[1]) == INTEGER_CST
2757 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
2758 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
2759 return false;
2761 return true;
2764 /* Initialize VNO from the pieces provided. */
2766 static void
2767 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2768 enum tree_code code, tree type, tree *ops)
2770 vno->opcode = code;
2771 vno->length = length;
2772 vno->type = type;
2773 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2776 /* Initialize VNO from OP. */
2778 static void
2779 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2781 unsigned i;
2783 vno->opcode = TREE_CODE (op);
2784 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2785 vno->type = TREE_TYPE (op);
2786 for (i = 0; i < vno->length; ++i)
2787 vno->op[i] = TREE_OPERAND (op, i);
2790 /* Return the number of operands for a vn_nary ops structure from STMT. */
2792 static unsigned int
2793 vn_nary_length_from_stmt (gimple *stmt)
2795 switch (gimple_assign_rhs_code (stmt))
2797 case REALPART_EXPR:
2798 case IMAGPART_EXPR:
2799 case VIEW_CONVERT_EXPR:
2800 return 1;
2802 case BIT_FIELD_REF:
2803 return 3;
2805 case CONSTRUCTOR:
2806 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2808 default:
2809 return gimple_num_ops (stmt) - 1;
2813 /* Initialize VNO from STMT. */
2815 static void
2816 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2818 unsigned i;
2820 vno->opcode = gimple_assign_rhs_code (stmt);
2821 vno->type = gimple_expr_type (stmt);
2822 switch (vno->opcode)
2824 case REALPART_EXPR:
2825 case IMAGPART_EXPR:
2826 case VIEW_CONVERT_EXPR:
2827 vno->length = 1;
2828 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2829 break;
2831 case BIT_FIELD_REF:
2832 vno->length = 3;
2833 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2834 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2835 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2836 break;
2838 case CONSTRUCTOR:
2839 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2840 for (i = 0; i < vno->length; ++i)
2841 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2842 break;
2844 default:
2845 gcc_checking_assert (!gimple_assign_single_p (stmt));
2846 vno->length = gimple_num_ops (stmt) - 1;
2847 for (i = 0; i < vno->length; ++i)
2848 vno->op[i] = gimple_op (stmt, i + 1);
2852 /* Compute the hashcode for VNO and look for it in the hash table;
2853 return the resulting value number if it exists in the hash table.
2854 Return NULL_TREE if it does not exist in the hash table or if the
2855 result field of the operation is NULL. VNRESULT will contain the
2856 vn_nary_op_t from the hashtable if it exists. */
2858 static tree
2859 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2861 vn_nary_op_s **slot;
2863 if (vnresult)
2864 *vnresult = NULL;
2866 vno->hashcode = vn_nary_op_compute_hash (vno);
2867 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2868 NO_INSERT);
2869 if (!slot && current_info == optimistic_info)
2870 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2871 NO_INSERT);
2872 if (!slot)
2873 return NULL_TREE;
2874 if (vnresult)
2875 *vnresult = *slot;
2876 return (*slot)->result;
2879 /* Lookup an n-ary operation by its pieces and return the resulting value
2880 number if it exists in the hash table. Return NULL_TREE if it does
2881 not exist in the hash table or if the result field of the operation
2882 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2883 if it exists. */
2885 tree
2886 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2887 tree type, tree *ops, vn_nary_op_t *vnresult)
2889 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2890 sizeof_vn_nary_op (length));
2891 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2892 return vn_nary_op_lookup_1 (vno1, vnresult);
2895 /* Lookup OP in the current hash table, and return the resulting value
2896 number if it exists in the hash table. Return NULL_TREE if it does
2897 not exist in the hash table or if the result field of the operation
2898 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2899 if it exists. */
2901 tree
2902 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2904 vn_nary_op_t vno1
2905 = XALLOCAVAR (struct vn_nary_op_s,
2906 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2907 init_vn_nary_op_from_op (vno1, op);
2908 return vn_nary_op_lookup_1 (vno1, vnresult);
2911 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2912 value number if it exists in the hash table. Return NULL_TREE if
2913 it does not exist in the hash table. VNRESULT will contain the
2914 vn_nary_op_t from the hashtable if it exists. */
2916 tree
2917 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2919 vn_nary_op_t vno1
2920 = XALLOCAVAR (struct vn_nary_op_s,
2921 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2922 init_vn_nary_op_from_stmt (vno1, stmt);
2923 return vn_nary_op_lookup_1 (vno1, vnresult);
2926 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2928 static vn_nary_op_t
2929 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2931 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2934 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2935 obstack. */
2937 static vn_nary_op_t
2938 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2940 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2941 &current_info->nary_obstack);
2943 vno1->value_id = value_id;
2944 vno1->length = length;
2945 vno1->result = result;
2947 return vno1;
2950 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2951 VNO->HASHCODE first. */
2953 static vn_nary_op_t
2954 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2955 bool compute_hash)
2957 vn_nary_op_s **slot;
2959 if (compute_hash)
2960 vno->hashcode = vn_nary_op_compute_hash (vno);
2962 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2963 /* While we do not want to insert things twice it's awkward to
2964 avoid it in the case where visit_nary_op pattern-matches stuff
2965 and ends up simplifying the replacement to itself. We then
2966 get two inserts, one from visit_nary_op and one from
2967 vn_nary_build_or_lookup.
2968 So allow inserts with the same value number. */
2969 if (*slot && (*slot)->result == vno->result)
2970 return *slot;
2972 gcc_assert (!*slot);
2974 *slot = vno;
2975 return vno;
2978 /* Insert an n-ary operation into the current hash table using its
2979 pieces. Return the vn_nary_op_t structure we created and put in
2980 the hashtable. */
2982 vn_nary_op_t
2983 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2984 tree type, tree *ops,
2985 tree result, unsigned int value_id)
2987 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2988 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2989 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2992 /* Insert OP into the current hash table with a value number of
2993 RESULT. Return the vn_nary_op_t structure we created and put in
2994 the hashtable. */
2996 vn_nary_op_t
2997 vn_nary_op_insert (tree op, tree result)
2999 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
3000 vn_nary_op_t vno1;
3002 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
3003 init_vn_nary_op_from_op (vno1, op);
3004 return vn_nary_op_insert_into (vno1, current_info->nary, true);
3007 /* Insert the rhs of STMT into the current hash table with a value number of
3008 RESULT. */
3010 static vn_nary_op_t
3011 vn_nary_op_insert_stmt (gimple *stmt, tree result)
3013 vn_nary_op_t vno1
3014 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
3015 result, VN_INFO (result)->value_id);
3016 init_vn_nary_op_from_stmt (vno1, stmt);
3017 return vn_nary_op_insert_into (vno1, current_info->nary, true);
3020 /* Compute a hashcode for PHI operation VP1 and return it. */
3022 static inline hashval_t
3023 vn_phi_compute_hash (vn_phi_t vp1)
3025 inchash::hash hstate (vp1->phiargs.length () > 2
3026 ? vp1->block->index : vp1->phiargs.length ());
3027 tree phi1op;
3028 tree type;
3029 edge e;
3030 edge_iterator ei;
3032 /* If all PHI arguments are constants we need to distinguish
3033 the PHI node via its type. */
3034 type = vp1->type;
3035 hstate.merge_hash (vn_hash_type (type));
3037 FOR_EACH_EDGE (e, ei, vp1->block->preds)
3039 /* Don't hash backedge values; they need to be handled as VN_TOP
3040 for optimistic value-numbering. */
3041 if (e->flags & EDGE_DFS_BACK)
3042 continue;
3044 phi1op = vp1->phiargs[e->dest_idx];
3045 if (phi1op == VN_TOP)
3046 continue;
3047 inchash::add_expr (phi1op, hstate);
3050 return hstate.end ();
3054 /* Return true if COND1 and COND2 represent the same condition, set
3055 *INVERTED_P if one needs to be inverted to make it the same as
3056 the other. */
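/* For example (informal): "a < b" matches "b > a" directly, while
   "a < b" and "a >= b" match with *INVERTED_P set, using
   swap_tree_comparison and invert_tree_comparison respectively.  */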
3058 static bool
3059 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
3060 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
3062 enum tree_code code1 = gimple_cond_code (cond1);
3063 enum tree_code code2 = gimple_cond_code (cond2);
3065 *inverted_p = false;
3066 if (code1 == code2)
3068 else if (code1 == swap_tree_comparison (code2))
3069 std::swap (lhs2, rhs2);
3070 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
3071 *inverted_p = true;
3072 else if (code1 == invert_tree_comparison
3073 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
3075 std::swap (lhs2, rhs2);
3076 *inverted_p = true;
3078 else
3079 return false;
3081 return ((expressions_equal_p (lhs1, lhs2)
3082 && expressions_equal_p (rhs1, rhs2))
3083 || (commutative_tree_code (code1)
3084 && expressions_equal_p (lhs1, rhs2)
3085 && expressions_equal_p (rhs1, lhs2)));
3088 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
3090 static int
3091 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
3093 if (vp1->hashcode != vp2->hashcode)
3094 return false;
3096 if (vp1->block != vp2->block)
3098 if (vp1->phiargs.length () != vp2->phiargs.length ())
3099 return false;
3101 switch (vp1->phiargs.length ())
3103 case 1:
3104 /* Single-arg PHIs are just copies. */
3105 break;
3107 case 2:
3109 /* Rule out backedges into the PHI. */
3110 if (vp1->block->loop_father->header == vp1->block
3111 || vp2->block->loop_father->header == vp2->block)
3112 return false;
3114 /* If the PHI nodes do not have compatible types
3115 they are not the same. */
3116 if (!types_compatible_p (vp1->type, vp2->type))
3117 return false;
3119 basic_block idom1
3120 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3121 basic_block idom2
3122 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
3123 /* If the immediate dominators end in switch stmts, multiple
3124 values may end up in the same PHI arg via intermediate
3125 CFG merges. */
3126 if (EDGE_COUNT (idom1->succs) != 2
3127 || EDGE_COUNT (idom2->succs) != 2)
3128 return false;
3130 /* Verify the controlling stmt is the same. */
3131 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3132 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3133 if (! last1 || ! last2)
3134 return false;
3135 bool inverted_p;
3136 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3137 last2, vp2->cclhs, vp2->ccrhs,
3138 &inverted_p))
3139 return false;
3141 /* Get at true/false controlled edges into the PHI. */
3142 edge te1, te2, fe1, fe2;
3143 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3144 &te1, &fe1)
3145 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3146 &te2, &fe2))
3147 return false;
3149 /* Swap edges if the second condition is the inverted of the
3150 first. */
3151 if (inverted_p)
3152 std::swap (te2, fe2);
3154 /* ??? Handle VN_TOP specially. */
3155 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3156 vp2->phiargs[te2->dest_idx])
3157 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3158 vp2->phiargs[fe2->dest_idx]))
3159 return false;
3161 return true;
3164 default:
3165 return false;
3169 /* If the PHI nodes do not have compatible types
3170 they are not the same. */
3171 if (!types_compatible_p (vp1->type, vp2->type))
3172 return false;
3174 /* Any phi in the same block will have its arguments in the
3175 same edge order, because of how we store phi nodes. */
3176 int i;
3177 tree phi1op;
3178 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
3180 tree phi2op = vp2->phiargs[i];
3181 if (phi1op == VN_TOP || phi2op == VN_TOP)
3182 continue;
3183 if (!expressions_equal_p (phi1op, phi2op))
3184 return false;
3187 return true;
3190 static vec<tree> shared_lookup_phiargs;
3192 /* Lookup PHI in the current hash table, and return the resulting
3193 value number if it exists in the hash table. Return NULL_TREE if
3194 it does not exist in the hash table. */
3196 static tree
3197 vn_phi_lookup (gimple *phi)
3199 vn_phi_s **slot;
3200 struct vn_phi_s vp1;
3201 edge e;
3202 edge_iterator ei;
3204 shared_lookup_phiargs.truncate (0);
3205 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
3207 /* Canonicalize the SSA_NAME's to their value number. */
3208 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3210 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3211 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3212 shared_lookup_phiargs[e->dest_idx] = def;
3214 vp1.type = TREE_TYPE (gimple_phi_result (phi));
3215 vp1.phiargs = shared_lookup_phiargs;
3216 vp1.block = gimple_bb (phi);
3217 /* Extract values of the controlling condition. */
3218 vp1.cclhs = NULL_TREE;
3219 vp1.ccrhs = NULL_TREE;
3220 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1.block);
3221 if (EDGE_COUNT (idom1->succs) == 2)
3222 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3224 vp1.cclhs = vn_valueize (gimple_cond_lhs (last1));
3225 vp1.ccrhs = vn_valueize (gimple_cond_rhs (last1));
3227 vp1.hashcode = vn_phi_compute_hash (&vp1);
3228 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3229 NO_INSERT);
3230 if (!slot && current_info == optimistic_info)
3231 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3232 NO_INSERT);
3233 if (!slot)
3234 return NULL_TREE;
3235 return (*slot)->result;
3238 /* Insert PHI into the current hash table with a value number of
3239 RESULT. */
3241 static vn_phi_t
3242 vn_phi_insert (gimple *phi, tree result)
3244 vn_phi_s **slot;
3245 vn_phi_t vp1 = current_info->phis_pool->allocate ();
3246 vec<tree> args = vNULL;
3247 edge e;
3248 edge_iterator ei;
3250 args.safe_grow (gimple_phi_num_args (phi));
3252 /* Canonicalize the SSA_NAME's to their value number. */
3253 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3255 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3256 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3257 args[e->dest_idx] = def;
3259 vp1->value_id = VN_INFO (result)->value_id;
3260 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3261 vp1->phiargs = args;
3262 vp1->block = gimple_bb (phi);
3263 /* Extract values of the controlling condition. */
3264 vp1->cclhs = NULL_TREE;
3265 vp1->ccrhs = NULL_TREE;
3266 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3267 if (EDGE_COUNT (idom1->succs) == 2)
3268 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3270 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3271 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3273 vp1->result = result;
3274 vp1->hashcode = vn_phi_compute_hash (vp1);
3276 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3278 /* Because we iterate over phi operations more than once, it's
3279 possible the slot might already exist here, hence no assert.  */
3280 *slot = vp1;
3281 return vp1;
3285 /* Print set of components in strongly connected component SCC to OUT. */
3287 static void
3288 print_scc (FILE *out, vec<tree> scc)
3290 tree var;
3291 unsigned int i;
3293 fprintf (out, "SCC consists of %u:", scc.length ());
3294 FOR_EACH_VEC_ELT (scc, i, var)
3296 fprintf (out, " ");
3297 print_generic_expr (out, var);
3299 fprintf (out, "\n");
3302 /* Return true if BB1 is dominated by BB2 taking into account edges
3303 that are not executable. */
3305 static bool
3306 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3308 edge_iterator ei;
3309 edge e;
3311 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3312 return true;
3314 /* Before iterating we'd like to know if there exists an
3315 (executable) path from bb2 to bb1 at all; if not we can
3316 directly return false. For now simply iterate once. */
3318 /* Iterate to the single executable bb1 predecessor. */
3319 if (EDGE_COUNT (bb1->preds) > 1)
3321 edge prede = NULL;
3322 FOR_EACH_EDGE (e, ei, bb1->preds)
3323 if (e->flags & EDGE_EXECUTABLE)
3325 if (prede)
3327 prede = NULL;
3328 break;
3330 prede = e;
3332 if (prede)
3334 bb1 = prede->src;
3336 /* Re-do the dominance check with changed bb1. */
3337 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3338 return true;
3342 /* Iterate to the single executable bb2 successor. */
3343 edge succe = NULL;
3344 FOR_EACH_EDGE (e, ei, bb2->succs)
3345 if (e->flags & EDGE_EXECUTABLE)
3347 if (succe)
3349 succe = NULL;
3350 break;
3352 succe = e;
3354 if (succe)
3356 /* Verify the reached block is only reached through succe.
3357 If there is only one edge we can spare us the dominator
3358 check and iterate directly. */
3359 if (EDGE_COUNT (succe->dest->preds) > 1)
3361 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3362 if (e != succe
3363 && (e->flags & EDGE_EXECUTABLE))
3365 succe = NULL;
3366 break;
3369 if (succe)
3371 bb2 = succe->dest;
3373 /* Re-do the dominance check with changed bb2. */
3374 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3375 return true;
3379 /* We could now iterate updating bb1 / bb2. */
3380 return false;
3383 /* Set the value number of FROM to TO, return true if it has changed
3384 as a result. */
3386 static inline bool
3387 set_ssa_val_to (tree from, tree to)
3389 tree currval = SSA_VAL (from);
3390 poly_int64 toff, coff;
3392 /* The only thing we allow as value numbers are ssa_names
3393 and invariants. So assert that here. We don't allow VN_TOP
3394 as visiting a stmt should produce a value-number other than
3395 that.
3396 ??? Still VN_TOP can happen for unreachable code, so force
3397 it to varying in that case. Not all code is prepared to
3398 get VN_TOP on valueization. */
3399 if (to == VN_TOP)
3401 if (dump_file && (dump_flags & TDF_DETAILS))
3402 fprintf (dump_file, "Forcing value number to varying on "
3403 "receiving VN_TOP\n");
3404 to = from;
3407 gcc_assert (to != NULL_TREE
3408 && ((TREE_CODE (to) == SSA_NAME
3409 && (to == from || SSA_VAL (to) == to))
3410 || is_gimple_min_invariant (to)));
3412 if (from != to)
3414 if (currval == from)
3416 if (dump_file && (dump_flags & TDF_DETAILS))
3418 fprintf (dump_file, "Not changing value number of ");
3419 print_generic_expr (dump_file, from);
3420 fprintf (dump_file, " from VARYING to ");
3421 print_generic_expr (dump_file, to);
3422 fprintf (dump_file, "\n");
3424 return false;
3426 else if (currval != VN_TOP
3427 && ! is_gimple_min_invariant (currval)
3428 && is_gimple_min_invariant (to))
3430 if (dump_file && (dump_flags & TDF_DETAILS))
3432 fprintf (dump_file, "Forcing VARYING instead of changing "
3433 "value number of ");
3434 print_generic_expr (dump_file, from);
3435 fprintf (dump_file, " from ");
3436 print_generic_expr (dump_file, currval);
3437 fprintf (dump_file, " (non-constant) to ");
3438 print_generic_expr (dump_file, to);
3439 fprintf (dump_file, " (constant)\n");
3441 to = from;
3443 else if (TREE_CODE (to) == SSA_NAME
3444 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3445 to = from;
3448 if (dump_file && (dump_flags & TDF_DETAILS))
3450 fprintf (dump_file, "Setting value number of ");
3451 print_generic_expr (dump_file, from);
3452 fprintf (dump_file, " to ");
3453 print_generic_expr (dump_file, to);
3456 if (currval != to
3457 && !operand_equal_p (currval, to, 0)
3458 /* Different undefined SSA names are not actually different. See
3459 PR82320 for a testcase where we'd otherwise not terminate iteration. */
3460 && !(TREE_CODE (currval) == SSA_NAME
3461 && TREE_CODE (to) == SSA_NAME
3462 && ssa_undefined_value_p (currval, false)
3463 && ssa_undefined_value_p (to, false))
3464 /* ??? For addresses involving volatile objects or types operand_equal_p
3465 does not reliably detect ADDR_EXPRs as equal. We know we are only
3466 getting invariant gimple addresses here, so can use
3467 get_addr_base_and_unit_offset to do this comparison. */
3468 && !(TREE_CODE (currval) == ADDR_EXPR
3469 && TREE_CODE (to) == ADDR_EXPR
3470 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3471 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3472 && known_eq (coff, toff)))
3474 if (dump_file && (dump_flags & TDF_DETAILS))
3475 fprintf (dump_file, " (changed)\n");
3477 /* If we equate two SSA names we have to make the side-band info
3478 of the leader conservative (and remember whatever original value
3479 was present). */
3480 if (TREE_CODE (to) == SSA_NAME)
3482 if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3483 && SSA_NAME_RANGE_INFO (to))
3485 if (SSA_NAME_IS_DEFAULT_DEF (to)
3486 || dominated_by_p_w_unex
3487 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3488 gimple_bb (SSA_NAME_DEF_STMT (to))))
3489 /* Keep the info from the dominator. */
3491 else
3493 /* Save old info. */
3494 if (! VN_INFO (to)->info.range_info)
3496 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3497 VN_INFO (to)->range_info_anti_range_p
3498 = SSA_NAME_ANTI_RANGE_P (to);
3500 /* Rather than allocating memory and unioning the info
3501 just clear it. */
3502 if (dump_file && (dump_flags & TDF_DETAILS))
3504 fprintf (dump_file, "clearing range info of ");
3505 print_generic_expr (dump_file, to);
3506 fprintf (dump_file, "\n");
3508 SSA_NAME_RANGE_INFO (to) = NULL;
3511 else if (POINTER_TYPE_P (TREE_TYPE (to))
3512 && SSA_NAME_PTR_INFO (to))
3514 if (SSA_NAME_IS_DEFAULT_DEF (to)
3515 || dominated_by_p_w_unex
3516 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3517 gimple_bb (SSA_NAME_DEF_STMT (to))))
3518 /* Keep the info from the dominator. */
3520 else if (! SSA_NAME_PTR_INFO (from)
3521 /* Handle the case of trivially equivalent info. */
3522 || memcmp (SSA_NAME_PTR_INFO (to),
3523 SSA_NAME_PTR_INFO (from),
3524 sizeof (ptr_info_def)) != 0)
3526 /* Save old info. */
3527 if (! VN_INFO (to)->info.ptr_info)
3528 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3529 /* Rather than allocating memory and unioning the info
3530 just clear it. */
3531 if (dump_file && (dump_flags & TDF_DETAILS))
3533 fprintf (dump_file, "clearing points-to info of ");
3534 print_generic_expr (dump_file, to);
3535 fprintf (dump_file, "\n");
3537 SSA_NAME_PTR_INFO (to) = NULL;
3542 VN_INFO (from)->valnum = to;
3543 return true;
3545 if (dump_file && (dump_flags & TDF_DETAILS))
3546 fprintf (dump_file, "\n");
3547 return false;
3550 /* Mark as processed all the definitions in the defining stmt of USE, or
3551 the USE itself. */
3553 static void
3554 mark_use_processed (tree use)
3556 ssa_op_iter iter;
3557 def_operand_p defp;
3558 gimple *stmt = SSA_NAME_DEF_STMT (use);
3560 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3562 VN_INFO (use)->use_processed = true;
3563 return;
3566 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3568 tree def = DEF_FROM_PTR (defp);
3570 VN_INFO (def)->use_processed = true;
3574 /* Set all definitions in STMT to value number to themselves.
3575 Return true if a value number changed. */
3577 static bool
3578 defs_to_varying (gimple *stmt)
3580 bool changed = false;
3581 ssa_op_iter iter;
3582 def_operand_p defp;
3584 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3586 tree def = DEF_FROM_PTR (defp);
3587 changed |= set_ssa_val_to (def, def);
3589 return changed;
3592 /* Visit a copy between LHS and RHS, return true if the value number
3593 changed. */
3595 static bool
3596 visit_copy (tree lhs, tree rhs)
3598 /* Valueize. */
3599 rhs = SSA_VAL (rhs);
3601 return set_ssa_val_to (lhs, rhs);
3604 /* Lookup a value for OP in type WIDE_TYPE that has the same value as
3605 OP has in the type of OP. */
3607 static tree
3608 valueized_wider_op (tree wide_type, tree op)
3610 if (TREE_CODE (op) == SSA_NAME)
3611 op = SSA_VAL (op);
3613 /* Either we have the op widened available. */
3614 tree ops[3] = {};
3615 ops[0] = op;
3616 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
3617 wide_type, ops, NULL);
3618 if (tem)
3619 return tem;
3621 /* Or the op is truncated from some existing value. */
3622 if (TREE_CODE (op) == SSA_NAME)
3624 gimple *def = SSA_NAME_DEF_STMT (op);
3625 if (is_gimple_assign (def)
3626 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3628 tem = gimple_assign_rhs1 (def);
3629 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
3631 if (TREE_CODE (tem) == SSA_NAME)
3632 tem = SSA_VAL (tem);
3633 return tem;
3638 /* For constants simply extend it. */
3639 if (TREE_CODE (op) == INTEGER_CST)
3640 return wide_int_to_tree (wide_type, wi::to_wide (op));
3642 return NULL_TREE;
3645 /* Visit a nary operator RHS, value number it, and return true if the
3646 value number of LHS has changed as a result. */
3648 static bool
3649 visit_nary_op (tree lhs, gassign *stmt)
3651 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3652 if (result)
3653 return set_ssa_val_to (lhs, result);
3655 /* Do some special pattern matching for redundancies of operations
3656 in different types. */
3657 enum tree_code code = gimple_assign_rhs_code (stmt);
3658 tree type = TREE_TYPE (lhs);
3659 tree rhs1 = gimple_assign_rhs1 (stmt);
3660 switch (code)
3662 CASE_CONVERT:
3663 /* Match arithmetic done in a different type where we can easily
3664 substitute the result from some earlier sign-changed or widened
3665 operation. */
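/* Sketch of the zero-extension case (types chosen for illustration):
     _1 = a_2 + b_3;           (unsigned short)
     _4 = (unsigned int) _1;
   If an unsigned int addition of the widened a_2 and b_3 is already
   value-numbered, _4 can reuse it masked with 0xffff; for pure
   sign-changes no mask is needed.  */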
3666 if (INTEGRAL_TYPE_P (type)
3667 && TREE_CODE (rhs1) == SSA_NAME
3668 /* We only handle sign-changes or zero-extension -> & mask. */
3669 && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
3670 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
3671 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
3673 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
3674 if (def
3675 && (gimple_assign_rhs_code (def) == PLUS_EXPR
3676 || gimple_assign_rhs_code (def) == MINUS_EXPR
3677 || gimple_assign_rhs_code (def) == MULT_EXPR))
3679 tree ops[3] = {};
3680 /* Either we have the op widened available. */
3681 ops[0] = valueized_wider_op (type,
3682 gimple_assign_rhs1 (def));
3683 if (ops[0])
3684 ops[1] = valueized_wider_op (type,
3685 gimple_assign_rhs2 (def));
3686 if (ops[0] && ops[1])
3688 ops[0] = vn_nary_op_lookup_pieces
3689 (2, gimple_assign_rhs_code (def), type, ops, NULL);
3690 /* We have wider operation available. */
3691 if (ops[0])
3693 unsigned lhs_prec = TYPE_PRECISION (type);
3694 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
3695 if (lhs_prec == rhs_prec)
3697 ops[1] = NULL_TREE;
3698 result = vn_nary_build_or_lookup (NOP_EXPR,
3699 type, ops);
3700 if (result)
3702 bool changed = set_ssa_val_to (lhs, result);
3703 vn_nary_op_insert_stmt (stmt, result);
3704 return changed;
3707 else
3709 ops[1] = wide_int_to_tree (type,
3710 wi::mask (rhs_prec, false,
3711 lhs_prec));
3712 result = vn_nary_build_or_lookup (BIT_AND_EXPR,
3713 TREE_TYPE (lhs),
3714 ops);
3715 if (result)
3717 bool changed = set_ssa_val_to (lhs, result);
3718 vn_nary_op_insert_stmt (stmt, result);
3719 return changed;
3726 default:;
3729 bool changed = set_ssa_val_to (lhs, lhs);
3730 vn_nary_op_insert_stmt (stmt, lhs);
3731 return changed;
3734 /* Visit a call STMT storing into LHS. Return true if the value number
3735 of the LHS has changed as a result. */
3737 static bool
3738 visit_reference_op_call (tree lhs, gcall *stmt)
3740 bool changed = false;
3741 struct vn_reference_s vr1;
3742 vn_reference_t vnresult = NULL;
3743 tree vdef = gimple_vdef (stmt);
3745 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3746 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3747 lhs = NULL_TREE;
3749 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3750 if (vnresult)
3752 if (vnresult->result_vdef && vdef)
3753 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3754 else if (vdef)
3755 /* If the call was discovered to be pure or const reflect
3756 that as far as possible. */
3757 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
3759 if (!vnresult->result && lhs)
3760 vnresult->result = lhs;
3762 if (vnresult->result && lhs)
3763 changed |= set_ssa_val_to (lhs, vnresult->result);
3765 else
3767 vn_reference_t vr2;
3768 vn_reference_s **slot;
3769 tree vdef_val = vdef;
3770 if (vdef)
3772 /* If we value numbered an indirect call's function to
3773 one not clobbering memory, value number its VDEF to its
3774 VUSE. */
3775 tree fn = gimple_call_fn (stmt);
3776 if (fn && TREE_CODE (fn) == SSA_NAME)
3778 fn = SSA_VAL (fn);
3779 if (TREE_CODE (fn) == ADDR_EXPR
3780 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3781 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
3782 & (ECF_CONST | ECF_PURE)))
3783 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
3785 changed |= set_ssa_val_to (vdef, vdef_val);
3787 if (lhs)
3788 changed |= set_ssa_val_to (lhs, lhs);
3789 vr2 = current_info->references_pool->allocate ();
3790 vr2->vuse = vr1.vuse;
3791 /* As we are not walking the virtual operand chain we know the
3792 shared_lookup_references are still original so we can re-use
3793 them here. */
3794 vr2->operands = vr1.operands.copy ();
3795 vr2->type = vr1.type;
3796 vr2->set = vr1.set;
3797 vr2->hashcode = vr1.hashcode;
3798 vr2->result = lhs;
3799 vr2->result_vdef = vdef_val;
3800 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3801 INSERT);
3802 gcc_assert (!*slot);
3803 *slot = vr2;
3806 return changed;
3809 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3810 and return true if the value number of the LHS has changed as a result. */
3812 static bool
3813 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3815 bool changed = false;
3816 tree last_vuse;
3817 tree result;
3819 last_vuse = gimple_vuse (stmt);
3820 last_vuse_ptr = &last_vuse;
3821 result = vn_reference_lookup (op, gimple_vuse (stmt),
3822 default_vn_walk_kind, NULL, true);
3823 last_vuse_ptr = NULL;
3825 /* We handle type-punning through unions by value-numbering based
3826 on offset and size of the access. Be prepared to handle a
3827 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
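/* For example (sketch):
     union { int i; float f; } u;
     u.i = x_1;
     ... = u.f;
   The value found for the read is x_1 of type int; a
   VIEW_CONVERT_EXPR to float is built or looked up so the types
   agree.  */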
3828 if (result
3829 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3831 /* We will be setting the value number of lhs to the value number
3832 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3833 So first simplify and lookup this expression to see if it
3834 is already available. */
3835 code_helper rcode = VIEW_CONVERT_EXPR;
3836 tree ops[3] = { result };
3837 result = vn_nary_build_or_lookup (rcode, TREE_TYPE (op), ops);
3840 if (result)
3841 changed = set_ssa_val_to (lhs, result);
3842 else
3844 changed = set_ssa_val_to (lhs, lhs);
3845 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3848 return changed;
3852 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3853 and return true if the value number of the LHS has changed as a result. */
3855 static bool
3856 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3858 bool changed = false;
3859 vn_reference_t vnresult = NULL;
3860 tree assign;
3861 bool resultsame = false;
3862 tree vuse = gimple_vuse (stmt);
3863 tree vdef = gimple_vdef (stmt);
3865 if (TREE_CODE (op) == SSA_NAME)
3866 op = SSA_VAL (op);
3868 /* First we want to lookup using the *vuses* from the store and see
3869 if the last store to this location with the same address
3870 had the same value.
3872 The vuses represent the memory state before the store. If the
3873 memory state, address, and value of the store are the same as the
3874 last store to this location, then this store will produce the
3875 same memory state as that store.
3877 In this case the vdef versions for this store are value numbered to those
3878 vuse versions, since they represent the same memory state after
3879 this store.
3881 Otherwise, the vdefs for the store are used when inserting into
3882 the table, since the store generates a new memory state. */
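   /* An illustrative (hypothetical) GIMPLE fragment:

        # .MEM_3 = VDEF <.MEM_2>
        *p_1 = 4;
        # .MEM_4 = VDEF <.MEM_3>
        *p_1 = 4;

      The lookup below finds that the second store writes the same value
      the first one did to the same location, so .MEM_4 is value numbered
      to the value of .MEM_3 and the second store becomes a candidate for
      removal during elimination.  */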
3884 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
3885 if (vnresult
3886 && vnresult->result)
3888 tree result = vnresult->result;
3889 if (TREE_CODE (result) == SSA_NAME)
3890 result = SSA_VAL (result);
3891 resultsame = expressions_equal_p (result, op);
3892 if (resultsame)
3894 /* If the TBAA state isn't compatible for downstream reads
3895 we cannot value-number the VDEFs the same. */
3896 alias_set_type set = get_alias_set (lhs);
3897 if (vnresult->set != set
3898 && ! alias_set_subset_of (set, vnresult->set))
3899 resultsame = false;
3903 if (!resultsame)
3905 /* Only perform the following when being called from PRE
3906 which embeds tail merging. */
3907 if (default_vn_walk_kind == VN_WALK)
3909 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3910 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3911 if (vnresult)
3913 VN_INFO (vdef)->use_processed = true;
3914 return set_ssa_val_to (vdef, vnresult->result_vdef);
3918 if (dump_file && (dump_flags & TDF_DETAILS))
3920 fprintf (dump_file, "No store match\n");
3921 fprintf (dump_file, "Value numbering store ");
3922 print_generic_expr (dump_file, lhs);
3923 fprintf (dump_file, " to ");
3924 print_generic_expr (dump_file, op);
3925 fprintf (dump_file, "\n");
3927 /* Have to set value numbers before insert, since insert is
3928 going to valueize the references in-place. */
3929 if (vdef)
3930 changed |= set_ssa_val_to (vdef, vdef);
3932 /* Do not insert structure copies into the tables. */
3933 if (is_gimple_min_invariant (op)
3934 || is_gimple_reg (op))
3935 vn_reference_insert (lhs, op, vdef, NULL);
3937 /* Only perform the following when being called from PRE
3938 which embeds tail merging. */
3939 if (default_vn_walk_kind == VN_WALK)
3941 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3942 vn_reference_insert (assign, lhs, vuse, vdef);
3945 else
3947 /* We had a match, so value number the vdef to have the value
3948 number of the vuse it came from. */
3950 if (dump_file && (dump_flags & TDF_DETAILS))
3951 fprintf (dump_file, "Store matched earlier value, "
3952 "value numbering store vdefs to matching vuses.\n");
3954 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3957 return changed;
3960 /* Visit and value number PHI, return true if the value number
3961 changed. */
3963 static bool
3964 visit_phi (gimple *phi)
3966 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
3967 unsigned n_executable = 0;
3968 bool allsame = true;
3969 edge_iterator ei;
3970 edge e;
3972 /* TODO: We could check for this in init_sccvn, and replace this
3973 with a gcc_assert. */
3974 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3975 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3977 /* See if all non-TOP arguments have the same value. TOP is
3978 equivalent to everything, so we can ignore it. */
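  /* As an illustrative example (hypothetical SSA names), for

       x_4 = PHI <5(2), 5(3)>

     with both incoming edges executable every argument values to 5,
     so sameval becomes 5 and the PHI result is value numbered to that
     constant below.  */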
3979 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3980 if (e->flags & EDGE_EXECUTABLE)
3982 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3984 ++n_executable;
3985 if (TREE_CODE (def) == SSA_NAME)
3986 def = SSA_VAL (def);
3987 if (def == VN_TOP)
3989 /* Ignore undefined defs for sameval but record one. */
3990 else if (TREE_CODE (def) == SSA_NAME
3991 && ssa_undefined_value_p (def, false))
3992 seen_undef = def;
3993 else if (sameval == VN_TOP)
3994 sameval = def;
3995 else if (!expressions_equal_p (def, sameval))
3997 allsame = false;
3998 break;
4003 /* If none of the edges was executable keep the value-number at VN_TOP,
4004 if only a single edge is executable use its value. */
4005 if (n_executable <= 1)
4006 result = seen_undef ? seen_undef : sameval;
4007 /* If we saw only undefined values and VN_TOP use one of the
4008 undefined values. */
4009 else if (sameval == VN_TOP)
4010 result = seen_undef ? seen_undef : sameval;
4011 /* First see if it is equivalent to a phi node in this block. We prefer
4012 this as it allows IV elimination - see PRs 66502 and 67167. */
4013 else if ((result = vn_phi_lookup (phi)))
4015 /* If all values are the same use that, unless we've seen undefined
4016 values as well and the value isn't constant.
4017 CCP/copyprop have the same restriction to not remove uninit warnings. */
4018 else if (allsame
4019 && (! seen_undef || is_gimple_min_invariant (sameval)))
4020 result = sameval;
4021 else
4023 result = PHI_RESULT (phi);
4024 /* Only insert PHIs that are varying; for constant value numbers
4025 we would mess up equivalences otherwise, as we are only comparing
4026 the immediate controlling predicates. */
4027 vn_phi_insert (phi, result);
4030 return set_ssa_val_to (PHI_RESULT (phi), result);
4033 /* Try to simplify RHS using equivalences and constant folding. */
4035 static tree
4036 try_to_simplify (gassign *stmt)
4038 enum tree_code code = gimple_assign_rhs_code (stmt);
4039 tree tem;
4041 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
4042 in this case, there is no point in doing extra work. */
4043 if (code == SSA_NAME)
4044 return NULL_TREE;
4046 /* First try constant folding based on our current lattice. */
4047 mprts_hook = vn_lookup_simplify_result;
4048 mprts_hook_cnt = 9;
4049 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
4050 mprts_hook = NULL;
4051 if (tem
4052 && (TREE_CODE (tem) == SSA_NAME
4053 || is_gimple_min_invariant (tem)))
4054 return tem;
4056 return NULL_TREE;
4059 /* Visit and value number USE, return true if the value number
4060 changed. */
4062 static bool
4063 visit_use (tree use)
4065 bool changed = false;
4066 gimple *stmt = SSA_NAME_DEF_STMT (use);
4068 mark_use_processed (use);
4070 gcc_assert (!SSA_NAME_IN_FREE_LIST (use)
4071 && !SSA_NAME_IS_DEFAULT_DEF (use));
4073 if (dump_file && (dump_flags & TDF_DETAILS))
4075 fprintf (dump_file, "Value numbering ");
4076 print_generic_expr (dump_file, use);
4077 fprintf (dump_file, " stmt = ");
4078 print_gimple_stmt (dump_file, stmt, 0);
4081 if (gimple_code (stmt) == GIMPLE_PHI)
4082 changed = visit_phi (stmt);
4083 else if (gimple_has_volatile_ops (stmt))
4084 changed = defs_to_varying (stmt);
4085 else if (gassign *ass = dyn_cast <gassign *> (stmt))
4087 enum tree_code code = gimple_assign_rhs_code (ass);
4088 tree lhs = gimple_assign_lhs (ass);
4089 tree rhs1 = gimple_assign_rhs1 (ass);
4090 tree simplified;
4092 /* Shortcut for copies. Simplifying copies is pointless,
4093 since we copy the expression and value they represent. */
4094 if (code == SSA_NAME
4095 && TREE_CODE (lhs) == SSA_NAME)
4097 changed = visit_copy (lhs, rhs1);
4098 goto done;
4100 simplified = try_to_simplify (ass);
4101 if (simplified)
4103 if (dump_file && (dump_flags & TDF_DETAILS))
4105 fprintf (dump_file, "RHS ");
4106 print_gimple_expr (dump_file, ass, 0);
4107 fprintf (dump_file, " simplified to ");
4108 print_generic_expr (dump_file, simplified);
4109 fprintf (dump_file, "\n");
4112 /* Setting value numbers to constants will occasionally
4113 screw up phi congruence because constants are not
4114 uniquely associated with a single ssa name that can be
4115 looked up. */
4116 if (simplified
4117 && is_gimple_min_invariant (simplified)
4118 && TREE_CODE (lhs) == SSA_NAME)
4120 changed = set_ssa_val_to (lhs, simplified);
4121 goto done;
4123 else if (simplified
4124 && TREE_CODE (simplified) == SSA_NAME
4125 && TREE_CODE (lhs) == SSA_NAME)
4127 changed = visit_copy (lhs, simplified);
4128 goto done;
4131 if ((TREE_CODE (lhs) == SSA_NAME
4132 /* We can substitute SSA_NAMEs that are live over
4133 abnormal edges with their constant value. */
4134 && !(gimple_assign_copy_p (ass)
4135 && is_gimple_min_invariant (rhs1))
4136 && !(simplified
4137 && is_gimple_min_invariant (simplified))
4138 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4139 /* Stores or copies from SSA_NAMEs that are live over
4140 abnormal edges are a problem. */
4141 || (code == SSA_NAME
4142 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4143 changed = defs_to_varying (ass);
4144 else if (REFERENCE_CLASS_P (lhs)
4145 || DECL_P (lhs))
4146 changed = visit_reference_op_store (lhs, rhs1, ass);
4147 else if (TREE_CODE (lhs) == SSA_NAME)
4149 if ((gimple_assign_copy_p (ass)
4150 && is_gimple_min_invariant (rhs1))
4151 || (simplified
4152 && is_gimple_min_invariant (simplified)))
4154 if (simplified)
4155 changed = set_ssa_val_to (lhs, simplified);
4156 else
4157 changed = set_ssa_val_to (lhs, rhs1);
4159 else
4161 /* Visit the original statement. */
4162 switch (vn_get_stmt_kind (ass))
4164 case VN_NARY:
4165 changed = visit_nary_op (lhs, ass);
4166 break;
4167 case VN_REFERENCE:
4168 changed = visit_reference_op_load (lhs, rhs1, ass);
4169 break;
4170 default:
4171 changed = defs_to_varying (ass);
4172 break;
4176 else
4177 changed = defs_to_varying (ass);
4179 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4181 tree lhs = gimple_call_lhs (call_stmt);
4182 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4184 /* Try constant folding based on our current lattice. */
4185 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4186 vn_valueize);
4187 if (simplified)
4189 if (dump_file && (dump_flags & TDF_DETAILS))
4191 fprintf (dump_file, "call ");
4192 print_gimple_expr (dump_file, call_stmt, 0);
4193 fprintf (dump_file, " simplified to ");
4194 print_generic_expr (dump_file, simplified);
4195 fprintf (dump_file, "\n");
4198 /* Setting value numbers to constants will occasionally
4199 screw up phi congruence because constants are not
4200 uniquely associated with a single ssa name that can be
4201 looked up. */
4202 if (simplified
4203 && is_gimple_min_invariant (simplified))
4205 changed = set_ssa_val_to (lhs, simplified);
4206 if (gimple_vdef (call_stmt))
4207 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4208 SSA_VAL (gimple_vuse (call_stmt)));
4209 goto done;
4211 else if (simplified
4212 && TREE_CODE (simplified) == SSA_NAME)
4214 changed = visit_copy (lhs, simplified);
4215 if (gimple_vdef (call_stmt))
4216 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4217 SSA_VAL (gimple_vuse (call_stmt)));
4218 goto done;
4220 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4222 changed = defs_to_varying (call_stmt);
4223 goto done;
4227 /* Pick up flags from a devirtualization target. */
4228 tree fn = gimple_call_fn (stmt);
4229 int extra_fnflags = 0;
4230 if (fn && TREE_CODE (fn) == SSA_NAME)
4232 fn = SSA_VAL (fn);
4233 if (TREE_CODE (fn) == ADDR_EXPR
4234 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4235 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4237 if (!gimple_call_internal_p (call_stmt)
4238 && (/* Calls to the same function with the same vuse
4239 and the same operands do not necessarily return the same
4240 value, unless they're pure or const. */
4241 ((gimple_call_flags (call_stmt) | extra_fnflags)
4242 & (ECF_PURE | ECF_CONST))
4243 /* If calls have a vdef, subsequent calls won't have
4244 the same incoming vuse. So, if 2 calls with vdef have the
4245 same vuse, we know they're not subsequent.
4246 We can value number 2 calls to the same function with the
4247 same vuse and the same operands that are not subsequent to
4248 each other the same, because there is no code in the program
4249 that can compare the 2 values...
4250 || (gimple_vdef (call_stmt)
4251 /* ... unless the call returns a pointer which does
4252 not alias with anything else. In which case the
4253 information that the values are distinct is encoded
4254 in the IL. */
4255 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4256 /* Only perform the following when being called from PRE
4257 which embeds tail merging. */
4258 && default_vn_walk_kind == VN_WALK)))
4259 changed = visit_reference_op_call (lhs, call_stmt);
4260 else
4261 changed = defs_to_varying (call_stmt);
4263 else
4264 changed = defs_to_varying (stmt);
4265 done:
4266 return changed;
4269 /* Compare two operands by reverse postorder index */
4271 static int
4272 compare_ops (const void *pa, const void *pb)
4274 const tree opa = *((const tree *)pa);
4275 const tree opb = *((const tree *)pb);
4276 gimple *opstmta = SSA_NAME_DEF_STMT (opa);
4277 gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
4278 basic_block bba;
4279 basic_block bbb;
4281 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
4282 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4283 else if (gimple_nop_p (opstmta))
4284 return -1;
4285 else if (gimple_nop_p (opstmtb))
4286 return 1;
4288 bba = gimple_bb (opstmta);
4289 bbb = gimple_bb (opstmtb);
4291 if (!bba && !bbb)
4292 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4293 else if (!bba)
4294 return -1;
4295 else if (!bbb)
4296 return 1;
4298 if (bba == bbb)
4300 if (gimple_code (opstmta) == GIMPLE_PHI
4301 && gimple_code (opstmtb) == GIMPLE_PHI)
4302 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4303 else if (gimple_code (opstmta) == GIMPLE_PHI)
4304 return -1;
4305 else if (gimple_code (opstmtb) == GIMPLE_PHI)
4306 return 1;
4307 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
4308 return gimple_uid (opstmta) - gimple_uid (opstmtb);
4309 else
4310 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4312 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
4315 /* Sort an array containing members of a strongly connected component
4316 SCC so that the members are ordered by RPO number.
4317 This means that when the sort is complete, iterating through the
4318 array will give you the members in RPO order. */
4320 static void
4321 sort_scc (vec<tree> scc)
4323 scc.qsort (compare_ops);
4326 /* Insert the no longer used nary ONARY to the hash INFO. */
4328 static void
4329 copy_nary (vn_nary_op_t onary, vn_tables_t info)
4331 size_t size = sizeof_vn_nary_op (onary->length);
4332 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
4333 &info->nary_obstack);
4334 memcpy (nary, onary, size);
4335 vn_nary_op_insert_into (nary, info->nary, false);
4338 /* Insert the no longer used phi OPHI to the hash INFO. */
4340 static void
4341 copy_phi (vn_phi_t ophi, vn_tables_t info)
4343 vn_phi_t phi = info->phis_pool->allocate ();
4344 vn_phi_s **slot;
4345 memcpy (phi, ophi, sizeof (*phi));
4346 ophi->phiargs.create (0);
4347 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
4348 gcc_assert (!*slot);
4349 *slot = phi;
4352 /* Insert the no longer used reference OREF to the hash INFO. */
4354 static void
4355 copy_reference (vn_reference_t oref, vn_tables_t info)
4357 vn_reference_t ref;
4358 vn_reference_s **slot;
4359 ref = info->references_pool->allocate ();
4360 memcpy (ref, oref, sizeof (*ref));
4361 oref->operands.create (0);
4362 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
4363 if (*slot)
4364 free_reference (*slot);
4365 *slot = ref;
4368 /* Process a strongly connected component in the SSA graph. */
4370 static void
4371 process_scc (vec<tree> scc)
4373 tree var;
4374 unsigned int i;
4375 unsigned int iterations = 0;
4376 bool changed = true;
4377 vn_nary_op_iterator_type hin;
4378 vn_phi_iterator_type hip;
4379 vn_reference_iterator_type hir;
4380 vn_nary_op_t nary;
4381 vn_phi_t phi;
4382 vn_reference_t ref;
4384 /* If the SCC has a single member, just visit it. */
4385 if (scc.length () == 1)
4387 tree use = scc[0];
4388 if (VN_INFO (use)->use_processed)
4389 return;
4390 /* We need to make sure it doesn't form a cycle itself, which can
4391 happen for self-referential PHI nodes. In that case we would
4392 end up inserting an expression with VN_TOP operands into the
4393 valid table which makes us derive bogus equivalences later.
4394 The cheapest way to check this is to assume it for all PHI nodes. */
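  /* For example (hypothetical SSA names) the single-member SCC

       x_1 = PHI <0(2), x_1(3)>

     references itself; visiting it just once could record an expression
     with a VN_TOP operand in the valid table, so we fall through and
     iterate for all PHIs.  */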
4395 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
4396 /* Fallthru to iteration. */ ;
4397 else
4399 visit_use (use);
4400 return;
4404 if (dump_file && (dump_flags & TDF_DETAILS))
4405 print_scc (dump_file, scc);
4407 /* Iterate over the SCC with the optimistic table until it stops
4408 changing. */
4409 current_info = optimistic_info;
4410 while (changed)
4412 changed = false;
4413 iterations++;
4414 if (dump_file && (dump_flags & TDF_DETAILS))
4415 fprintf (dump_file, "Starting iteration %d\n", iterations);
4416 /* As we are value-numbering optimistically we have to
4417 clear the expression tables and the simplified expressions
4418 in each iteration until we converge. */
4419 optimistic_info->nary->empty ();
4420 optimistic_info->phis->empty ();
4421 optimistic_info->references->empty ();
4422 obstack_free (&optimistic_info->nary_obstack, NULL);
4423 gcc_obstack_init (&optimistic_info->nary_obstack);
4424 optimistic_info->phis_pool->release ();
4425 optimistic_info->references_pool->release ();
4426 FOR_EACH_VEC_ELT (scc, i, var)
4427 gcc_assert (!VN_INFO (var)->needs_insertion
4428 && VN_INFO (var)->expr == NULL);
4429 FOR_EACH_VEC_ELT (scc, i, var)
4430 changed |= visit_use (var);
4433 if (dump_file && (dump_flags & TDF_DETAILS))
4434 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4435 statistics_histogram_event (cfun, "SCC iterations", iterations);
4437 /* Finally, copy the contents of the no longer used optimistic
4438 table to the valid table. */
4439 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
4440 copy_nary (nary, valid_info);
4441 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
4442 copy_phi (phi, valid_info);
4443 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
4444 ref, vn_reference_t, hir)
4445 copy_reference (ref, valid_info);
4447 current_info = valid_info;
4451 /* Pop the components of the found SCC for NAME off the SCC stack
4452 and process them. If the SCC exceeds the size limit its members
4453 are dropped to varying instead of being value numbered. */
4455 static void
4456 extract_and_process_scc_for_name (tree name)
4458 auto_vec<tree> scc;
4459 tree x;
4461 /* Found an SCC, pop the components off the SCC stack and
4462 process them. */
4465 x = sccstack.pop ();
4467 VN_INFO (x)->on_sccstack = false;
4468 scc.safe_push (x);
4469 } while (x != name);
4471 /* Drop all defs in the SCC to varying in case an SCC turns out to be
4472 incredibly large.
4473 ??? Just switch to a non-optimistic mode that avoids any iteration. */
4474 if (scc.length () > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4476 if (dump_file)
4478 print_scc (dump_file, scc);
4479 fprintf (dump_file, "WARNING: Giving up value-numbering SCC due to "
4480 "size %u exceeding %u\n", scc.length (),
4481 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4483 tree var;
4484 unsigned i;
4485 FOR_EACH_VEC_ELT (scc, i, var)
4487 gimple *def = SSA_NAME_DEF_STMT (var);
4488 mark_use_processed (var);
4489 if (SSA_NAME_IS_DEFAULT_DEF (var)
4490 || gimple_code (def) == GIMPLE_PHI)
4491 set_ssa_val_to (var, var);
4492 else
4493 defs_to_varying (def);
4495 return;
4498 if (scc.length () > 1)
4499 sort_scc (scc);
4501 process_scc (scc);
4504 /* Depth first search on NAME to discover and process SCC's in the SSA
4505 graph.
4506 Execution of this algorithm relies on the fact that the SCC's are
4507 popped off the stack in topological order. Resource limits on
4508 SCC size are handled in extract_and_process_scc_for_name, which
4509 drops overly large SCCs to varying instead of value numbering them. */
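/* As an illustrative example (hypothetical SSA names), the loop counter

     i_1 = PHI <0(2), i_2(3)>
     i_2 = i_1 + 1;

   forms the SCC {i_1, i_2}.  The walk below discovers it via the
   low-link computation and hands it to extract_and_process_scc_for_name
   once the low value of the SCC root equals its DFS number.  */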
4511 static void
4512 DFS (tree name)
4514 auto_vec<ssa_op_iter> itervec;
4515 auto_vec<tree> namevec;
4516 use_operand_p usep = NULL;
4517 gimple *defstmt;
4518 tree use;
4519 ssa_op_iter iter;
4521 start_over:
4522 /* SCC info */
4523 VN_INFO (name)->dfsnum = next_dfs_num++;
4524 VN_INFO (name)->visited = true;
4525 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4527 sccstack.safe_push (name);
4528 VN_INFO (name)->on_sccstack = true;
4529 defstmt = SSA_NAME_DEF_STMT (name);
4531 /* Recursively DFS on our operands, looking for SCC's. */
4532 if (!gimple_nop_p (defstmt))
4534 /* Push a new iterator. */
4535 if (gphi *phi = dyn_cast <gphi *> (defstmt))
4536 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4537 else
4538 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4540 else
4541 clear_and_done_ssa_iter (&iter);
4543 while (1)
4545 /* If we are done processing uses of a name, go up the stack
4546 of iterators and process SCCs as we found them. */
4547 if (op_iter_done (&iter))
4549 /* See if we found an SCC. */
4550 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4551 extract_and_process_scc_for_name (name);
4553 /* Check if we are done. */
4554 if (namevec.is_empty ())
4555 return;
4557 /* Restore the last use walker and continue walking there. */
4558 use = name;
4559 name = namevec.pop ();
4560 memcpy (&iter, &itervec.last (),
4561 sizeof (ssa_op_iter));
4562 itervec.pop ();
4563 goto continue_walking;
4566 use = USE_FROM_PTR (usep);
4568 /* Since we handle phi nodes, we will sometimes get
4569 invariants in the use expression. */
4570 if (TREE_CODE (use) == SSA_NAME)
4572 if (! (VN_INFO (use)->visited))
4574 /* Recurse by pushing the current use walking state on
4575 the stack and starting over. */
4576 itervec.safe_push (iter);
4577 namevec.safe_push (name);
4578 name = use;
4579 goto start_over;
4581 continue_walking:
4582 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4583 VN_INFO (use)->low);
4585 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4586 && VN_INFO (use)->on_sccstack)
4588 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4589 VN_INFO (name)->low);
4593 usep = op_iter_next_use (&iter);
4597 /* Allocate a value number table. */
4599 static void
4600 allocate_vn_table (vn_tables_t table)
4602 table->phis = new vn_phi_table_type (23);
4603 table->nary = new vn_nary_op_table_type (23);
4604 table->references = new vn_reference_table_type (23);
4606 gcc_obstack_init (&table->nary_obstack);
4607 table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4608 table->references_pool = new object_allocator<vn_reference_s>
4609 ("VN references");
4612 /* Free a value number table. */
4614 static void
4615 free_vn_table (vn_tables_t table)
4617 delete table->phis;
4618 table->phis = NULL;
4619 delete table->nary;
4620 table->nary = NULL;
4621 delete table->references;
4622 table->references = NULL;
4623 obstack_free (&table->nary_obstack, NULL);
4624 delete table->phis_pool;
4625 delete table->references_pool;
4628 static void
4629 init_scc_vn (void)
4631 int j;
4632 int *rpo_numbers_temp;
4634 calculate_dominance_info (CDI_DOMINATORS);
4635 mark_dfs_back_edges ();
4637 sccstack.create (0);
4638 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4640 constant_value_ids = BITMAP_ALLOC (NULL);
4642 next_dfs_num = 1;
4643 next_value_id = 1;
4645 vn_ssa_aux_table.create (num_ssa_names + 1);
4646 /* The create call doesn't actually grow the vector to the right
4647 size, it just preallocates the space to do so. */
4648 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4649 gcc_obstack_init (&vn_ssa_aux_obstack);
4651 shared_lookup_phiargs.create (0);
4652 shared_lookup_references.create (0);
4653 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4654 rpo_numbers_temp =
4655 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4656 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4658 /* rpo_numbers_temp is an array in RPO order: rpo_numbers_temp[i] = bb
4659 means that the i'th block in RPO order is bb. We want to map bb
4660 indices to RPO numbers, so we need to invert this array. */
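  /* For example (hypothetical block indices), if rpo_numbers_temp is
     { 2, 4, 3 } then block 2 is first in RPO, and after the inversion
     below rpo_numbers[2] == 0, rpo_numbers[4] == 1 and rpo_numbers[3] == 2.  */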
4661 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4662 rpo_numbers[rpo_numbers_temp[j]] = j;
4664 XDELETE (rpo_numbers_temp);
4666 VN_TOP = build_decl (UNKNOWN_LOCATION, VAR_DECL,
4667 get_identifier ("VN_TOP"), error_mark_node);
4669 renumber_gimple_stmt_uids ();
4671 /* Create the valid and optimistic value numbering tables. */
4672 valid_info = XCNEW (struct vn_tables_s);
4673 allocate_vn_table (valid_info);
4674 optimistic_info = XCNEW (struct vn_tables_s);
4675 allocate_vn_table (optimistic_info);
4676 current_info = valid_info;
4678 /* Create the VN_INFO structures, and initialize value numbers to
4679 TOP or VARYING for parameters. */
4680 size_t i;
4681 tree name;
4683 FOR_EACH_SSA_NAME (i, name, cfun)
4685 VN_INFO_GET (name)->valnum = VN_TOP;
4686 VN_INFO (name)->needs_insertion = false;
4687 VN_INFO (name)->expr = NULL;
4688 VN_INFO (name)->value_id = 0;
4690 if (!SSA_NAME_IS_DEFAULT_DEF (name))
4691 continue;
4693 switch (TREE_CODE (SSA_NAME_VAR (name)))
4695 case VAR_DECL:
4696 /* All undefined vars are VARYING. */
4697 VN_INFO (name)->valnum = name;
4698 VN_INFO (name)->visited = true;
4699 break;
4701 case PARM_DECL:
4702 /* Parameters are VARYING but we can record a condition
4703 if we know it is a non-NULL pointer. */
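	/* For example (hypothetical SSA name), for a nonnull parameter
	   p_1(D) the expression p_1(D) != 0 is recorded below with value
	   true, so later comparisons of p_1(D) against NULL can fold.  */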
4704 VN_INFO (name)->visited = true;
4705 VN_INFO (name)->valnum = name;
4706 if (POINTER_TYPE_P (TREE_TYPE (name))
4707 && nonnull_arg_p (SSA_NAME_VAR (name)))
4709 tree ops[2];
4710 ops[0] = name;
4711 ops[1] = build_int_cst (TREE_TYPE (name), 0);
4712 vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4713 boolean_true_node, 0);
4714 if (dump_file && (dump_flags & TDF_DETAILS))
4716 fprintf (dump_file, "Recording ");
4717 print_generic_expr (dump_file, name, TDF_SLIM);
4718 fprintf (dump_file, " != 0\n");
4721 break;
4723 case RESULT_DECL:
4724 /* If the result is passed by invisible reference the default
4725 def is initialized, otherwise it's uninitialized. Either way
4726 it is treated as varying. */
4727 VN_INFO (name)->visited = true;
4728 VN_INFO (name)->valnum = name;
4729 break;
4731 default:
4732 gcc_unreachable ();
4737 /* Restore SSA info that has been reset on value leaders. */
4739 void
4740 scc_vn_restore_ssa_info (void)
4742 unsigned i;
4743 tree name;
4745 FOR_EACH_SSA_NAME (i, name, cfun)
4747 if (has_VN_INFO (name))
4749 if (VN_INFO (name)->needs_insertion)
4751 else if (POINTER_TYPE_P (TREE_TYPE (name))
4752 && VN_INFO (name)->info.ptr_info)
4753 SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4754 else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4755 && VN_INFO (name)->info.range_info)
4757 SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4758 SSA_NAME_ANTI_RANGE_P (name)
4759 = VN_INFO (name)->range_info_anti_range_p;
4765 void
4766 free_scc_vn (void)
4768 size_t i;
4769 tree name;
4771 delete constant_to_value_id;
4772 constant_to_value_id = NULL;
4773 BITMAP_FREE (constant_value_ids);
4774 shared_lookup_phiargs.release ();
4775 shared_lookup_references.release ();
4776 XDELETEVEC (rpo_numbers);
4778 FOR_EACH_SSA_NAME (i, name, cfun)
4780 if (has_VN_INFO (name)
4781 && VN_INFO (name)->needs_insertion)
4782 release_ssa_name (name);
4784 obstack_free (&vn_ssa_aux_obstack, NULL);
4785 vn_ssa_aux_table.release ();
4787 sccstack.release ();
4788 free_vn_table (valid_info);
4789 XDELETE (valid_info);
4790 free_vn_table (optimistic_info);
4791 XDELETE (optimistic_info);
4793 BITMAP_FREE (const_parms);
4796 /* Set *ID according to RESULT. */
4798 static void
4799 set_value_id_for_result (tree result, unsigned int *id)
4801 if (result && TREE_CODE (result) == SSA_NAME)
4802 *id = VN_INFO (result)->value_id;
4803 else if (result && is_gimple_min_invariant (result))
4804 *id = get_or_alloc_constant_value_id (result);
4805 else
4806 *id = get_next_value_id ();
4809 /* Set the value ids in the valid hash tables. */
4811 static void
4812 set_hashtable_value_ids (void)
4814 vn_nary_op_iterator_type hin;
4815 vn_phi_iterator_type hip;
4816 vn_reference_iterator_type hir;
4817 vn_nary_op_t vno;
4818 vn_reference_t vr;
4819 vn_phi_t vp;
4821 /* Now set the value ids of the things we had put in the hash
4822 table. */
4824 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4825 set_value_id_for_result (vno->result, &vno->value_id);
4827 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4828 set_value_id_for_result (vp->result, &vp->value_id);
4830 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4831 hir)
4832 set_value_id_for_result (vr->result, &vr->value_id);
4835 class sccvn_dom_walker : public dom_walker
4837 public:
4838 sccvn_dom_walker ()
4839 : dom_walker (CDI_DOMINATORS, REACHABLE_BLOCKS), cond_stack (0) {}
4841 virtual edge before_dom_children (basic_block);
4842 virtual void after_dom_children (basic_block);
4844 void record_cond (basic_block,
4845 enum tree_code code, tree lhs, tree rhs, bool value);
4846 void record_conds (basic_block,
4847 enum tree_code code, tree lhs, tree rhs, bool value);
4849 auto_vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4850 cond_stack;
4853 /* Record a temporary condition for the BB and its dominated blocks. */
4855 void
4856 sccvn_dom_walker::record_cond (basic_block bb,
4857 enum tree_code code, tree lhs, tree rhs,
4858 bool value)
4860 tree ops[2] = { lhs, rhs };
4861 vn_nary_op_t old = NULL;
4862 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4863 current_info->nary->remove_elt_with_hash (old, old->hashcode);
4864 vn_nary_op_t cond
4865 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4866 value
4867 ? boolean_true_node
4868 : boolean_false_node, 0);
4869 if (dump_file && (dump_flags & TDF_DETAILS))
4871 fprintf (dump_file, "Recording temporarily ");
4872 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4873 fprintf (dump_file, " %s ", get_tree_code_name (code));
4874 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4875 fprintf (dump_file, " == %s%s\n",
4876 value ? "true" : "false",
4877 old ? " (old entry saved)" : "");
4879 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4882 /* Record temporary conditions for the BB and its dominated blocks
4883 according to LHS CODE RHS == VALUE and its dominated conditions. */
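/* For example (illustrative), record_conds (bb, LT_EXPR, a, b, true)
   records a < b == true and in addition the dominated conditions
   a <= b == true, a != b == true and a == b == false, so lookups of
   those derived predicates succeed while BB and its children are
   walked.  */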
4885 void
4886 sccvn_dom_walker::record_conds (basic_block bb,
4887 enum tree_code code, tree lhs, tree rhs,
4888 bool value)
4890 /* Record the original condition. */
4891 record_cond (bb, code, lhs, rhs, value);
4893 if (!value)
4894 return;
4896 /* Record dominated conditions if the condition is true. Note that
4897 the inversion is already recorded. */
4898 switch (code)
4900 case LT_EXPR:
4901 case GT_EXPR:
4902 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4903 record_cond (bb, NE_EXPR, lhs, rhs, true);
4904 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4905 break;
4907 case EQ_EXPR:
4908 record_cond (bb, LE_EXPR, lhs, rhs, true);
4909 record_cond (bb, GE_EXPR, lhs, rhs, true);
4910 record_cond (bb, LT_EXPR, lhs, rhs, false);
4911 record_cond (bb, GT_EXPR, lhs, rhs, false);
4912 break;
4914 default:
4915 break;
4919 /* Restore expressions and values derived from conditionals. */
4921 void
4922 sccvn_dom_walker::after_dom_children (basic_block bb)
4924 while (!cond_stack.is_empty ()
4925 && cond_stack.last ().first == bb)
4927 vn_nary_op_t cond = cond_stack.last ().second.first;
4928 vn_nary_op_t old = cond_stack.last ().second.second;
4929 current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4930 if (old)
4931 vn_nary_op_insert_into (old, current_info->nary, false);
4932 cond_stack.pop ();
4936 /* Value number all statements in BB. */
4938 edge
4939 sccvn_dom_walker::before_dom_children (basic_block bb)
4941 if (dump_file && (dump_flags & TDF_DETAILS))
4942 fprintf (dump_file, "Visiting BB %d\n", bb->index);
4944 /* If we have a single predecessor record the equivalence from a
4945 possible condition on the predecessor edge. */
4946 edge pred_e = single_pred_edge_ignoring_loop_edges (bb, false);
4947 if (pred_e)
4949 /* Check if there are multiple executable successor edges in
4950 the source block. Otherwise there is no additional info
4951 to be recorded. */
4952 edge_iterator ei;
4953 edge e2;
4954 FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4955 if (e2 != pred_e
4956 && e2->flags & EDGE_EXECUTABLE)
4957 break;
4958 if (e2 && (e2->flags & EDGE_EXECUTABLE))
4960 gimple *stmt = last_stmt (pred_e->src);
4961 if (stmt
4962 && gimple_code (stmt) == GIMPLE_COND)
4964 enum tree_code code = gimple_cond_code (stmt);
4965 tree lhs = gimple_cond_lhs (stmt);
4966 tree rhs = gimple_cond_rhs (stmt);
4967 record_conds (bb, code, lhs, rhs,
4968 (pred_e->flags & EDGE_TRUE_VALUE) != 0);
4969 code = invert_tree_comparison (code, HONOR_NANS (lhs));
4970 if (code != ERROR_MARK)
4971 record_conds (bb, code, lhs, rhs,
4972 (pred_e->flags & EDGE_TRUE_VALUE) == 0);
4977 /* Value-number all defs in the basic-block. */
4978 for (gphi_iterator gsi = gsi_start_phis (bb);
4979 !gsi_end_p (gsi); gsi_next (&gsi))
4981 gphi *phi = gsi.phi ();
4982 tree res = PHI_RESULT (phi);
4983 if (!VN_INFO (res)->visited)
4984 DFS (res);
4986 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4987 !gsi_end_p (gsi); gsi_next (&gsi))
4989 ssa_op_iter i;
4990 tree op;
4991 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4992 if (!VN_INFO (op)->visited)
4993 DFS (op);
4996 /* Finally look at the last stmt. */
4997 gimple *stmt = last_stmt (bb);
4998 if (!stmt)
4999 return NULL;
5001 enum gimple_code code = gimple_code (stmt);
5002 if (code != GIMPLE_COND
5003 && code != GIMPLE_SWITCH
5004 && code != GIMPLE_GOTO)
5005 return NULL;
5007 if (dump_file && (dump_flags & TDF_DETAILS))
5009 fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
5010 print_gimple_stmt (dump_file, stmt, 0);
5013 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
5014 if value-numbering can prove they are not reachable. Handling
5015 computed gotos is also possible. */
5016 tree val;
5017 switch (code)
5019 case GIMPLE_COND:
5021 tree lhs = vn_valueize (gimple_cond_lhs (stmt));
5022 tree rhs = vn_valueize (gimple_cond_rhs (stmt));
5023 val = gimple_simplify (gimple_cond_code (stmt),
5024 boolean_type_node, lhs, rhs,
5025 NULL, vn_valueize);
5026 /* If that didn't simplify to a constant see if we have recorded
5027 temporary expressions from taken edges. */
5028 if (!val || TREE_CODE (val) != INTEGER_CST)
5030 tree ops[2];
5031 ops[0] = lhs;
5032 ops[1] = rhs;
5033 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
5034 boolean_type_node, ops, NULL);
5036 break;
5038 case GIMPLE_SWITCH:
5039 val = gimple_switch_index (as_a <gswitch *> (stmt));
5040 break;
5041 case GIMPLE_GOTO:
5042 val = gimple_goto_dest (stmt);
5043 break;
5044 default:
5045 gcc_unreachable ();
5047 if (!val)
5048 return NULL;
5050 edge taken = find_taken_edge (bb, vn_valueize (val));
5051 if (!taken)
5052 return NULL;
5054 if (dump_file && (dump_flags & TDF_DETAILS))
5055 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
5056 "not executable\n", bb->index, bb->index, taken->dest->index);
5058 return taken;
5061 /* Do SCCVN. DEFAULT_VN_WALK_KIND_ specifies how we use the
5062 alias oracle walking during the VN process. */
5065 void
5066 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
5068 size_t i;
5070 default_vn_walk_kind = default_vn_walk_kind_;
5072 init_scc_vn ();
5074 /* Collect pointers we know point to readonly memory. */
5075 const_parms = BITMAP_ALLOC (NULL);
5076 tree fnspec = lookup_attribute ("fn spec",
5077 TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
5078 if (fnspec)
5080 fnspec = TREE_VALUE (TREE_VALUE (fnspec));
5081 i = 1;
5082 for (tree arg = DECL_ARGUMENTS (cfun->decl);
5083 arg; arg = DECL_CHAIN (arg), ++i)
5085 if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
5086 break;
5087 if (TREE_STRING_POINTER (fnspec)[i] == 'R'
5088 || TREE_STRING_POINTER (fnspec)[i] == 'r')
5090 tree name = ssa_default_def (cfun, arg);
5091 if (name)
5092 bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
5097 /* Walk all blocks in dominator order, value-numbering stmts
5098 SSA defs and decide whether outgoing edges are not executable. */
5099 sccvn_dom_walker walker;
5100 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5102 /* Initialize the value ids and prune out remaining VN_TOPs
5103 from dead code. */
5104 tree name;
5105 FOR_EACH_SSA_NAME (i, name, cfun)
5107 vn_ssa_aux_t info = VN_INFO (name);
5108 if (!info->visited
5109 || info->valnum == VN_TOP)
5110 info->valnum = name;
5111 if (info->valnum == name)
5112 info->value_id = get_next_value_id ();
5113 else if (is_gimple_min_invariant (info->valnum))
5114 info->value_id = get_or_alloc_constant_value_id (info->valnum);
5117 /* Propagate. */
5118 FOR_EACH_SSA_NAME (i, name, cfun)
5120 vn_ssa_aux_t info = VN_INFO (name);
5121 if (TREE_CODE (info->valnum) == SSA_NAME
5122 && info->valnum != name
5123 && info->value_id != VN_INFO (info->valnum)->value_id)
5124 info->value_id = VN_INFO (info->valnum)->value_id;
5127 set_hashtable_value_ids ();
5129 if (dump_file && (dump_flags & TDF_DETAILS))
5131 fprintf (dump_file, "Value numbers:\n");
5132 FOR_EACH_SSA_NAME (i, name, cfun)
5134 if (VN_INFO (name)->visited
5135 && SSA_VAL (name) != name)
5137 print_generic_expr (dump_file, name);
5138 fprintf (dump_file, " = ");
5139 print_generic_expr (dump_file, SSA_VAL (name));
5140 fprintf (dump_file, "\n");
5146 /* Return the maximum value id we have ever seen. */
5148 unsigned int
5149 get_max_value_id (void)
5151 return next_value_id;
5154 /* Return the next unique value id. */
5156 unsigned int
5157 get_next_value_id (void)
5159 return next_value_id++;
5163 /* Compare two expressions E1 and E2 and return true if they are equal. */
5165 bool
5166 expressions_equal_p (tree e1, tree e2)
5168 /* The obvious case. */
5169 if (e1 == e2)
5170 return true;
5172 /* If either one is VN_TOP consider them equal. */
5173 if (e1 == VN_TOP || e2 == VN_TOP)
5174 return true;
5176 /* If only one of them is null, they cannot be equal. */
5177 if (!e1 || !e2)
5178 return false;
5180 /* Now perform the actual comparison. */
5181 if (TREE_CODE (e1) == TREE_CODE (e2)
5182 && operand_equal_p (e1, e2, OEP_PURE_SAME))
5183 return true;
5185 return false;
5189 /* Return true if the nary operation NARY may trap. This is a copy
5190 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
5192 bool
5193 vn_nary_may_trap (vn_nary_op_t nary)
5195 tree type;
5196 tree rhs2 = NULL_TREE;
5197 bool honor_nans = false;
5198 bool honor_snans = false;
5199 bool fp_operation = false;
5200 bool honor_trapv = false;
5201 bool handled, ret;
5202 unsigned i;
5204 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5205 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5206 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5208 type = nary->type;
5209 fp_operation = FLOAT_TYPE_P (type);
5210 if (fp_operation)
5212 honor_nans = flag_trapping_math && !flag_finite_math_only;
5213 honor_snans = flag_signaling_nans != 0;
5215 else if (INTEGRAL_TYPE_P (type)
5216 && TYPE_OVERFLOW_TRAPS (type))
5217 honor_trapv = true;
5219 if (nary->length >= 2)
5220 rhs2 = nary->op[1];
5221 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5222 honor_trapv,
5223 honor_nans, honor_snans, rhs2,
5224 &handled);
5225 if (handled
5226 && ret)
5227 return true;
5229 for (i = 0; i < nary->length; ++i)
5230 if (tree_could_trap_p (nary->op[i]))
5231 return true;
5233 return false;
5237 class eliminate_dom_walker : public dom_walker
5239 public:
5240 eliminate_dom_walker (cdi_direction, bitmap);
5241 ~eliminate_dom_walker ();
5243 virtual edge before_dom_children (basic_block);
5244 virtual void after_dom_children (basic_block);
5246 tree eliminate_avail (tree op);
5247 void eliminate_push_avail (tree op);
5248 tree eliminate_insert (gimple_stmt_iterator *gsi, tree val);
5250 bool do_pre;
5251 unsigned int el_todo;
5252 unsigned int eliminations;
5253 unsigned int insertions;
5255 /* SSA names that had their defs inserted by PRE if do_pre. */
5256 bitmap inserted_exprs;
5258 /* Blocks with statements that have had their EH properties changed. */
5259 bitmap need_eh_cleanup;
5261 /* Blocks with statements that have had their AB properties changed. */
5262 bitmap need_ab_cleanup;
5264 auto_vec<gimple *> to_remove;
5265 auto_vec<gimple *> to_fixup;
5266 auto_vec<tree> avail;
5267 auto_vec<tree> avail_stack;
5270 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
5271 bitmap inserted_exprs_)
5272 : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
5273 el_todo (0), eliminations (0), insertions (0),
5274 inserted_exprs (inserted_exprs_)
5276 need_eh_cleanup = BITMAP_ALLOC (NULL);
5277 need_ab_cleanup = BITMAP_ALLOC (NULL);
5280 eliminate_dom_walker::~eliminate_dom_walker ()
5282 BITMAP_FREE (need_eh_cleanup);
5283 BITMAP_FREE (need_ab_cleanup);
5286 /* Return a leader for OP that is available at the current point of the
5287 eliminate domwalk. */
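/* For example (hypothetical SSA names), if y_5 has value number x_3
   and the definition of x_3 was pushed by eliminate_push_avail on the
   current domwalk path, eliminate_avail (y_5) returns x_3 as the
   leader to substitute for uses of y_5.  */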
5289 tree
5290 eliminate_dom_walker::eliminate_avail (tree op)
5292 tree valnum = VN_INFO (op)->valnum;
5293 if (TREE_CODE (valnum) == SSA_NAME)
5295 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5296 return valnum;
5297 if (avail.length () > SSA_NAME_VERSION (valnum))
5298 return avail[SSA_NAME_VERSION (valnum)];
5300 else if (is_gimple_min_invariant (valnum))
5301 return valnum;
5302 return NULL_TREE;
5305 /* At the current point of the eliminate domwalk make OP available. */
5307 void
5308 eliminate_dom_walker::eliminate_push_avail (tree op)
5310 tree valnum = VN_INFO (op)->valnum;
5311 if (TREE_CODE (valnum) == SSA_NAME)
5313 if (avail.length () <= SSA_NAME_VERSION (valnum))
5314 avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
5315 tree pushop = op;
5316 if (avail[SSA_NAME_VERSION (valnum)])
5317 pushop = avail[SSA_NAME_VERSION (valnum)];
5318 avail_stack.safe_push (pushop);
5319 avail[SSA_NAME_VERSION (valnum)] = op;
5323 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
5324 the leader for the expression if insertion was successful. */
5326 tree
5327 eliminate_dom_walker::eliminate_insert (gimple_stmt_iterator *gsi, tree val)
5329 /* We can insert a sequence with a single assignment only. */
5330 gimple_seq stmts = VN_INFO (val)->expr;
5331 if (!gimple_seq_singleton_p (stmts))
5332 return NULL_TREE;
5333 gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
5334 if (!stmt
5335 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
5336 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
5337 && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
5338 && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
5339 || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
5340 return NULL_TREE;
5342 tree op = gimple_assign_rhs1 (stmt);
5343 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
5344 || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5345 op = TREE_OPERAND (op, 0);
5346 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
5347 if (!leader)
5348 return NULL_TREE;
5350 tree res;
5351 stmts = NULL;
5352 if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5353 res = gimple_build (&stmts, BIT_FIELD_REF,
5354 TREE_TYPE (val), leader,
5355 TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
5356 TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
5357 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
5358 res = gimple_build (&stmts, BIT_AND_EXPR,
5359 TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
5360 else
5361 res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
5362 TREE_TYPE (val), leader);
5363 if (TREE_CODE (res) != SSA_NAME
5364 || SSA_NAME_IS_DEFAULT_DEF (res)
5365 || gimple_bb (SSA_NAME_DEF_STMT (res)))
5367 gimple_seq_discard (stmts);
5369 /* During propagation we have to treat SSA info conservatively
5370 and thus we can end up simplifying the inserted expression
5371 at elimination time to something not defined in stmts. */
5372 /* But then this is a redundancy we failed to detect. Which means
5373 res now has two values. That doesn't play well with how
5374 we track availability here, so give up. */
5375 if (dump_file && (dump_flags & TDF_DETAILS))
5377 if (TREE_CODE (res) == SSA_NAME)
5378 res = eliminate_avail (res);
5379 if (res)
5381 fprintf (dump_file, "Failed to insert expression for value ");
5382 print_generic_expr (dump_file, val);
5383 fprintf (dump_file, " which is really fully redundant to ");
5384 print_generic_expr (dump_file, res);
5385 fprintf (dump_file, "\n");
5389 return NULL_TREE;
5391 else
5393 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
5394 VN_INFO_GET (res)->valnum = val;
5397 insertions++;
5398 if (dump_file && (dump_flags & TDF_DETAILS))
5400 fprintf (dump_file, "Inserted ");
5401 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
5404 return res;
5409 /* Perform elimination for the basic-block B during the domwalk. */
5411 edge
5412 eliminate_dom_walker::before_dom_children (basic_block b)
5414 /* Mark new bb. */
5415 avail_stack.safe_push (NULL_TREE);
5417 /* Skip unreachable blocks marked unreachable during the SCCVN domwalk. */
5418 edge_iterator ei;
5419 edge e;
5420 FOR_EACH_EDGE (e, ei, b->preds)
5421 if (e->flags & EDGE_EXECUTABLE)
5422 break;
5423 if (! e)
5424 return NULL;
5426 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
5428 gphi *phi = gsi.phi ();
5429 tree res = PHI_RESULT (phi);
5431 if (virtual_operand_p (res))
5433 gsi_next (&gsi);
5434 continue;
5437 tree sprime = eliminate_avail (res);
5438 if (sprime
5439 && sprime != res)
5441 if (dump_file && (dump_flags & TDF_DETAILS))
5443 fprintf (dump_file, "Replaced redundant PHI node defining ");
5444 print_generic_expr (dump_file, res);
5445 fprintf (dump_file, " with ");
5446 print_generic_expr (dump_file, sprime);
5447 fprintf (dump_file, "\n");
5450 /* If we inserted this PHI node ourself, it's not an elimination. */
5451 if (! inserted_exprs
5452 || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
5453 eliminations++;
5455 /* If we will propagate into all uses don't bother to do
5456 anything. */
5457 if (may_propagate_copy (res, sprime))
5459 /* Mark the PHI for removal. */
5460 to_remove.safe_push (phi);
5461 gsi_next (&gsi);
5462 continue;
5465 remove_phi_node (&gsi, false);
5467 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
5468 sprime = fold_convert (TREE_TYPE (res), sprime);
5469 gimple *stmt = gimple_build_assign (res, sprime);
5470 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
5471 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
5472 continue;
5475 eliminate_push_avail (res);
5476 gsi_next (&gsi);
5479 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
5480 !gsi_end_p (gsi);
5481 gsi_next (&gsi))
5483 tree sprime = NULL_TREE;
5484 gimple *stmt = gsi_stmt (gsi);
5485 tree lhs = gimple_get_lhs (stmt);
5486 if (lhs && TREE_CODE (lhs) == SSA_NAME
5487 && !gimple_has_volatile_ops (stmt)
5488 /* See PR43491. Do not replace a global register variable when
5489 it is the RHS of an assignment. Do replace local register
5490 variables since gcc does not guarantee a local variable will
5491 be allocated in a register.
5492 ??? The fix isn't effective here. This should instead
5493 be ensured by not value-numbering them the same but treating
5494 them like volatiles? */
5495 && !(gimple_assign_single_p (stmt)
5496 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
5497 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
5498 && is_global_var (gimple_assign_rhs1 (stmt)))))
5500 sprime = eliminate_avail (lhs);
5501 if (!sprime)
5503 /* If there is no existing usable leader but SCCVN thinks
5504 it has an expression it wants to use as replacement,
5505 insert that. */
5506 tree val = VN_INFO (lhs)->valnum;
5507 if (val != VN_TOP
5508 && TREE_CODE (val) == SSA_NAME
5509 && VN_INFO (val)->needs_insertion
5510 && VN_INFO (val)->expr != NULL
5511 && (sprime = eliminate_insert (&gsi, val)) != NULL_TREE)
5512 eliminate_push_avail (sprime);
5515 /* If this now constitutes a copy, duplicate points-to
5516 and range info appropriately. This is especially
5517 important for inserted code. See tree-ssa-copy.c
5518 for similar code. */
5519 if (sprime
5520 && TREE_CODE (sprime) == SSA_NAME)
5522 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
5523 if (POINTER_TYPE_P (TREE_TYPE (lhs))
5524 && VN_INFO_PTR_INFO (lhs)
5525 && ! VN_INFO_PTR_INFO (sprime))
5527 duplicate_ssa_name_ptr_info (sprime,
5528 VN_INFO_PTR_INFO (lhs));
5529 if (b != sprime_b)
5530 mark_ptr_info_alignment_unknown
5531 (SSA_NAME_PTR_INFO (sprime));
5533 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5534 && VN_INFO_RANGE_INFO (lhs)
5535 && ! VN_INFO_RANGE_INFO (sprime)
5536 && b == sprime_b)
5537 duplicate_ssa_name_range_info (sprime,
5538 VN_INFO_RANGE_TYPE (lhs),
5539 VN_INFO_RANGE_INFO (lhs));
5542 /* Inhibit the use of an inserted PHI on a loop header when
5543 the address of the memory reference is a simple induction
5544 variable. In other cases the vectorizer won't do anything
5545 anyway (either it's loop invariant or a complicated
5546 expression). */
5547 if (sprime
5548 && TREE_CODE (sprime) == SSA_NAME
5549 && do_pre
5550 && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
5551 && loop_outer (b->loop_father)
5552 && has_zero_uses (sprime)
5553 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
5554 && gimple_assign_load_p (stmt))
5556 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
5557 basic_block def_bb = gimple_bb (def_stmt);
5558 if (gimple_code (def_stmt) == GIMPLE_PHI
5559 && def_bb->loop_father->header == def_bb)
5561 loop_p loop = def_bb->loop_father;
5562 ssa_op_iter iter;
5563 tree op;
5564 bool found = false;
5565 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5567 affine_iv iv;
5568 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
5569 if (def_bb
5570 && flow_bb_inside_loop_p (loop, def_bb)
5571 && simple_iv (loop, loop, op, &iv, true))
5573 found = true;
5574 break;
5577 if (found)
5579 if (dump_file && (dump_flags & TDF_DETAILS))
5581 fprintf (dump_file, "Not replacing ");
5582 print_gimple_expr (dump_file, stmt, 0);
5583 fprintf (dump_file, " with ");
5584 print_generic_expr (dump_file, sprime);
5585 fprintf (dump_file, " which would add a loop"
5586 " carried dependence to loop %d\n",
5587 loop->num);
5589 /* Don't keep sprime available. */
5590 sprime = NULL_TREE;
5595 if (sprime)
5597 /* If we can propagate the value computed for LHS into
5598 all uses don't bother doing anything with this stmt. */
5599 if (may_propagate_copy (lhs, sprime))
5601 /* Mark it for removal. */
5602 to_remove.safe_push (stmt);
5604 /* ??? Don't count copy/constant propagations. */
5605 if (gimple_assign_single_p (stmt)
5606 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5607 || gimple_assign_rhs1 (stmt) == sprime))
5608 continue;
5610 if (dump_file && (dump_flags & TDF_DETAILS))
5612 fprintf (dump_file, "Replaced ");
5613 print_gimple_expr (dump_file, stmt, 0);
5614 fprintf (dump_file, " with ");
5615 print_generic_expr (dump_file, sprime);
5616 fprintf (dump_file, " in all uses of ");
5617 print_gimple_stmt (dump_file, stmt, 0);
5620 eliminations++;
5621 continue;
5624 /* If this is an assignment from our leader (which
5625 happens in the case the value-number is a constant)
5626 then there is nothing to do. */
5627 if (gimple_assign_single_p (stmt)
5628 && sprime == gimple_assign_rhs1 (stmt))
5629 continue;
5631 /* Else replace its RHS. */
5632 bool can_make_abnormal_goto
5633 = is_gimple_call (stmt)
5634 && stmt_can_make_abnormal_goto (stmt);
5636 if (dump_file && (dump_flags & TDF_DETAILS))
5638 fprintf (dump_file, "Replaced ");
5639 print_gimple_expr (dump_file, stmt, 0);
5640 fprintf (dump_file, " with ");
5641 print_generic_expr (dump_file, sprime);
5642 fprintf (dump_file, " in ");
5643 print_gimple_stmt (dump_file, stmt, 0);
5646 eliminations++;
5647 gimple *orig_stmt = stmt;
5648 if (!useless_type_conversion_p (TREE_TYPE (lhs),
5649 TREE_TYPE (sprime)))
5650 sprime = fold_convert (TREE_TYPE (lhs), sprime);
5651 tree vdef = gimple_vdef (stmt);
5652 tree vuse = gimple_vuse (stmt);
5653 propagate_tree_value_into_stmt (&gsi, sprime);
5654 stmt = gsi_stmt (gsi);
5655 update_stmt (stmt);
5656 if (vdef != gimple_vdef (stmt))
5657 VN_INFO (vdef)->valnum = vuse;
5659 /* If we removed EH side-effects from the statement, clean
5660 its EH information. */
5661 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
5663 bitmap_set_bit (need_eh_cleanup,
5664 gimple_bb (stmt)->index);
5665 if (dump_file && (dump_flags & TDF_DETAILS))
5666 fprintf (dump_file, " Removed EH side-effects.\n");
5669 /* Likewise for AB side-effects. */
5670 if (can_make_abnormal_goto
5671 && !stmt_can_make_abnormal_goto (stmt))
5673 bitmap_set_bit (need_ab_cleanup,
5674 gimple_bb (stmt)->index);
5675 if (dump_file && (dump_flags & TDF_DETAILS))
5676 fprintf (dump_file, " Removed AB side-effects.\n");
5679 continue;
5683 /* If the statement is a scalar store, see if the expression
5684 has the same value number as its rhs. If so, the store is
5685 dead. */
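	/* Illustrative (hypothetical) example:

	     x_1 = *p_2;
	     ...
	     *p_2 = x_1;

	   If nothing clobbers *p_2 in between, the stored value has the
	   same value number as what the location already contains and the
	   store is queued for removal below.  */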
5686 if (gimple_assign_single_p (stmt)
5687 && !gimple_has_volatile_ops (stmt)
5688 && !is_gimple_reg (gimple_assign_lhs (stmt))
5689 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5690 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
5692 tree val;
5693 tree rhs = gimple_assign_rhs1 (stmt);
5694 vn_reference_t vnresult;
5695 val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
5696 &vnresult, false);
5697 if (TREE_CODE (rhs) == SSA_NAME)
5698 rhs = VN_INFO (rhs)->valnum;
5699 if (val
5700 && operand_equal_p (val, rhs, 0))
5702 /* We can only remove the later store if the former aliases
5703 at least all accesses the later one does or if the store
5704 was to readonly memory storing the same value. */
5705 alias_set_type set = get_alias_set (lhs);
5706 if (! vnresult
5707 || vnresult->set == set
5708 || alias_set_subset_of (set, vnresult->set))
5710 if (dump_file && (dump_flags & TDF_DETAILS))
5712 fprintf (dump_file, "Deleted redundant store ");
5713 print_gimple_stmt (dump_file, stmt, 0);
5716 /* Queue stmt for removal. */
5717 to_remove.safe_push (stmt);
5718 continue;
5723 /* If this is a control statement and value numbering left one of
5724 its outgoing edges not executable, force the condition in a way
5725 consistent with that. */
5726 if (gcond *cond = dyn_cast <gcond *> (stmt))
5728 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
5729 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
5731 if (dump_file && (dump_flags & TDF_DETAILS))
5733 fprintf (dump_file, "Removing unexecutable edge from ");
5734 print_gimple_stmt (dump_file, stmt, 0);
5736 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
5737 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
5738 gimple_cond_make_true (cond);
5739 else
5740 gimple_cond_make_false (cond);
5741 update_stmt (cond);
5742 el_todo |= TODO_cleanup_cfg;
5743 continue;
5747 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
5748 bool was_noreturn = (is_gimple_call (stmt)
5749 && gimple_call_noreturn_p (stmt));
5750 tree vdef = gimple_vdef (stmt);
5751 tree vuse = gimple_vuse (stmt);
5753 /* If we didn't replace the whole stmt (or propagate the result
5754 into all uses), replace all uses on this stmt with their
5755 leaders. */
5756 bool modified = false;
5757 use_operand_p use_p;
5758 ssa_op_iter iter;
5759 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5761 tree use = USE_FROM_PTR (use_p);
5762 /* ??? The call code above leaves stmt operands un-updated. */
5763 if (TREE_CODE (use) != SSA_NAME)
5764 continue;
5765 tree sprime = eliminate_avail (use);
5766 if (sprime && sprime != use
5767 && may_propagate_copy (use, sprime)
5768 /* We substitute into debug stmts to avoid excessive
5769 debug temporaries created by removed stmts, but we need
5770 to avoid doing so for inserted sprimes as we never want
5771 to create debug temporaries for them. */
5772 && (!inserted_exprs
5773 || TREE_CODE (sprime) != SSA_NAME
5774 || !is_gimple_debug (stmt)
5775 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
5777 propagate_value (use_p, sprime);
5778 modified = true;
5782 /* Fold the stmt if modified; this canonicalizes MEM_REFs we propagated
5783 into, which is a requirement for the IPA devirt machinery. */
5784 gimple *old_stmt = stmt;
5785 if (modified)
5787 /* If a formerly non-invariant ADDR_EXPR is turned into an
5788 invariant one it was on a separate stmt. */
5789 if (gimple_assign_single_p (stmt)
5790 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
5791 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
5792 gimple_stmt_iterator prev = gsi;
5793 gsi_prev (&prev);
5794 if (fold_stmt (&gsi))
5796 /* fold_stmt may have created new stmts inbetween
5797 the previous stmt and the folded stmt. Mark
5798 all defs created there as varying to not confuse
5799 the SCCVN machinery as we're using that even during
5800 elimination. */
5801 if (gsi_end_p (prev))
5802 prev = gsi_start_bb (b);
5803 else
5804 gsi_next (&prev);
5805 if (gsi_stmt (prev) != gsi_stmt (gsi))
5808 tree def;
5809 ssa_op_iter dit;
5810 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
5811 dit, SSA_OP_ALL_DEFS)
5812 /* As existing DEFs may move between stmts
5813 we have to guard VN_INFO_GET. */
5814 if (! has_VN_INFO (def))
5815 VN_INFO_GET (def)->valnum = def;
5816 if (gsi_stmt (prev) == gsi_stmt (gsi))
5817 break;
5818 gsi_next (&prev);
5820 while (1);
5822 stmt = gsi_stmt (gsi);
5823 /* In case we folded the stmt away schedule the NOP for removal. */
5824 if (gimple_nop_p (stmt))
5825 to_remove.safe_push (stmt);
5828 /* Visit indirect calls and turn them into direct calls if
5829 possible using the devirtualization machinery. Do this before
5830 checking for required EH/abnormal/noreturn cleanup as devirtualization
5831 may expose more of those. */
5832 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5833 {
5834 tree fn = gimple_call_fn (call_stmt);
5835 if (fn
5836 && flag_devirtualize
5837 && virtual_method_call_p (fn))
5838 {
5839 tree otr_type = obj_type_ref_class (fn);
5840 unsigned HOST_WIDE_INT otr_tok
5841 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
5842 tree instance;
5843 ipa_polymorphic_call_context context (current_function_decl,
5844 fn, stmt, &instance);
5845 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
5846 otr_type, stmt);
5847 bool final;
5848 vec <cgraph_node *> targets
5849 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
5850 otr_tok, context, &final);
5851 if (dump_file)
5852 dump_possible_polymorphic_call_targets (dump_file,
5853 obj_type_ref_class (fn),
5854 otr_tok, context);
5855 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5856 {
5857 tree fn;
5858 if (targets.length () == 1)
5859 fn = targets[0]->decl;
5860 else
5861 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5862 if (dump_enabled_p ())
5863 {
5864 location_t loc = gimple_location (stmt);
5865 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
5866 "converting indirect call to "
5867 "function %s\n",
5868 lang_hooks.decl_printable_name (fn, 2));
5869 }
5870 gimple_call_set_fndecl (call_stmt, fn);
5871 /* If changing the call to __builtin_unreachable
5872 or a similar noreturn function, adjust gimple_call_fntype
5873 too. */
5874 if (gimple_call_noreturn_p (call_stmt)
5875 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
5876 && TYPE_ARG_TYPES (TREE_TYPE (fn))
5877 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
5878 == void_type_node))
5879 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
5880 maybe_remove_unused_call_args (cfun, call_stmt);
5881 modified = true;
5882 }
5883 }
5884 }
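/* Sketch of the two outcomes above: with exactly one possible target the
   OBJ_TYPE_REF call is turned into a direct call to that method's decl;
   with no possible targets it is redirected to __builtin_unreachable.  */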
5886 if (modified)
5887 {
5888 /* When changing a call into a noreturn call, cfg cleanup
5889 is needed to fix up the noreturn call. */
5890 if (!was_noreturn
5891 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
5892 to_fixup.safe_push (stmt);
5893 /* When changing a condition or switch into one where we know which
5894 edge will be executed, schedule a cfg cleanup. */
5895 if ((gimple_code (stmt) == GIMPLE_COND
5896 && (gimple_cond_true_p (as_a <gcond *> (stmt))
5897 || gimple_cond_false_p (as_a <gcond *> (stmt))))
5898 || (gimple_code (stmt) == GIMPLE_SWITCH
5899 && TREE_CODE (gimple_switch_index
5900 (as_a <gswitch *> (stmt))) == INTEGER_CST))
5901 el_todo |= TODO_cleanup_cfg;
5902 /* If we removed EH side-effects from the statement, clean
5903 its EH information. */
5904 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
5905 {
5906 bitmap_set_bit (need_eh_cleanup,
5907 gimple_bb (stmt)->index);
5908 if (dump_file && (dump_flags & TDF_DETAILS))
5909 fprintf (dump_file, " Removed EH side-effects.\n");
5910 }
5911 /* Likewise for AB side-effects. */
5912 if (can_make_abnormal_goto
5913 && !stmt_can_make_abnormal_goto (stmt))
5914 {
5915 bitmap_set_bit (need_ab_cleanup,
5916 gimple_bb (stmt)->index);
5917 if (dump_file && (dump_flags & TDF_DETAILS))
5918 fprintf (dump_file, " Removed AB side-effects.\n");
5919 }
5920 update_stmt (stmt);
5921 if (vdef != gimple_vdef (stmt))
5922 VN_INFO (vdef)->valnum = vuse;
5923 }
5925 /* Make new values available; for a fully redundant LHS we already
5926 continued to the next stmt above and skip this. */
5927 def_operand_p defp;
5928 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
5929 eliminate_push_avail (DEF_FROM_PTR (defp));
5930 }
5932 /* Replace destination PHI arguments. */
5933 FOR_EACH_EDGE (e, ei, b->succs)
5934 if (e->flags & EDGE_EXECUTABLE)
5935 for (gphi_iterator gsi = gsi_start_phis (e->dest);
5936 !gsi_end_p (gsi);
5937 gsi_next (&gsi))
5938 {
5939 gphi *phi = gsi.phi ();
5940 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
5941 tree arg = USE_FROM_PTR (use_p);
5942 if (TREE_CODE (arg) != SSA_NAME
5943 || virtual_operand_p (arg))
5944 continue;
5945 tree sprime = eliminate_avail (arg);
5946 if (sprime && may_propagate_copy (arg, sprime))
5947 propagate_value (use_p, sprime);
5948 }
5949 return NULL;
5950 }
5952 /* Make leaders that no longer dominate the walk no longer available. */
5954 void
5955 eliminate_dom_walker::after_dom_children (basic_block)
5956 {
5957 tree entry;
5958 while ((entry = avail_stack.pop ()) != NULL_TREE)
5959 {
5960 tree valnum = VN_INFO (entry)->valnum;
5961 tree old = avail[SSA_NAME_VERSION (valnum)];
5962 if (old == entry)
5963 avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
5964 else
5965 avail[SSA_NAME_VERSION (valnum)] = entry;
5966 }
5967 }
5969 /* Eliminate fully redundant computations. */
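/* Overview, as read from the code below: vn_eliminate runs the
   eliminate_dom_walker defined above, then processes the queued stmt
   removals and noreturn-call fixups (removing stmts during the walk would
   confuse the VN machinery), and finally purges dead EH/abnormal edges
   where needed.  */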
5971 unsigned int
5972 vn_eliminate (bitmap inserted_exprs)
5973 {
5974 eliminate_dom_walker el (CDI_DOMINATORS, inserted_exprs);
5975 el.avail.reserve (num_ssa_names);
5977 el.walk (cfun->cfg->x_entry_block_ptr);
5979 /* We cannot remove stmts during the BB walk, especially not release
5980 SSA names there, as this confuses the VN machinery. The stmts ending
5981 up in to_remove are either stores or simple copies.
5982 Remove stmts in reverse order to make debug stmt creation possible. */
5983 while (!el.to_remove.is_empty ())
5984 {
5985 gimple *stmt = el.to_remove.pop ();
5987 if (dump_file && (dump_flags & TDF_DETAILS))
5988 {
5989 fprintf (dump_file, "Removing dead stmt ");
5990 print_gimple_stmt (dump_file, stmt, 0, 0);
5991 }
5993 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5994 if (gimple_code (stmt) == GIMPLE_PHI)
5995 remove_phi_node (&gsi, true);
5996 else
5997 {
5998 basic_block bb = gimple_bb (stmt);
5999 unlink_stmt_vdef (stmt);
6000 if (gsi_remove (&gsi, true))
6001 bitmap_set_bit (el.need_eh_cleanup, bb->index);
6002 if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
6003 bitmap_set_bit (el.need_ab_cleanup, bb->index);
6004 release_defs (stmt);
6005 }
6007 /* Removing a stmt may expose a forwarder block. */
6008 el.el_todo |= TODO_cleanup_cfg;
6009 }
6011 /* Fix up stmts that became noreturn calls. This may require splitting
6012 blocks and thus isn't possible during the dominator walk. Do this
6013 in reverse order so we don't inadvertently remove a stmt we want to
6014 fix up by visiting a dominating now-noreturn call first. */
6015 while (!el.to_fixup.is_empty ())
6016 {
6017 gimple *stmt = el.to_fixup.pop ();
6019 if (dump_file && (dump_flags & TDF_DETAILS))
6020 {
6021 fprintf (dump_file, "Fixing up noreturn call ");
6022 print_gimple_stmt (dump_file, stmt, 0);
6023 }
6025 if (fixup_noreturn_call (stmt))
6026 el.el_todo |= TODO_cleanup_cfg;
6027 }
6029 bool do_eh_cleanup = !bitmap_empty_p (el.need_eh_cleanup);
6030 bool do_ab_cleanup = !bitmap_empty_p (el.need_ab_cleanup);
6032 if (do_eh_cleanup)
6033 gimple_purge_all_dead_eh_edges (el.need_eh_cleanup);
6035 if (do_ab_cleanup)
6036 gimple_purge_all_dead_abnormal_call_edges (el.need_ab_cleanup);
6038 if (do_eh_cleanup || do_ab_cleanup)
6039 el.el_todo |= TODO_cleanup_cfg;
6041 statistics_counter_event (cfun, "Eliminated", el.eliminations);
6042 statistics_counter_event (cfun, "Insertions", el.insertions);
6044 return el.el_todo;
6045 }
6048 namespace {
6050 const pass_data pass_data_fre =
6051 {
6052 GIMPLE_PASS, /* type */
6053 "fre", /* name */
6054 OPTGROUP_NONE, /* optinfo_flags */
6055 TV_TREE_FRE, /* tv_id */
6056 ( PROP_cfg | PROP_ssa ), /* properties_required */
6057 0, /* properties_provided */
6058 0, /* properties_destroyed */
6059 0, /* todo_flags_start */
6060 0, /* todo_flags_finish */
6061 };
6063 class pass_fre : public gimple_opt_pass
6064 {
6065 public:
6066 pass_fre (gcc::context *ctxt)
6067 : gimple_opt_pass (pass_data_fre, ctxt)
6068 {}
6070 /* opt_pass methods: */
6071 opt_pass * clone () { return new pass_fre (m_ctxt); }
6072 virtual bool gate (function *) { return flag_tree_fre != 0; }
6073 virtual unsigned int execute (function *);
6075 }; // class pass_fre
6077 unsigned int
6078 pass_fre::execute (function *)
6079 {
6080 unsigned int todo = 0;
6082 run_scc_vn (VN_WALKREWRITE);
6084 /* Remove all the redundant expressions. */
6085 todo |= vn_eliminate (NULL);
6087 scc_vn_restore_ssa_info ();
6088 free_scc_vn ();
6090 return todo;
6091 }
6093 } // anon namespace
6095 gimple_opt_pass *
6096 make_pass_fre (gcc::context *ctxt)
6097 {
6098 return new pass_fre (ctxt);