gcc/tree-ssa-sccvn.c
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2018 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimplify.h"
45 #include "flags.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "dumpfile.h"
55 #include "cfgloop.h"
56 #include "params.h"
57 #include "tree-ssa-propagate.h"
58 #include "tree-cfg.h"
59 #include "domwalk.h"
60 #include "gimple-iterator.h"
61 #include "gimple-match.h"
62 #include "stringpool.h"
63 #include "attribs.h"
64 #include "tree-pass.h"
65 #include "statistics.h"
66 #include "langhooks.h"
67 #include "ipa-utils.h"
68 #include "dbgcnt.h"
69 #include "tree-cfgcleanup.h"
70 #include "tree-ssa-loop.h"
71 #include "tree-scalar-evolution.h"
72 #include "tree-ssa-sccvn.h"
74 /* This algorithm is based on the SCC algorithm presented by Keith
75 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
76 (http://citeseer.ist.psu.edu/41805.html). In
77 straight line code, it is equivalent to a regular hash based value
78 numbering that is performed in reverse postorder.
80 For code with cycles, there are two alternatives, both of which
81 require keeping the hashtables separate from the actual list of
82 value numbers for SSA names.
84 1. Iterate value numbering in an RPO walk of the blocks, removing
85 all the entries from the hashtable after each iteration (but
86 keeping the SSA name->value number mapping between iterations).
87 Iterate until it does not change.
89 2. Perform value numbering as part of an SCC walk on the SSA graph,
90 iterating only the cycles in the SSA graph until they do not change
91 (using a separate, optimistic hashtable for value numbering the SCC
92 operands).
94 The second is not just faster in practice (because most SSA graph
95 cycles do not involve all the variables in the graph), it also has
96 some nice properties.
98 One of these nice properties is that when we pop an SCC off the
99 stack, we are guaranteed to have processed all the operands coming from
100 *outside of that SCC*, so we do not need to do anything special to
101 ensure they have value numbers.
103 Another nice property is that the SCC walk is done as part of a DFS
104 of the SSA graph, which makes it easy to perform combining and
105 simplifying operations at the same time.
107 The code below is deliberately written in a way that makes it easy
108 to separate the SCC walk from the other work it does.
110 In order to propagate constants through the code, we track which
111 expressions contain constants, and use those while folding. In
112 theory, we could also track expressions whose value numbers are
113 replaced, in case we end up folding based on expression
114 identities.
116 In order to value number memory, we assign value numbers to vuses.
117 This enables us to note that, for example, stores to the same
118 address of the same value from the same starting memory states are
119 equivalent.
120 TODO:
122 1. We can iterate only the changing portions of the SCC's, but
123 I have not seen an SCC big enough for this to be a win.
124 2. If you differentiate between phi nodes for loops and phi nodes
125 for if-then-else, you can properly consider phi nodes in different
126 blocks for equivalence.
127 3. We could value number vuses in more cases, particularly, whole
128 structure copies.
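/* As a rough illustration of alternative 1 above (not part of this
   pass), a hash-based value numbering could be iterated over the
   blocks in reverse postorder until the SSA name -> value number map
   stops changing, clearing the expression table between iterations.
   The sketch below is hypothetical: clear_expression_table,
   n_blocks_in_rpo, block_in_rpo_order, lookup_or_add_expression,
   current_value and set_current_value do not exist in this file.  */
#if 0
static void
rpo_vn_iterate_sketch (void)
{
  bool changed;
  do
    {
      changed = false;
      /* Forget expression -> value mappings, but keep the
	 SSA name -> value number map between iterations.  */
      clear_expression_table ();
      for (unsigned i = 0; i < n_blocks_in_rpo; ++i)
	{
	  basic_block bb = block_in_rpo_order (i);
	  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	       !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple *stmt = gsi_stmt (gsi);
	      tree lhs = gimple_get_lhs (stmt);
	      if (!lhs || TREE_CODE (lhs) != SSA_NAME)
		continue;
	      tree val = lookup_or_add_expression (stmt);
	      if (val != current_value (lhs))
		{
		  set_current_value (lhs, val);
		  changed = true;
		}
	    }
	}
    }
  while (changed);
}
#endif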
132 static tree *last_vuse_ptr;
133 static vn_lookup_kind vn_walk_kind;
134 static vn_lookup_kind default_vn_walk_kind;
135 bitmap const_parms;
137 /* vn_nary_op hashtable helpers. */
139 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
141 typedef vn_nary_op_s *compare_type;
142 static inline hashval_t hash (const vn_nary_op_s *);
143 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
146 /* Return the computed hashcode for nary operation P1. */
148 inline hashval_t
149 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
151 return vno1->hashcode;
154 /* Compare nary operations P1 and P2 and return true if they are
155 equivalent. */
157 inline bool
158 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
160 return vn_nary_op_eq (vno1, vno2);
163 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
164 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
167 /* vn_phi hashtable helpers. */
169 static int
170 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
172 struct vn_phi_hasher : pointer_hash <vn_phi_s>
174 static inline hashval_t hash (const vn_phi_s *);
175 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
176 static inline void remove (vn_phi_s *);
179 /* Return the computed hashcode for phi operation P1. */
181 inline hashval_t
182 vn_phi_hasher::hash (const vn_phi_s *vp1)
184 return vp1->hashcode;
187 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
189 inline bool
190 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
192 return vn_phi_eq (vp1, vp2);
195 /* Free a phi operation structure VP. */
197 inline void
198 vn_phi_hasher::remove (vn_phi_s *phi)
200 phi->phiargs.release ();
203 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
204 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
207 /* Compare two reference operands P1 and P2 for equality. Return true if
208 they are equal, and false otherwise. */
210 static int
211 vn_reference_op_eq (const void *p1, const void *p2)
213 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
214 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
216 return (vro1->opcode == vro2->opcode
217 /* We do not care for differences in type qualification. */
218 && (vro1->type == vro2->type
219 || (vro1->type && vro2->type
220 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
221 TYPE_MAIN_VARIANT (vro2->type))))
222 && expressions_equal_p (vro1->op0, vro2->op0)
223 && expressions_equal_p (vro1->op1, vro2->op1)
224 && expressions_equal_p (vro1->op2, vro2->op2));
227 /* Free a reference operation structure VP. */
229 static inline void
230 free_reference (vn_reference_s *vr)
232 vr->operands.release ();
236 /* vn_reference hashtable helpers. */
238 struct vn_reference_hasher : pointer_hash <vn_reference_s>
240 static inline hashval_t hash (const vn_reference_s *);
241 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
242 static inline void remove (vn_reference_s *);
245 /* Return the hashcode for a given reference operation P1. */
247 inline hashval_t
248 vn_reference_hasher::hash (const vn_reference_s *vr1)
250 return vr1->hashcode;
253 inline bool
254 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
256 return vn_reference_eq (v, c);
259 inline void
260 vn_reference_hasher::remove (vn_reference_s *v)
262 free_reference (v);
265 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
266 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
269 /* The set of hashtables and alloc_pool's for their items. */
271 typedef struct vn_tables_s
273 vn_nary_op_table_type *nary;
274 vn_phi_table_type *phis;
275 vn_reference_table_type *references;
276 struct obstack nary_obstack;
277 object_allocator<vn_phi_s> *phis_pool;
278 object_allocator<vn_reference_s> *references_pool;
279 } *vn_tables_t;
282 /* vn_constant hashtable helpers. */
284 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
286 static inline hashval_t hash (const vn_constant_s *);
287 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
290 /* Hash table hash function for vn_constant_t. */
292 inline hashval_t
293 vn_constant_hasher::hash (const vn_constant_s *vc1)
295 return vc1->hashcode;
298 /* Hash table equality function for vn_constant_t. */
300 inline bool
301 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
303 if (vc1->hashcode != vc2->hashcode)
304 return false;
306 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
309 static hash_table<vn_constant_hasher> *constant_to_value_id;
310 static bitmap constant_value_ids;
313 /* Valid hashtables storing information we have proven to be
314 correct. */
316 static vn_tables_t valid_info;
318 /* Optimistic hashtables storing information we are making assumptions about
319 during iterations. */
321 static vn_tables_t optimistic_info;
323 /* Pointer to the set of hashtables that is currently being used.
324 Should always point to either the optimistic_info, or the
325 valid_info. */
327 static vn_tables_t current_info;
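/* A minimal sketch (not how the code below is literally structured) of
   how the three table pointers above are meant to interact when
   iterating an SCC: value numbering runs against the optimistic table
   until the SCC reaches a fixed point, and only then are the results
   recorded as proven in the valid table.  visit_use_sketch and
   copy_to_valid_table_sketch are hypothetical helpers.  */
#if 0
static void
iterate_scc_sketch (vec<tree> scc)
{
  unsigned i;
  tree name;
  bool changed;

  /* Make optimistic assumptions while iterating the cycle.  */
  current_info = optimistic_info;
  do
    {
      changed = false;
      /* The optimistic tables would also be cleared here.  */
      FOR_EACH_VEC_ELT (scc, i, name)
	changed |= visit_use_sketch (name);
    }
  while (changed);

  /* The iteration converged; record the results as proven.  */
  current_info = valid_info;
  FOR_EACH_VEC_ELT (scc, i, name)
    copy_to_valid_table_sketch (name);
}
#endif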
330 /* Reverse post order index for each basic block. */
332 static int *rpo_numbers;
334 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
336 /* Return the SSA value of the VUSE x, supporting released VDEFs
337 during elimination which will value-number the VDEF to the
338 associated VUSE (but not substitute in the whole lattice). */
340 static inline tree
341 vuse_ssa_val (tree x)
343 if (!x)
344 return NULL_TREE;
348 tree tem = SSA_VAL (x);
349 /* stmt walking can walk over a backedge and reach code we didn't
350 value-number yet. */
351 if (tem == VN_TOP)
352 return x;
353 x = tem;
355 while (SSA_NAME_IN_FREE_LIST (x));
357 return x;
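/* For instance (hypothetical names): if elimination released the name
   .MEM_5 and value-numbered it to its VUSE .MEM_2, then
   SSA_VAL (.MEM_5) yields .MEM_2 and the loop above keeps following
   SSA_VAL while the current name is still in the free list.  */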
360 /* This represents the top of the VN lattice, which is the universal
361 value. */
363 tree VN_TOP;
365 /* Unique counter for our value ids. */
367 static unsigned int next_value_id;
369 /* Next DFS number and the stack for strongly connected component
370 detection. */
372 static unsigned int next_dfs_num;
373 static vec<tree> sccstack;
377 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
378 are allocated on an obstack for locality reasons, and to free them
379 without looping over the vec. */
381 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
382 static struct obstack vn_ssa_aux_obstack;
384 /* Return whether there is value numbering information for a given SSA name. */
386 bool
387 has_VN_INFO (tree name)
389 if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
390 return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
391 return false;
394 /* Return the value numbering information for a given SSA name. */
396 vn_ssa_aux_t
397 VN_INFO (tree name)
399 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
400 gcc_checking_assert (res);
401 return res;
404 /* Set the value numbering info for a given SSA name to a given
405 value. */
407 static inline void
408 VN_INFO_SET (tree name, vn_ssa_aux_t value)
410 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
413 /* Initialize the value numbering info for a given SSA name.
414 This should be called just once for every SSA name. */
416 vn_ssa_aux_t
417 VN_INFO_GET (tree name)
419 vn_ssa_aux_t newinfo;
421 gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
422 || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
423 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
424 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
425 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
426 vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
427 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
428 return newinfo;
432 /* Return the vn_kind the expression computed by the stmt should be
433 associated with. */
435 enum vn_kind
436 vn_get_stmt_kind (gimple *stmt)
438 switch (gimple_code (stmt))
440 case GIMPLE_CALL:
441 return VN_REFERENCE;
442 case GIMPLE_PHI:
443 return VN_PHI;
444 case GIMPLE_ASSIGN:
446 enum tree_code code = gimple_assign_rhs_code (stmt);
447 tree rhs1 = gimple_assign_rhs1 (stmt);
448 switch (get_gimple_rhs_class (code))
450 case GIMPLE_UNARY_RHS:
451 case GIMPLE_BINARY_RHS:
452 case GIMPLE_TERNARY_RHS:
453 return VN_NARY;
454 case GIMPLE_SINGLE_RHS:
455 switch (TREE_CODE_CLASS (code))
457 case tcc_reference:
458 /* VOP-less references can go through unary case. */
459 if ((code == REALPART_EXPR
460 || code == IMAGPART_EXPR
461 || code == VIEW_CONVERT_EXPR
462 || code == BIT_FIELD_REF)
463 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
464 return VN_NARY;
466 /* Fallthrough. */
467 case tcc_declaration:
468 return VN_REFERENCE;
470 case tcc_constant:
471 return VN_CONSTANT;
473 default:
474 if (code == ADDR_EXPR)
475 return (is_gimple_min_invariant (rhs1)
476 ? VN_CONSTANT : VN_REFERENCE);
477 else if (code == CONSTRUCTOR)
478 return VN_NARY;
479 return VN_NONE;
481 default:
482 return VN_NONE;
485 default:
486 return VN_NONE;
490 /* Lookup a value id for CONSTANT and return it. If it does not
491 exist returns 0. */
493 unsigned int
494 get_constant_value_id (tree constant)
496 vn_constant_s **slot;
497 struct vn_constant_s vc;
499 vc.hashcode = vn_hash_constant_with_type (constant);
500 vc.constant = constant;
501 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
502 if (slot)
503 return (*slot)->value_id;
504 return 0;
507 /* Lookup a value id for CONSTANT, and if it does not exist, create a
508 new one and return it. If it does exist, return it. */
510 unsigned int
511 get_or_alloc_constant_value_id (tree constant)
513 vn_constant_s **slot;
514 struct vn_constant_s vc;
515 vn_constant_t vcp;
517 vc.hashcode = vn_hash_constant_with_type (constant);
518 vc.constant = constant;
519 slot = constant_to_value_id->find_slot (&vc, INSERT);
520 if (*slot)
521 return (*slot)->value_id;
523 vcp = XNEW (struct vn_constant_s);
524 vcp->hashcode = vc.hashcode;
525 vcp->constant = constant;
526 vcp->value_id = get_next_value_id ();
527 *slot = vcp;
528 bitmap_set_bit (constant_value_ids, vcp->value_id);
529 return vcp->value_id;
532 /* Return true if V is a value id for a constant. */
534 bool
535 value_id_constant_p (unsigned int v)
537 return bitmap_bit_p (constant_value_ids, v);
540 /* Compute the hash for a reference operand VRO1. */
542 static void
543 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
545 hstate.add_int (vro1->opcode);
546 if (vro1->op0)
547 inchash::add_expr (vro1->op0, hstate);
548 if (vro1->op1)
549 inchash::add_expr (vro1->op1, hstate);
550 if (vro1->op2)
551 inchash::add_expr (vro1->op2, hstate);
554 /* Compute a hash for the reference operation VR1 and return it. */
556 static hashval_t
557 vn_reference_compute_hash (const vn_reference_t vr1)
559 inchash::hash hstate;
560 hashval_t result;
561 int i;
562 vn_reference_op_t vro;
563 poly_int64 off = -1;
564 bool deref = false;
566 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
568 if (vro->opcode == MEM_REF)
569 deref = true;
570 else if (vro->opcode != ADDR_EXPR)
571 deref = false;
572 if (maybe_ne (vro->off, -1))
574 if (known_eq (off, -1))
575 off = 0;
576 off += vro->off;
578 else
580 if (maybe_ne (off, -1)
581 && maybe_ne (off, 0))
582 hstate.add_poly_int (off);
583 off = -1;
584 if (deref
585 && vro->opcode == ADDR_EXPR)
587 if (vro->op0)
589 tree op = TREE_OPERAND (vro->op0, 0);
590 hstate.add_int (TREE_CODE (op));
591 inchash::add_expr (op, hstate);
594 else
595 vn_reference_op_compute_hash (vro, hstate);
598 result = hstate.end ();
599 /* ??? We would ICE later if we hash instead of adding that in. */
600 if (vr1->vuse)
601 result += SSA_NAME_VERSION (vr1->vuse);
603 return result;
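/* For example (illustrative; the exact operand encoding is produced by
   copy_reference_ops_from_ref below): with a field F at byte offset 4
   in struct S, the operand string for S.F,
     { COMPONENT_REF F (off 4), MEM_REF (off 0), ADDR_EXPR &S },
   and the one for MEM[(char *)&S + 4B],
     { MEM_REF (off 4), ADDR_EXPR &S },
   both accumulate the constant offset 4 and hash the underlying decl S
   rather than the ADDR_EXPR, so the two references receive the same
   hash and can compare equal in vn_reference_eq below.  */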
606 /* Return true if reference operations VR1 and VR2 are equivalent. This
607 means they have the same set of operands and vuses. */
609 bool
610 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
612 unsigned i, j;
614 /* Early out if this is not a hash collision. */
615 if (vr1->hashcode != vr2->hashcode)
616 return false;
618 /* The VOP needs to be the same. */
619 if (vr1->vuse != vr2->vuse)
620 return false;
622 /* If the operands are the same we are done. */
623 if (vr1->operands == vr2->operands)
624 return true;
626 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
627 return false;
629 if (INTEGRAL_TYPE_P (vr1->type)
630 && INTEGRAL_TYPE_P (vr2->type))
632 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
633 return false;
635 else if (INTEGRAL_TYPE_P (vr1->type)
636 && (TYPE_PRECISION (vr1->type)
637 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
638 return false;
639 else if (INTEGRAL_TYPE_P (vr2->type)
640 && (TYPE_PRECISION (vr2->type)
641 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
642 return false;
644 i = 0;
645 j = 0;
648 poly_int64 off1 = 0, off2 = 0;
649 vn_reference_op_t vro1, vro2;
650 vn_reference_op_s tem1, tem2;
651 bool deref1 = false, deref2 = false;
652 for (; vr1->operands.iterate (i, &vro1); i++)
654 if (vro1->opcode == MEM_REF)
655 deref1 = true;
656 /* Do not look through a storage order barrier. */
657 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
658 return false;
659 if (known_eq (vro1->off, -1))
660 break;
661 off1 += vro1->off;
663 for (; vr2->operands.iterate (j, &vro2); j++)
665 if (vro2->opcode == MEM_REF)
666 deref2 = true;
667 /* Do not look through a storage order barrier. */
668 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
669 return false;
670 if (known_eq (vro2->off, -1))
671 break;
672 off2 += vro2->off;
674 if (maybe_ne (off1, off2))
675 return false;
676 if (deref1 && vro1->opcode == ADDR_EXPR)
678 memset (&tem1, 0, sizeof (tem1));
679 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
680 tem1.type = TREE_TYPE (tem1.op0);
681 tem1.opcode = TREE_CODE (tem1.op0);
682 vro1 = &tem1;
683 deref1 = false;
685 if (deref2 && vro2->opcode == ADDR_EXPR)
687 memset (&tem2, 0, sizeof (tem2));
688 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
689 tem2.type = TREE_TYPE (tem2.op0);
690 tem2.opcode = TREE_CODE (tem2.op0);
691 vro2 = &tem2;
692 deref2 = false;
694 if (deref1 != deref2)
695 return false;
696 if (!vn_reference_op_eq (vro1, vro2))
697 return false;
698 ++j;
699 ++i;
701 while (vr1->operands.length () != i
702 || vr2->operands.length () != j);
704 return true;
707 /* Copy the operations present in load/store REF into RESULT, a vector of
708 vn_reference_op_s's. */
710 static void
711 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
713 if (TREE_CODE (ref) == TARGET_MEM_REF)
715 vn_reference_op_s temp;
717 result->reserve (3);
719 memset (&temp, 0, sizeof (temp));
720 temp.type = TREE_TYPE (ref);
721 temp.opcode = TREE_CODE (ref);
722 temp.op0 = TMR_INDEX (ref);
723 temp.op1 = TMR_STEP (ref);
724 temp.op2 = TMR_OFFSET (ref);
725 temp.off = -1;
726 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
727 temp.base = MR_DEPENDENCE_BASE (ref);
728 result->quick_push (temp);
730 memset (&temp, 0, sizeof (temp));
731 temp.type = NULL_TREE;
732 temp.opcode = ERROR_MARK;
733 temp.op0 = TMR_INDEX2 (ref);
734 temp.off = -1;
735 result->quick_push (temp);
737 memset (&temp, 0, sizeof (temp));
738 temp.type = NULL_TREE;
739 temp.opcode = TREE_CODE (TMR_BASE (ref));
740 temp.op0 = TMR_BASE (ref);
741 temp.off = -1;
742 result->quick_push (temp);
743 return;
746 /* For non-calls, store the information that makes up the address. */
747 tree orig = ref;
748 while (ref)
750 vn_reference_op_s temp;
752 memset (&temp, 0, sizeof (temp));
753 temp.type = TREE_TYPE (ref);
754 temp.opcode = TREE_CODE (ref);
755 temp.off = -1;
757 switch (temp.opcode)
759 case MODIFY_EXPR:
760 temp.op0 = TREE_OPERAND (ref, 1);
761 break;
762 case WITH_SIZE_EXPR:
763 temp.op0 = TREE_OPERAND (ref, 1);
764 temp.off = 0;
765 break;
766 case MEM_REF:
767 /* The base address gets its own vn_reference_op_s structure. */
768 temp.op0 = TREE_OPERAND (ref, 1);
769 if (!mem_ref_offset (ref).to_shwi (&temp.off))
770 temp.off = -1;
771 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
772 temp.base = MR_DEPENDENCE_BASE (ref);
773 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
774 break;
775 case BIT_FIELD_REF:
776 /* Record bits, position and storage order. */
777 temp.op0 = TREE_OPERAND (ref, 1);
778 temp.op1 = TREE_OPERAND (ref, 2);
779 if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
780 temp.off = -1;
781 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
782 break;
783 case COMPONENT_REF:
784 /* The field decl is enough to unambiguously specify the field;
785 a matching type is not necessary, and a mismatching type
786 is always a spurious difference. */
787 temp.type = NULL_TREE;
788 temp.op0 = TREE_OPERAND (ref, 1);
789 temp.op1 = TREE_OPERAND (ref, 2);
791 tree this_offset = component_ref_field_offset (ref);
792 if (this_offset
793 && poly_int_tree_p (this_offset))
795 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
796 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
798 poly_offset_int off
799 = (wi::to_poly_offset (this_offset)
800 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
801 /* Prohibit value-numbering zero offset components
802 of addresses the same before the pass folding
803 __builtin_object_size had a chance to run
804 (checking cfun->after_inlining does the
805 trick here). */
806 if (TREE_CODE (orig) != ADDR_EXPR
807 || maybe_ne (off, 0)
808 || cfun->after_inlining)
809 off.to_shwi (&temp.off);
813 break;
814 case ARRAY_RANGE_REF:
815 case ARRAY_REF:
817 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
818 /* Record index as operand. */
819 temp.op0 = TREE_OPERAND (ref, 1);
820 /* Always record lower bounds and element size. */
821 temp.op1 = array_ref_low_bound (ref);
822 /* But record element size in units of the type alignment. */
823 temp.op2 = TREE_OPERAND (ref, 3);
824 temp.align = eltype->type_common.align;
825 if (! temp.op2)
826 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
827 size_int (TYPE_ALIGN_UNIT (eltype)));
828 if (poly_int_tree_p (temp.op0)
829 && poly_int_tree_p (temp.op1)
830 && TREE_CODE (temp.op2) == INTEGER_CST)
832 poly_offset_int off = ((wi::to_poly_offset (temp.op0)
833 - wi::to_poly_offset (temp.op1))
834 * wi::to_offset (temp.op2)
835 * vn_ref_op_align_unit (&temp));
836 off.to_shwi (&temp.off);
839 break;
840 case VAR_DECL:
841 if (DECL_HARD_REGISTER (ref))
843 temp.op0 = ref;
844 break;
846 /* Fallthru. */
847 case PARM_DECL:
848 case CONST_DECL:
849 case RESULT_DECL:
850 /* Canonicalize decls to MEM[&decl] which is what we end up with
851 when valueizing MEM[ptr] with ptr = &decl. */
852 temp.opcode = MEM_REF;
853 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
854 temp.off = 0;
855 result->safe_push (temp);
856 temp.opcode = ADDR_EXPR;
857 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
858 temp.type = TREE_TYPE (temp.op0);
859 temp.off = -1;
860 break;
861 case STRING_CST:
862 case INTEGER_CST:
863 case COMPLEX_CST:
864 case VECTOR_CST:
865 case REAL_CST:
866 case FIXED_CST:
867 case CONSTRUCTOR:
868 case SSA_NAME:
869 temp.op0 = ref;
870 break;
871 case ADDR_EXPR:
872 if (is_gimple_min_invariant (ref))
874 temp.op0 = ref;
875 break;
877 break;
878 /* These are only interesting for their operands, their
879 existence, and their type. They will never be the last
880 ref in the chain of references (i.e. they require an
881 operand), so we don't have to put anything
882 in op*; it will be handled by the iteration. */
883 case REALPART_EXPR:
884 temp.off = 0;
885 break;
886 case VIEW_CONVERT_EXPR:
887 temp.off = 0;
888 temp.reverse = storage_order_barrier_p (ref);
889 break;
890 case IMAGPART_EXPR:
891 /* This is only interesting for its constant offset. */
892 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
893 break;
894 default:
895 gcc_unreachable ();
897 result->safe_push (temp);
899 if (REFERENCE_CLASS_P (ref)
900 || TREE_CODE (ref) == MODIFY_EXPR
901 || TREE_CODE (ref) == WITH_SIZE_EXPR
902 || (TREE_CODE (ref) == ADDR_EXPR
903 && !is_gimple_min_invariant (ref)))
904 ref = TREE_OPERAND (ref, 0);
905 else
906 ref = NULL_TREE;
910 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
911 operands in *OPS, the reference alias set SET and the reference type TYPE.
912 Return true if something useful was produced. */
914 bool
915 ao_ref_init_from_vn_reference (ao_ref *ref,
916 alias_set_type set, tree type,
917 vec<vn_reference_op_s> ops)
919 vn_reference_op_t op;
920 unsigned i;
921 tree base = NULL_TREE;
922 tree *op0_p = &base;
923 poly_offset_int offset = 0;
924 poly_offset_int max_size;
925 poly_offset_int size = -1;
926 tree size_tree = NULL_TREE;
927 alias_set_type base_alias_set = -1;
929 /* First get the final access size from just the outermost expression. */
930 op = &ops[0];
931 if (op->opcode == COMPONENT_REF)
932 size_tree = DECL_SIZE (op->op0);
933 else if (op->opcode == BIT_FIELD_REF)
934 size_tree = op->op0;
935 else
937 machine_mode mode = TYPE_MODE (type);
938 if (mode == BLKmode)
939 size_tree = TYPE_SIZE (type);
940 else
941 size = GET_MODE_BITSIZE (mode);
943 if (size_tree != NULL_TREE
944 && poly_int_tree_p (size_tree))
945 size = wi::to_poly_offset (size_tree);
947 /* Initially, maxsize is the same as the accessed element size.
948 In the following it will only grow (or become -1). */
949 max_size = size;
951 /* Compute cumulative bit-offset for nested component-refs and array-refs,
952 and find the ultimate containing object. */
953 FOR_EACH_VEC_ELT (ops, i, op)
955 switch (op->opcode)
957 /* These may be in the reference ops, but we cannot do anything
958 sensible with them here. */
959 case ADDR_EXPR:
960 /* Apart from ADDR_EXPR arguments to MEM_REF. */
961 if (base != NULL_TREE
962 && TREE_CODE (base) == MEM_REF
963 && op->op0
964 && DECL_P (TREE_OPERAND (op->op0, 0)))
966 vn_reference_op_t pop = &ops[i-1];
967 base = TREE_OPERAND (op->op0, 0);
968 if (known_eq (pop->off, -1))
970 max_size = -1;
971 offset = 0;
973 else
974 offset += pop->off * BITS_PER_UNIT;
975 op0_p = NULL;
976 break;
978 /* Fallthru. */
979 case CALL_EXPR:
980 return false;
982 /* Record the base objects. */
983 case MEM_REF:
984 base_alias_set = get_deref_alias_set (op->op0);
985 *op0_p = build2 (MEM_REF, op->type,
986 NULL_TREE, op->op0);
987 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
988 MR_DEPENDENCE_BASE (*op0_p) = op->base;
989 op0_p = &TREE_OPERAND (*op0_p, 0);
990 break;
992 case VAR_DECL:
993 case PARM_DECL:
994 case RESULT_DECL:
995 case SSA_NAME:
996 *op0_p = op->op0;
997 op0_p = NULL;
998 break;
1000 /* And now the usual component-reference style ops. */
1001 case BIT_FIELD_REF:
1002 offset += wi::to_poly_offset (op->op1);
1003 break;
1005 case COMPONENT_REF:
1007 tree field = op->op0;
1008 /* We do not have a complete COMPONENT_REF tree here so we
1009 cannot use component_ref_field_offset. Do the interesting
1010 parts manually. */
1011 tree this_offset = DECL_FIELD_OFFSET (field);
1013 if (op->op1 || !poly_int_tree_p (this_offset))
1014 max_size = -1;
1015 else
1017 poly_offset_int woffset = (wi::to_poly_offset (this_offset)
1018 << LOG2_BITS_PER_UNIT);
1019 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1020 offset += woffset;
1022 break;
1025 case ARRAY_RANGE_REF:
1026 case ARRAY_REF:
1027 /* We recorded the lower bound and the element size. */
1028 if (!poly_int_tree_p (op->op0)
1029 || !poly_int_tree_p (op->op1)
1030 || TREE_CODE (op->op2) != INTEGER_CST)
1031 max_size = -1;
1032 else
1034 poly_offset_int woffset
1035 = wi::sext (wi::to_poly_offset (op->op0)
1036 - wi::to_poly_offset (op->op1),
1037 TYPE_PRECISION (TREE_TYPE (op->op0)));
1038 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1039 woffset <<= LOG2_BITS_PER_UNIT;
1040 offset += woffset;
1042 break;
1044 case REALPART_EXPR:
1045 break;
1047 case IMAGPART_EXPR:
1048 offset += size;
1049 break;
1051 case VIEW_CONVERT_EXPR:
1052 break;
1054 case STRING_CST:
1055 case INTEGER_CST:
1056 case COMPLEX_CST:
1057 case VECTOR_CST:
1058 case REAL_CST:
1059 case CONSTRUCTOR:
1060 case CONST_DECL:
1061 return false;
1063 default:
1064 return false;
1068 if (base == NULL_TREE)
1069 return false;
1071 ref->ref = NULL_TREE;
1072 ref->base = base;
1073 ref->ref_alias_set = set;
1074 if (base_alias_set != -1)
1075 ref->base_alias_set = base_alias_set;
1076 else
1077 ref->base_alias_set = get_alias_set (base);
1078 /* We discount volatiles from value-numbering elsewhere. */
1079 ref->volatile_p = false;
1081 if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1083 ref->offset = 0;
1084 ref->size = -1;
1085 ref->max_size = -1;
1086 return true;
1089 if (!offset.to_shwi (&ref->offset))
1091 ref->offset = 0;
1092 ref->max_size = -1;
1093 return true;
1096 if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1097 ref->max_size = -1;
1099 return true;
1102 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1103 vn_reference_op_s's. */
1105 static void
1106 copy_reference_ops_from_call (gcall *call,
1107 vec<vn_reference_op_s> *result)
1109 vn_reference_op_s temp;
1110 unsigned i;
1111 tree lhs = gimple_call_lhs (call);
1112 int lr;
1114 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1115 different. By adding the lhs here in the vector, we ensure that the
1116 hashcode is different, guaranteeing a different value number. */
1117 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1119 memset (&temp, 0, sizeof (temp));
1120 temp.opcode = MODIFY_EXPR;
1121 temp.type = TREE_TYPE (lhs);
1122 temp.op0 = lhs;
1123 temp.off = -1;
1124 result->safe_push (temp);
1127 /* Copy the type, opcode, function, static chain and EH region, if any. */
1128 memset (&temp, 0, sizeof (temp));
1129 temp.type = gimple_call_return_type (call);
1130 temp.opcode = CALL_EXPR;
1131 temp.op0 = gimple_call_fn (call);
1132 temp.op1 = gimple_call_chain (call);
1133 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1134 temp.op2 = size_int (lr);
1135 temp.off = -1;
1136 result->safe_push (temp);
1138 /* Copy the call arguments. As they can be references as well,
1139 just chain them together. */
1140 for (i = 0; i < gimple_call_num_args (call); ++i)
1142 tree callarg = gimple_call_arg (call, i);
1143 copy_reference_ops_from_ref (callarg, result);
1147 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1148 *I_P to point to the last element of the replacement. */
1149 static bool
1150 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1151 unsigned int *i_p)
1153 unsigned int i = *i_p;
1154 vn_reference_op_t op = &(*ops)[i];
1155 vn_reference_op_t mem_op = &(*ops)[i - 1];
1156 tree addr_base;
1157 poly_int64 addr_offset = 0;
1159 /* The only thing we have to do is add the offset of .foo.bar within
1160 &OBJ.foo.bar to the preceding MEM_REF offset and replace the
1161 address with &OBJ. */
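/* For instance (illustrative offsets only): if op->op0 is &obj.f with
   .f at byte offset 8 and the preceding MEM_REF carries offset 16,
   the pair is rewritten to the MEM_REF offset 24 with address &obj,
   i.e. MEM[&obj.f, 16] effectively becomes MEM[&obj, 24].  */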
1162 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1163 &addr_offset);
1164 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1165 if (addr_base != TREE_OPERAND (op->op0, 0))
1167 poly_offset_int off
1168 = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1169 SIGNED)
1170 + addr_offset);
1171 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1172 op->op0 = build_fold_addr_expr (addr_base);
1173 if (tree_fits_shwi_p (mem_op->op0))
1174 mem_op->off = tree_to_shwi (mem_op->op0);
1175 else
1176 mem_op->off = -1;
1177 return true;
1179 return false;
1182 /* Fold the address at position *I_P in a vn_reference_op_s vector *OPS through
1183 its defining ADDR_EXPR or POINTER_PLUS_EXPR statement into the preceding MEM_REF. Updates *I_P to point to the last element of the replacement. */
1184 static bool
1185 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1186 unsigned int *i_p)
1188 unsigned int i = *i_p;
1189 vn_reference_op_t op = &(*ops)[i];
1190 vn_reference_op_t mem_op = &(*ops)[i - 1];
1191 gimple *def_stmt;
1192 enum tree_code code;
1193 poly_offset_int off;
1195 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1196 if (!is_gimple_assign (def_stmt))
1197 return false;
1199 code = gimple_assign_rhs_code (def_stmt);
1200 if (code != ADDR_EXPR
1201 && code != POINTER_PLUS_EXPR)
1202 return false;
1204 off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
1206 /* The only thing we have to do is add the offset of .foo.bar within
1207 &OBJ.foo.bar to the preceding MEM_REF offset and replace the
1208 address with &OBJ. */
1209 if (code == ADDR_EXPR)
1211 tree addr, addr_base;
1212 poly_int64 addr_offset;
1214 addr = gimple_assign_rhs1 (def_stmt);
1215 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1216 &addr_offset);
1217 /* If that didn't work because the address isn't invariant, propagate
1218 the reference tree from the address operation, in case the current
1219 dereference has no offset. */
1220 if (!addr_base
1221 && *i_p == ops->length () - 1
1222 && known_eq (off, 0)
1223 /* This disables the transform for PRE, where the
1224 reference ops might also be used for code insertion, which
1225 would be invalid. */
1226 && default_vn_walk_kind == VN_WALKREWRITE)
1228 auto_vec<vn_reference_op_s, 32> tem;
1229 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1230 /* Make sure to preserve TBAA info. The only objects not
1231 wrapped in MEM_REFs that can have their address taken are
1232 STRING_CSTs. */
1233 if (tem.length () >= 2
1234 && tem[tem.length () - 2].opcode == MEM_REF)
1236 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1237 new_mem_op->op0
1238 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1239 wi::to_poly_wide (new_mem_op->op0));
1241 else
1242 gcc_assert (tem.last ().opcode == STRING_CST);
1243 ops->pop ();
1244 ops->pop ();
1245 ops->safe_splice (tem);
1246 --*i_p;
1247 return true;
1249 if (!addr_base
1250 || TREE_CODE (addr_base) != MEM_REF
1251 || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1252 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base, 0))))
1253 return false;
1255 off += addr_offset;
1256 off += mem_ref_offset (addr_base);
1257 op->op0 = TREE_OPERAND (addr_base, 0);
1259 else
1261 tree ptr, ptroff;
1262 ptr = gimple_assign_rhs1 (def_stmt);
1263 ptroff = gimple_assign_rhs2 (def_stmt);
1264 if (TREE_CODE (ptr) != SSA_NAME
1265 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1266 || !poly_int_tree_p (ptroff))
1267 return false;
1269 off += wi::to_poly_offset (ptroff);
1270 op->op0 = ptr;
1273 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1274 if (tree_fits_shwi_p (mem_op->op0))
1275 mem_op->off = tree_to_shwi (mem_op->op0);
1276 else
1277 mem_op->off = -1;
1278 if (TREE_CODE (op->op0) == SSA_NAME)
1279 op->op0 = SSA_VAL (op->op0);
1280 if (TREE_CODE (op->op0) != SSA_NAME)
1281 op->opcode = TREE_CODE (op->op0);
1283 /* And recurse. */
1284 if (TREE_CODE (op->op0) == SSA_NAME)
1285 vn_reference_maybe_forwprop_address (ops, i_p);
1286 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1287 vn_reference_fold_indirect (ops, i_p);
1288 return true;
1291 /* Optimize the reference REF to a constant if possible or return
1292 NULL_TREE if not. */
1294 tree
1295 fully_constant_vn_reference_p (vn_reference_t ref)
1297 vec<vn_reference_op_s> operands = ref->operands;
1298 vn_reference_op_t op;
1300 /* Try to simplify the translated expression if it is
1301 a call to a builtin function with at most two arguments. */
1302 op = &operands[0];
1303 if (op->opcode == CALL_EXPR
1304 && TREE_CODE (op->op0) == ADDR_EXPR
1305 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1306 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1307 && operands.length () >= 2
1308 && operands.length () <= 3)
1310 vn_reference_op_t arg0, arg1 = NULL;
1311 bool anyconst = false;
1312 arg0 = &operands[1];
1313 if (operands.length () > 2)
1314 arg1 = &operands[2];
1315 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1316 || (arg0->opcode == ADDR_EXPR
1317 && is_gimple_min_invariant (arg0->op0)))
1318 anyconst = true;
1319 if (arg1
1320 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1321 || (arg1->opcode == ADDR_EXPR
1322 && is_gimple_min_invariant (arg1->op0))))
1323 anyconst = true;
1324 if (anyconst)
1326 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1327 arg1 ? 2 : 1,
1328 arg0->op0,
1329 arg1 ? arg1->op0 : NULL);
1330 if (folded
1331 && TREE_CODE (folded) == NOP_EXPR)
1332 folded = TREE_OPERAND (folded, 0);
1333 if (folded
1334 && is_gimple_min_invariant (folded))
1335 return folded;
1339 /* Simplify reads from constants or constant initializers. */
1340 else if (BITS_PER_UNIT == 8
1341 && is_gimple_reg_type (ref->type)
1342 && (!INTEGRAL_TYPE_P (ref->type)
1343 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1345 poly_int64 off = 0;
1346 HOST_WIDE_INT size;
1347 if (INTEGRAL_TYPE_P (ref->type))
1348 size = TYPE_PRECISION (ref->type);
1349 else
1350 size = tree_to_shwi (TYPE_SIZE (ref->type));
1351 if (size % BITS_PER_UNIT != 0
1352 || size > MAX_BITSIZE_MODE_ANY_MODE)
1353 return NULL_TREE;
1354 size /= BITS_PER_UNIT;
1355 unsigned i;
1356 for (i = 0; i < operands.length (); ++i)
1358 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1360 ++i;
1361 break;
1363 if (known_eq (operands[i].off, -1))
1364 return NULL_TREE;
1365 off += operands[i].off;
1366 if (operands[i].opcode == MEM_REF)
1368 ++i;
1369 break;
1372 vn_reference_op_t base = &operands[--i];
1373 tree ctor = error_mark_node;
1374 tree decl = NULL_TREE;
1375 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1376 ctor = base->op0;
1377 else if (base->opcode == MEM_REF
1378 && base[1].opcode == ADDR_EXPR
1379 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1380 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1381 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1383 decl = TREE_OPERAND (base[1].op0, 0);
1384 if (TREE_CODE (decl) == STRING_CST)
1385 ctor = decl;
1386 else
1387 ctor = ctor_for_folding (decl);
1389 if (ctor == NULL_TREE)
1390 return build_zero_cst (ref->type);
1391 else if (ctor != error_mark_node)
1393 HOST_WIDE_INT const_off;
1394 if (decl)
1396 tree res = fold_ctor_reference (ref->type, ctor,
1397 off * BITS_PER_UNIT,
1398 size * BITS_PER_UNIT, decl);
1399 if (res)
1401 STRIP_USELESS_TYPE_CONVERSION (res);
1402 if (is_gimple_min_invariant (res))
1403 return res;
1406 else if (off.is_constant (&const_off))
1408 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1409 int len = native_encode_expr (ctor, buf, size, const_off);
1410 if (len > 0)
1411 return native_interpret_expr (ref->type, buf, len);
1416 return NULL_TREE;
1419 /* Return true if OPS contain a storage order barrier. */
1421 static bool
1422 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1424 vn_reference_op_t op;
1425 unsigned i;
1427 FOR_EACH_VEC_ELT (ops, i, op)
1428 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1429 return true;
1431 return false;
1434 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1435 structures into their value numbers. This is done in-place, and
1436 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1437 whether any operands were valueized. */
1439 static vec<vn_reference_op_s>
1440 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1442 vn_reference_op_t vro;
1443 unsigned int i;
1445 *valueized_anything = false;
1447 FOR_EACH_VEC_ELT (orig, i, vro)
1449 if (vro->opcode == SSA_NAME
1450 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1452 tree tem = SSA_VAL (vro->op0);
1453 if (tem != vro->op0)
1455 *valueized_anything = true;
1456 vro->op0 = tem;
1458 /* If it transforms from an SSA_NAME to a constant, update
1459 the opcode. */
1460 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1461 vro->opcode = TREE_CODE (vro->op0);
1463 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1465 tree tem = SSA_VAL (vro->op1);
1466 if (tem != vro->op1)
1468 *valueized_anything = true;
1469 vro->op1 = tem;
1472 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1474 tree tem = SSA_VAL (vro->op2);
1475 if (tem != vro->op2)
1477 *valueized_anything = true;
1478 vro->op2 = tem;
1481 /* If it transforms from an SSA_NAME to an address, fold with
1482 a preceding indirect reference. */
1483 if (i > 0
1484 && vro->op0
1485 && TREE_CODE (vro->op0) == ADDR_EXPR
1486 && orig[i - 1].opcode == MEM_REF)
1488 if (vn_reference_fold_indirect (&orig, &i))
1489 *valueized_anything = true;
1491 else if (i > 0
1492 && vro->opcode == SSA_NAME
1493 && orig[i - 1].opcode == MEM_REF)
1495 if (vn_reference_maybe_forwprop_address (&orig, &i))
1496 *valueized_anything = true;
1498 /* If it transforms a non-constant ARRAY_REF into a constant
1499 one, adjust the constant offset. */
1500 else if (vro->opcode == ARRAY_REF
1501 && known_eq (vro->off, -1)
1502 && poly_int_tree_p (vro->op0)
1503 && poly_int_tree_p (vro->op1)
1504 && TREE_CODE (vro->op2) == INTEGER_CST)
1506 poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1507 - wi::to_poly_offset (vro->op1))
1508 * wi::to_offset (vro->op2)
1509 * vn_ref_op_align_unit (vro));
1510 off.to_shwi (&vro->off);
1514 return orig;
1517 static vec<vn_reference_op_s>
1518 valueize_refs (vec<vn_reference_op_s> orig)
1520 bool tem;
1521 return valueize_refs_1 (orig, &tem);
1524 static vec<vn_reference_op_s> shared_lookup_references;
1526 /* Create a vector of vn_reference_op_s structures from REF, a
1527 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1528 this function. *VALUEIZED_ANYTHING will specify whether any
1529 operands were valueized. */
1531 static vec<vn_reference_op_s>
1532 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1534 if (!ref)
1535 return vNULL;
1536 shared_lookup_references.truncate (0);
1537 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1538 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1539 valueized_anything);
1540 return shared_lookup_references;
1543 /* Create a vector of vn_reference_op_s structures from CALL, a
1544 call statement. The vector is shared among all callers of
1545 this function. */
1547 static vec<vn_reference_op_s>
1548 valueize_shared_reference_ops_from_call (gcall *call)
1550 if (!call)
1551 return vNULL;
1552 shared_lookup_references.truncate (0);
1553 copy_reference_ops_from_call (call, &shared_lookup_references);
1554 shared_lookup_references = valueize_refs (shared_lookup_references);
1555 return shared_lookup_references;
1558 /* Lookup a SCCVN reference operation VR in the current hash table.
1559 Returns the resulting value number if it exists in the hash table,
1560 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1561 vn_reference_t stored in the hashtable if something is found. */
1563 static tree
1564 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1566 vn_reference_s **slot;
1567 hashval_t hash;
1569 hash = vr->hashcode;
1570 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1571 if (!slot && current_info == optimistic_info)
1572 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1573 if (slot)
1575 if (vnresult)
1576 *vnresult = (vn_reference_t)*slot;
1577 return ((vn_reference_t)*slot)->result;
1580 return NULL_TREE;
1583 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1584 with the current VUSE and performs the expression lookup. */
1586 static void *
1587 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1588 unsigned int cnt, void *vr_)
1590 vn_reference_t vr = (vn_reference_t)vr_;
1591 vn_reference_s **slot;
1592 hashval_t hash;
1594 /* This bounds the stmt walks we perform on reference lookups
1595 to O(1) instead of O(N) where N is the number of dominating
1596 stores. */
1597 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1598 return (void *)-1;
1600 if (last_vuse_ptr)
1601 *last_vuse_ptr = vuse;
1603 /* Fixup vuse and hash. */
1604 if (vr->vuse)
1605 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1606 vr->vuse = vuse_ssa_val (vuse);
1607 if (vr->vuse)
1608 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1610 hash = vr->hashcode;
1611 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1612 if (!slot && current_info == optimistic_info)
1613 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1614 if (slot)
1615 return *slot;
1617 return NULL;
1620 /* Lookup an existing or insert a new vn_reference entry into the
1621 value table for the VUSE, SET, TYPE, OPERANDS reference which
1622 has the value VALUE which is either a constant or an SSA name. */
1624 static vn_reference_t
1625 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1626 alias_set_type set,
1627 tree type,
1628 vec<vn_reference_op_s,
1629 va_heap> operands,
1630 tree value)
1632 vn_reference_s vr1;
1633 vn_reference_t result;
1634 unsigned value_id;
1635 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1636 vr1.operands = operands;
1637 vr1.type = type;
1638 vr1.set = set;
1639 vr1.hashcode = vn_reference_compute_hash (&vr1);
1640 if (vn_reference_lookup_1 (&vr1, &result))
1641 return result;
1642 if (TREE_CODE (value) == SSA_NAME)
1643 value_id = VN_INFO (value)->value_id;
1644 else
1645 value_id = get_or_alloc_constant_value_id (value);
1646 return vn_reference_insert_pieces (vuse, set, type,
1647 operands.copy (), value, value_id);
1650 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *stmt, tree result);
1651 static unsigned mprts_hook_cnt;
1653 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
1655 static tree
1656 vn_lookup_simplify_result (gimple_match_op *res_op)
1658 if (!res_op->code.is_tree_code ())
1659 return NULL_TREE;
1660 tree *ops = res_op->ops;
1661 unsigned int length = res_op->num_ops;
1662 if (res_op->code == CONSTRUCTOR
1663 /* ??? We arrive here with SCCVN's view, a decomposed CONSTRUCTOR,
1664 while GIMPLE / match-and-simplify expects the CONSTRUCTOR as a GENERIC tree. */
1665 && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
1667 length = CONSTRUCTOR_NELTS (res_op->ops[0]);
1668 ops = XALLOCAVEC (tree, length);
1669 for (unsigned i = 0; i < length; ++i)
1670 ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
1672 vn_nary_op_t vnresult = NULL;
1673 tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
1674 res_op->type, ops, &vnresult);
1675 /* We can end up endlessly recursing simplifications if the lookup above
1676 presents us with a def-use chain that mirrors the original simplification.
1677 See PR80887 for an example. Limit successful lookup artificially
1678 to 10 times if we are called as mprts_hook. */
1679 if (res
1680 && mprts_hook
1681 && --mprts_hook_cnt == 0)
1683 if (dump_file && (dump_flags & TDF_DETAILS))
1684 fprintf (dump_file, "Resetting mprts_hook after too many "
1685 "invocations.\n");
1686 mprts_hook = NULL;
1688 return res;
1691 /* Return a value-number for RCODE OPS... either by looking up an existing
1692 value-number for the simplified result or by inserting the operation if
1693 INSERT is true. */
1695 static tree
1696 vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert)
1698 tree result = NULL_TREE;
1699 /* We will be creating a value number for
1700 RCODE (OPS...).
1701 So first simplify and look up this expression to see if it
1702 is already available. */
1703 mprts_hook = vn_lookup_simplify_result;
1704 mprts_hook_cnt = 9;
1705 bool res = false;
1706 switch (TREE_CODE_LENGTH ((tree_code) res_op->code))
1708 case 1:
1709 res = gimple_resimplify1 (NULL, res_op, vn_valueize);
1710 break;
1711 case 2:
1712 res = gimple_resimplify2 (NULL, res_op, vn_valueize);
1713 break;
1714 case 3:
1715 res = gimple_resimplify3 (NULL, res_op, vn_valueize);
1716 break;
1718 mprts_hook = NULL;
1719 gimple *new_stmt = NULL;
1720 if (res
1721 && gimple_simplified_result_is_gimple_val (res_op))
1722 /* The expression is already available. */
1723 result = res_op->ops[0];
1724 else
1726 tree val = vn_lookup_simplify_result (res_op);
1727 if (!val && insert)
1729 gimple_seq stmts = NULL;
1730 result = maybe_push_res_to_seq (res_op, &stmts);
1731 if (result)
1733 gcc_assert (gimple_seq_singleton_p (stmts));
1734 new_stmt = gimple_seq_first_stmt (stmts);
1737 else
1738 /* The expression is already available. */
1739 result = val;
1741 if (new_stmt)
1743 /* The expression is not yet available, value-number lhs to
1744 the new SSA_NAME we created. */
1745 /* Initialize value-number information properly. */
1746 VN_INFO_GET (result)->valnum = result;
1747 VN_INFO (result)->value_id = get_next_value_id ();
1748 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1749 new_stmt);
1750 VN_INFO (result)->needs_insertion = true;
1751 /* ??? PRE phi-translation inserts NARYs without corresponding
1752 SSA name result. Re-use those but set their result according
1753 to the stmt we just built. */
1754 vn_nary_op_t nary = NULL;
1755 vn_nary_op_lookup_stmt (new_stmt, &nary);
1756 if (nary)
1758 gcc_assert (nary->result == NULL_TREE);
1759 nary->result = gimple_assign_lhs (new_stmt);
1761 /* As all "inserted" statements are singleton SCCs, insert
1762 to the valid table. This is strictly needed to
1763 avoid re-generating new value SSA_NAMEs for the same
1764 expression during SCC iteration over and over (the
1765 optimistic table gets cleared after each iteration).
1766 We do not need to insert into the optimistic table, as
1767 lookups there will fall back to the valid table. */
1768 else if (current_info == optimistic_info)
1770 current_info = valid_info;
1771 vn_nary_op_insert_stmt (new_stmt, result);
1772 current_info = optimistic_info;
1774 else
1775 vn_nary_op_insert_stmt (new_stmt, result);
1776 if (dump_file && (dump_flags & TDF_DETAILS))
1778 fprintf (dump_file, "Inserting name ");
1779 print_generic_expr (dump_file, result);
1780 fprintf (dump_file, " for expression ");
1781 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1782 fprintf (dump_file, "\n");
1785 return result;
1788 /* Return a value-number for RCODE OPS... either by looking up an existing
1789 value-number for the simplified result or by inserting the operation. */
1791 static tree
1792 vn_nary_build_or_lookup (gimple_match_op *res_op)
1794 return vn_nary_build_or_lookup_1 (res_op, true);
1797 /* Try to simplify the expression RCODE OPS... of type TYPE and return
1798 its value if present. */
1800 tree
1801 vn_nary_simplify (vn_nary_op_t nary)
1803 if (nary->length > gimple_match_op::MAX_NUM_OPS)
1804 return NULL_TREE;
1805 gimple_match_op op (nary->opcode, nary->type, nary->length);
1806 memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
1807 return vn_nary_build_or_lookup_1 (&op, false);
1811 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1812 from the statement defining VUSE and if not successful tries to
1813 translate *REFP and VR_ through an aggregate copy at the definition
1814 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1815 of *REF and *VR. If only disambiguation was performed then
1816 *DISAMBIGUATE_ONLY is set to true. */
1818 static void *
1819 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1820 bool *disambiguate_only)
1822 vn_reference_t vr = (vn_reference_t)vr_;
1823 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1824 tree base = ao_ref_base (ref);
1825 HOST_WIDE_INT offseti, maxsizei;
1826 static vec<vn_reference_op_s> lhs_ops;
1827 ao_ref lhs_ref;
1828 bool lhs_ref_ok = false;
1829 poly_int64 copy_size;
1831 /* If the reference is based on a parameter that was determined as
1832 pointing to readonly memory it doesn't change. */
1833 if (TREE_CODE (base) == MEM_REF
1834 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1835 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1836 && bitmap_bit_p (const_parms,
1837 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1839 *disambiguate_only = true;
1840 return NULL;
1843 /* First try to disambiguate after value-replacing in the definitions LHS. */
1844 if (is_gimple_assign (def_stmt))
1846 tree lhs = gimple_assign_lhs (def_stmt);
1847 bool valueized_anything = false;
1848 /* Avoid re-allocation overhead. */
1849 lhs_ops.truncate (0);
1850 copy_reference_ops_from_ref (lhs, &lhs_ops);
1851 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1852 if (valueized_anything)
1854 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1855 get_alias_set (lhs),
1856 TREE_TYPE (lhs), lhs_ops);
1857 if (lhs_ref_ok
1858 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1860 *disambiguate_only = true;
1861 return NULL;
1864 else
1866 ao_ref_init (&lhs_ref, lhs);
1867 lhs_ref_ok = true;
1870 /* If we reach a clobbering statement try to skip it and see if
1871 we find a VN result with exactly the same value as the
1872 possible clobber. In this case we can ignore the clobber
1873 and return the found value.
1874 Note that we don't need to worry about partially overlapping
1875 accesses as we then can use TBAA to disambiguate against the
1876 clobbering statement when looking up a load (thus the
1877 VN_WALKREWRITE guard). */
1878 if (vn_walk_kind == VN_WALKREWRITE
1879 && is_gimple_reg_type (TREE_TYPE (lhs))
1880 && types_compatible_p (TREE_TYPE (lhs), vr->type))
1882 tree *saved_last_vuse_ptr = last_vuse_ptr;
1883 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
1884 last_vuse_ptr = NULL;
1885 tree saved_vuse = vr->vuse;
1886 hashval_t saved_hashcode = vr->hashcode;
1887 void *res = vn_reference_lookup_2 (ref,
1888 gimple_vuse (def_stmt), 0, vr);
1889 /* Need to restore vr->vuse and vr->hashcode. */
1890 vr->vuse = saved_vuse;
1891 vr->hashcode = saved_hashcode;
1892 last_vuse_ptr = saved_last_vuse_ptr;
1893 if (res && res != (void *)-1)
1895 vn_reference_t vnresult = (vn_reference_t) res;
1896 if (vnresult->result
1897 && operand_equal_p (vnresult->result,
1898 gimple_assign_rhs1 (def_stmt), 0))
1899 return res;
1903 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1904 && gimple_call_num_args (def_stmt) <= 4)
1906 /* For builtin calls valueize its arguments and call the
1907 alias oracle again. Valueization may improve points-to
1908 info of pointers and constify size and position arguments.
1909 Originally this was motivated by PR61034 which has
1910 conditional calls to free falsely clobbering ref because
1911 of imprecise points-to info of the argument. */
1912 tree oldargs[4];
1913 bool valueized_anything = false;
1914 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1916 oldargs[i] = gimple_call_arg (def_stmt, i);
1917 tree val = vn_valueize (oldargs[i]);
1918 if (val != oldargs[i])
1920 gimple_call_set_arg (def_stmt, i, val);
1921 valueized_anything = true;
1924 if (valueized_anything)
1926 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1927 ref);
1928 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1929 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1930 if (!res)
1932 *disambiguate_only = true;
1933 return NULL;
1938 if (*disambiguate_only)
1939 return (void *)-1;
1941 /* If we cannot constrain the size of the reference we cannot
1942 test if anything kills it. */
1943 if (!ref->max_size_known_p ())
1944 return (void *)-1;
1946 poly_int64 offset = ref->offset;
1947 poly_int64 maxsize = ref->max_size;
1949 /* We can't deduce anything useful from clobbers. */
1950 if (gimple_clobber_p (def_stmt))
1951 return (void *)-1;
1953 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1954 from that definition.
1955 1) Memset. */
1956 if (is_gimple_reg_type (vr->type)
1957 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1958 && (integer_zerop (gimple_call_arg (def_stmt, 1))
1959 || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
1960 || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
1961 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1962 && offset.is_constant (&offseti)
1963 && offseti % BITS_PER_UNIT == 0))
1964 && poly_int_tree_p (gimple_call_arg (def_stmt, 2))
1965 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
1966 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
1968 tree base2;
1969 poly_int64 offset2, size2, maxsize2;
1970 bool reverse;
1971 tree ref2 = gimple_call_arg (def_stmt, 0);
1972 if (TREE_CODE (ref2) == SSA_NAME)
1974 ref2 = SSA_VAL (ref2);
1975 if (TREE_CODE (ref2) == SSA_NAME
1976 && (TREE_CODE (base) != MEM_REF
1977 || TREE_OPERAND (base, 0) != ref2))
1979 gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
1980 if (gimple_assign_single_p (def_stmt)
1981 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
1982 ref2 = gimple_assign_rhs1 (def_stmt);
1985 if (TREE_CODE (ref2) == ADDR_EXPR)
1987 ref2 = TREE_OPERAND (ref2, 0);
1988 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1989 &reverse);
1990 if (!known_size_p (maxsize2)
1991 || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
1992 return (void *)-1;
1994 else if (TREE_CODE (ref2) == SSA_NAME)
1996 poly_int64 soff;
1997 if (TREE_CODE (base) != MEM_REF
1998 || !(mem_ref_offset (base) << LOG2_BITS_PER_UNIT).to_shwi (&soff))
1999 return (void *)-1;
2000 offset += soff;
2001 offset2 = 0;
2002 if (TREE_OPERAND (base, 0) != ref2)
2004 gimple *def = SSA_NAME_DEF_STMT (ref2);
2005 if (is_gimple_assign (def)
2006 && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
2007 && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
2008 && poly_int_tree_p (gimple_assign_rhs2 (def))
2009 && (wi::to_poly_offset (gimple_assign_rhs2 (def))
2010 << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
2012 ref2 = gimple_assign_rhs1 (def);
2013 if (TREE_CODE (ref2) == SSA_NAME)
2014 ref2 = SSA_VAL (ref2);
2016 else
2017 return (void *)-1;
2020 else
2021 return (void *)-1;
2022 tree len = gimple_call_arg (def_stmt, 2);
2023 if (known_subrange_p (offset, maxsize, offset2,
2024 wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
2026 tree val;
2027 if (integer_zerop (gimple_call_arg (def_stmt, 1)))
2028 val = build_zero_cst (vr->type);
2029 else if (INTEGRAL_TYPE_P (vr->type)
2030 && known_eq (ref->size, 8))
2032 gimple_match_op res_op (NOP_EXPR, vr->type,
2033 gimple_call_arg (def_stmt, 1));
2034 val = vn_nary_build_or_lookup (&res_op);
2035 if (!val
2036 || (TREE_CODE (val) == SSA_NAME
2037 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2038 return (void *)-1;
2040 else
2042 unsigned len = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type));
2043 unsigned char *buf = XALLOCAVEC (unsigned char, len);
2044 memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
2045 len);
2046 val = native_interpret_expr (vr->type, buf, len);
2047 if (!val)
2048 return (void *)-1;
2050 return vn_reference_lookup_or_insert_for_pieces
2051 (vuse, vr->set, vr->type, vr->operands, val);
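/* For illustration, a hypothetical fragment that case 1) handles:

     #include <string.h>
     struct S { int x; int y; };
     int f (struct S *s)
     {
       memset (s, 0, sizeof (struct S));
       return s->x;   // the load is value numbered to 0
     }

   A non-zero single-byte pattern is likewise handled for small integral
   reads by filling a buffer and using native_interpret_expr.  */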
2055 /* 2) Assignment from an empty CONSTRUCTOR. */
2056 else if (is_gimple_reg_type (vr->type)
2057 && gimple_assign_single_p (def_stmt)
2058 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
2059 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
2061 tree base2;
2062 poly_int64 offset2, size2, maxsize2;
2063 bool reverse;
2064 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2065 &offset2, &size2, &maxsize2, &reverse);
2066 if (known_size_p (maxsize2)
2067 && operand_equal_p (base, base2, 0)
2068 && known_subrange_p (offset, maxsize, offset2, size2))
2070 tree val = build_zero_cst (vr->type);
2071 return vn_reference_lookup_or_insert_for_pieces
2072 (vuse, vr->set, vr->type, vr->operands, val);
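/* For illustration, a hypothetical fragment that case 2) handles:

     struct S { int x; int y; };
     int f (void)
     {
       struct S s = { 0 };   // typically gimplified to a store of an
                             // empty CONSTRUCTOR
       return s.x;           // value numbered to 0
     }
*/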
2076 /* 3) Assignment from a constant. We can use fold's native encode/interpret
2077 routines to extract the assigned bits. */
2078 else if (known_eq (ref->size, maxsize)
2079 && is_gimple_reg_type (vr->type)
2080 && !contains_storage_order_barrier_p (vr->operands)
2081 && gimple_assign_single_p (def_stmt)
2082 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2083 /* native_encode and native_decode operate on arrays of bytes
2084 and so fundamentally need a compile-time size and offset. */
2085 && maxsize.is_constant (&maxsizei)
2086 && maxsizei % BITS_PER_UNIT == 0
2087 && offset.is_constant (&offseti)
2088 && offseti % BITS_PER_UNIT == 0
2089 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2090 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2091 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2093 tree base2;
2094 HOST_WIDE_INT offset2, size2;
2095 bool reverse;
2096 base2 = get_ref_base_and_extent_hwi (gimple_assign_lhs (def_stmt),
2097 &offset2, &size2, &reverse);
2098 if (base2
2099 && !reverse
2100 && size2 % BITS_PER_UNIT == 0
2101 && offset2 % BITS_PER_UNIT == 0
2102 && operand_equal_p (base, base2, 0)
2103 && known_subrange_p (offseti, maxsizei, offset2, size2))
2105 /* We support up to 512-bit values (for V8DFmode). */
2106 unsigned char buffer[64];
2107 int len;
2109 tree rhs = gimple_assign_rhs1 (def_stmt);
2110 if (TREE_CODE (rhs) == SSA_NAME)
2111 rhs = SSA_VAL (rhs);
2112 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
2113 buffer, sizeof (buffer),
2114 (offseti - offset2) / BITS_PER_UNIT);
2115 if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
2117 tree type = vr->type;
2118 /* Make sure to interpret in a type that has a range
2119 covering the whole access size. */
2120 if (INTEGRAL_TYPE_P (vr->type)
2121 && maxsizei != TYPE_PRECISION (vr->type))
2122 type = build_nonstandard_integer_type (maxsizei,
2123 TYPE_UNSIGNED (type));
2124 tree val = native_interpret_expr (type, buffer,
2125 maxsizei / BITS_PER_UNIT);
2126 /* If we chop off bits because the type's precision doesn't
2127 match the memory access size, this is ok when optimizing
2128 reads but not when called from the DSE code during
2129 elimination. */
2130 if (val
2131 && type != vr->type)
2133 if (! int_fits_type_p (val, vr->type))
2134 val = NULL_TREE;
2135 else
2136 val = fold_convert (vr->type, val);
2139 if (val)
2140 return vn_reference_lookup_or_insert_for_pieces
2141 (vuse, vr->set, vr->type, vr->operands, val);
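/* For illustration, a hypothetical fragment that case 3) handles
   (the extracted value depends on the target byte order):

     union U { int i; short h[2]; } u;
     short f (void)
     {
       u.i = 0x00010002;   // store of a constant
       return u.h[0];      // the constant is native_encode'd to bytes and
                           // the requested piece re-interpreted as short
     }
*/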
2146 /* 4) Assignment from an SSA name whose definition we may be able
2147 to access pieces from. */
2148 else if (known_eq (ref->size, maxsize)
2149 && is_gimple_reg_type (vr->type)
2150 && !contains_storage_order_barrier_p (vr->operands)
2151 && gimple_assign_single_p (def_stmt)
2152 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2154 tree base2;
2155 poly_int64 offset2, size2, maxsize2;
2156 bool reverse;
2157 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2158 &offset2, &size2, &maxsize2,
2159 &reverse);
2160 if (!reverse
2161 && known_size_p (maxsize2)
2162 && known_eq (maxsize2, size2)
2163 && operand_equal_p (base, base2, 0)
2164 && known_subrange_p (offset, maxsize, offset2, size2)
2165 /* ??? We can't handle bitfield precision extracts without
2166 either using an alternate type for the BIT_FIELD_REF and
2167 then doing a conversion or possibly adjusting the offset
2168 according to endianness. */
2169 && (! INTEGRAL_TYPE_P (vr->type)
2170 || known_eq (ref->size, TYPE_PRECISION (vr->type)))
2171 && multiple_p (ref->size, BITS_PER_UNIT))
2173 gimple_match_op op (BIT_FIELD_REF, vr->type,
2174 SSA_VAL (gimple_assign_rhs1 (def_stmt)),
2175 bitsize_int (ref->size),
2176 bitsize_int (offset - offset2));
2177 tree val = vn_nary_build_or_lookup (&op);
2178 if (val
2179 && (TREE_CODE (val) != SSA_NAME
2180 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2182 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2183 (vuse, vr->set, vr->type, vr->operands, val);
2184 return res;
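/* For illustration, a hypothetical fragment that case 4) handles:

     union U { long long l; int i[2]; } u;
     int f (long long x)
     {
       u.l = x;
       return u.i[0];   // value numbered via BIT_FIELD_REF <x, 32, 0>
                        // (offset depending on byte order)
     }
*/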
2189 /* 5) For aggregate copies translate the reference through them if
2190 the copy kills ref. */
2191 else if (vn_walk_kind == VN_WALKREWRITE
2192 && gimple_assign_single_p (def_stmt)
2193 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2194 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2195 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2197 tree base2;
2198 int i, j, k;
2199 auto_vec<vn_reference_op_s> rhs;
2200 vn_reference_op_t vro;
2201 ao_ref r;
2203 if (!lhs_ref_ok)
2204 return (void *)-1;
2206 /* See if the assignment kills REF. */
2207 base2 = ao_ref_base (&lhs_ref);
2208 if (!lhs_ref.max_size_known_p ()
2209 || (base != base2
2210 && (TREE_CODE (base) != MEM_REF
2211 || TREE_CODE (base2) != MEM_REF
2212 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2213 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2214 TREE_OPERAND (base2, 1))))
2215 || !stmt_kills_ref_p (def_stmt, ref))
2216 return (void *)-1;
2218 /* Find the common base of ref and the lhs. lhs_ops already
2219 contains valueized operands for the lhs. */
2220 i = vr->operands.length () - 1;
2221 j = lhs_ops.length () - 1;
2222 while (j >= 0 && i >= 0
2223 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2225 i--;
2226 j--;
2229 /* ??? The innermost op should always be a MEM_REF and we already
2230 checked that the assignment to the lhs kills vr. Thus for
2231 aggregate copies using char[] types the vn_reference_op_eq
2232 may fail when comparing types for compatibility. But we really
2233 don't care here - further lookups with the rewritten operands
2234 will simply fail if we messed up types too badly. */
2235 poly_int64 extra_off = 0;
2236 if (j == 0 && i >= 0
2237 && lhs_ops[0].opcode == MEM_REF
2238 && maybe_ne (lhs_ops[0].off, -1))
2240 if (known_eq (lhs_ops[0].off, vr->operands[i].off))
2241 i--, j--;
2242 else if (vr->operands[i].opcode == MEM_REF
2243 && maybe_ne (vr->operands[i].off, -1))
2245 extra_off = vr->operands[i].off - lhs_ops[0].off;
2246 i--, j--;
2250 /* i now points to the first additional op.
2251 ??? LHS may not be completely contained in VR, one or more
2252 VIEW_CONVERT_EXPRs could be in its way. We could at least
2253 try handling outermost VIEW_CONVERT_EXPRs. */
2254 if (j != -1)
2255 return (void *)-1;
2257 /* Punt if the additional ops contain a storage order barrier. */
2258 for (k = i; k >= 0; k--)
2260 vro = &vr->operands[k];
2261 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2262 return (void *)-1;
2265 /* Now re-write REF to be based on the rhs of the assignment. */
2266 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2268 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2269 if (maybe_ne (extra_off, 0))
2271 if (rhs.length () < 2)
2272 return (void *)-1;
2273 int ix = rhs.length () - 2;
2274 if (rhs[ix].opcode != MEM_REF
2275 || known_eq (rhs[ix].off, -1))
2276 return (void *)-1;
2277 rhs[ix].off += extra_off;
2278 rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
2279 build_int_cst (TREE_TYPE (rhs[ix].op0),
2280 extra_off));
2283 /* We need to pre-pend vr->operands[0..i] to rhs. */
2284 vec<vn_reference_op_s> old = vr->operands;
2285 if (i + 1 + rhs.length () > vr->operands.length ())
2286 vr->operands.safe_grow (i + 1 + rhs.length ());
2287 else
2288 vr->operands.truncate (i + 1 + rhs.length ());
2289 FOR_EACH_VEC_ELT (rhs, j, vro)
2290 vr->operands[i + 1 + j] = *vro;
2291 vr->operands = valueize_refs (vr->operands);
2292 if (old == shared_lookup_references)
2293 shared_lookup_references = vr->operands;
2294 vr->hashcode = vn_reference_compute_hash (vr);
2296 /* Try folding the new reference to a constant. */
2297 tree val = fully_constant_vn_reference_p (vr);
2298 if (val)
2299 return vn_reference_lookup_or_insert_for_pieces
2300 (vuse, vr->set, vr->type, vr->operands, val);
2302 /* Adjust *ref from the new operands. */
2303 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2304 return (void *)-1;
2305 /* This can happen with bitfields. */
2306 if (maybe_ne (ref->size, r.size))
2307 return (void *)-1;
2308 *ref = r;
2310 /* Do not update last seen VUSE after translating. */
2311 last_vuse_ptr = NULL;
2313 /* Keep looking for the adjusted *REF / VR pair. */
2314 return NULL;
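/* For illustration, a hypothetical fragment that case 5) handles:

     struct S { int x; int y; };
     int f (struct S *p)
     {
       struct S tmp = *p;   // aggregate copy that kills tmp
       return tmp.x;        // the lookup continues as a lookup of p->x
     }
*/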
2317 /* 6) For memcpy copies translate the reference through them if
2318 the copy kills ref. */
2319 else if (vn_walk_kind == VN_WALKREWRITE
2320 && is_gimple_reg_type (vr->type)
2321 /* ??? Handle BCOPY as well. */
2322 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2323 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2324 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2325 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2326 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2327 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2328 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2329 && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size))
2331 tree lhs, rhs;
2332 ao_ref r;
2333 poly_int64 rhs_offset, lhs_offset;
2334 vn_reference_op_s op;
2335 poly_uint64 mem_offset;
2336 poly_int64 at, byte_maxsize;
2338 /* Only handle non-variable, addressable refs. */
2339 if (maybe_ne (ref->size, maxsize)
2340 || !multiple_p (offset, BITS_PER_UNIT, &at)
2341 || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
2342 return (void *)-1;
2344 /* Extract a pointer base and an offset for the destination. */
2345 lhs = gimple_call_arg (def_stmt, 0);
2346 lhs_offset = 0;
2347 if (TREE_CODE (lhs) == SSA_NAME)
2349 lhs = SSA_VAL (lhs);
2350 if (TREE_CODE (lhs) == SSA_NAME)
2352 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2353 if (gimple_assign_single_p (def_stmt)
2354 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2355 lhs = gimple_assign_rhs1 (def_stmt);
2358 if (TREE_CODE (lhs) == ADDR_EXPR)
2360 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2361 &lhs_offset);
2362 if (!tem)
2363 return (void *)-1;
2364 if (TREE_CODE (tem) == MEM_REF
2365 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2367 lhs = TREE_OPERAND (tem, 0);
2368 if (TREE_CODE (lhs) == SSA_NAME)
2369 lhs = SSA_VAL (lhs);
2370 lhs_offset += mem_offset;
2372 else if (DECL_P (tem))
2373 lhs = build_fold_addr_expr (tem);
2374 else
2375 return (void *)-1;
2377 if (TREE_CODE (lhs) != SSA_NAME
2378 && TREE_CODE (lhs) != ADDR_EXPR)
2379 return (void *)-1;
2381 /* Extract a pointer base and an offset for the source. */
2382 rhs = gimple_call_arg (def_stmt, 1);
2383 rhs_offset = 0;
2384 if (TREE_CODE (rhs) == SSA_NAME)
2385 rhs = SSA_VAL (rhs);
2386 if (TREE_CODE (rhs) == ADDR_EXPR)
2388 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2389 &rhs_offset);
2390 if (!tem)
2391 return (void *)-1;
2392 if (TREE_CODE (tem) == MEM_REF
2393 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2395 rhs = TREE_OPERAND (tem, 0);
2396 rhs_offset += mem_offset;
2398 else if (DECL_P (tem)
2399 || TREE_CODE (tem) == STRING_CST)
2400 rhs = build_fold_addr_expr (tem);
2401 else
2402 return (void *)-1;
2404 if (TREE_CODE (rhs) != SSA_NAME
2405 && TREE_CODE (rhs) != ADDR_EXPR)
2406 return (void *)-1;
2408 /* The bases of the destination and the reference have to agree. */
2409 if (TREE_CODE (base) == MEM_REF)
2411 if (TREE_OPERAND (base, 0) != lhs
2412 || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
2413 return (void *) -1;
2414 at += mem_offset;
2416 else if (!DECL_P (base)
2417 || TREE_CODE (lhs) != ADDR_EXPR
2418 || TREE_OPERAND (lhs, 0) != base)
2419 return (void *)-1;
2421 /* If the access is completely outside of the memcpy destination
2422 area there is no aliasing. */
2423 if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
2424 return NULL;
2425 /* And the access has to be contained within the memcpy destination. */
2426 if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
2427 return (void *)-1;
2429 /* Make room for 2 operands in the new reference. */
2430 if (vr->operands.length () < 2)
2432 vec<vn_reference_op_s> old = vr->operands;
2433 vr->operands.safe_grow_cleared (2);
2434 if (old == shared_lookup_references)
2435 shared_lookup_references = vr->operands;
2437 else
2438 vr->operands.truncate (2);
2440 /* The looked-through reference is a simple MEM_REF. */
2441 memset (&op, 0, sizeof (op));
2442 op.type = vr->type;
2443 op.opcode = MEM_REF;
2444 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2445 op.off = at - lhs_offset + rhs_offset;
2446 vr->operands[0] = op;
2447 op.type = TREE_TYPE (rhs);
2448 op.opcode = TREE_CODE (rhs);
2449 op.op0 = rhs;
2450 op.off = -1;
2451 vr->operands[1] = op;
2452 vr->hashcode = vn_reference_compute_hash (vr);
2454 /* Try folding the new reference to a constant. */
2455 tree val = fully_constant_vn_reference_p (vr);
2456 if (val)
2457 return vn_reference_lookup_or_insert_for_pieces
2458 (vuse, vr->set, vr->type, vr->operands, val);
2460 /* Adjust *ref from the new operands. */
2461 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2462 return (void *)-1;
2463 /* This can happen with bitfields. */
2464 if (maybe_ne (ref->size, r.size))
2465 return (void *)-1;
2466 *ref = r;
2468 /* Do not update last seen VUSE after translating. */
2469 last_vuse_ptr = NULL;
2471 /* Keep looking for the adjusted *REF / VR pair. */
2472 return NULL;
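/* For illustration, a hypothetical fragment that case 6) handles:

     #include <string.h>
     struct S { int x; int y; };
     int f (struct S *dst, struct S *src)
     {
       memcpy (dst, src, sizeof (struct S));
       return dst->x;   // rewritten into a lookup of src->x
     }
*/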
2475 /* Bail out and stop walking. */
2476 return (void *)-1;
2479 /* Return a reference op vector from OP that can be used for
2480 vn_reference_lookup_pieces. The caller is responsible for releasing
2481 the vector. */
2483 vec<vn_reference_op_s>
2484 vn_reference_operands_for_lookup (tree op)
2486 bool valueized;
2487 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2490 /* Lookup a reference operation by its parts in the current hash table.
2491 Returns the resulting value number if it exists in the hash table,
2492 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2493 vn_reference_t stored in the hashtable if something is found. */
2495 tree
2496 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2497 vec<vn_reference_op_s> operands,
2498 vn_reference_t *vnresult, vn_lookup_kind kind)
2500 struct vn_reference_s vr1;
2501 vn_reference_t tmp;
2502 tree cst;
2504 if (!vnresult)
2505 vnresult = &tmp;
2506 *vnresult = NULL;
2508 vr1.vuse = vuse_ssa_val (vuse);
2509 shared_lookup_references.truncate (0);
2510 shared_lookup_references.safe_grow (operands.length ());
2511 memcpy (shared_lookup_references.address (),
2512 operands.address (),
2513 sizeof (vn_reference_op_s)
2514 * operands.length ());
2515 vr1.operands = operands = shared_lookup_references
2516 = valueize_refs (shared_lookup_references);
2517 vr1.type = type;
2518 vr1.set = set;
2519 vr1.hashcode = vn_reference_compute_hash (&vr1);
2520 if ((cst = fully_constant_vn_reference_p (&vr1)))
2521 return cst;
2523 vn_reference_lookup_1 (&vr1, vnresult);
2524 if (!*vnresult
2525 && kind != VN_NOWALK
2526 && vr1.vuse)
2528 ao_ref r;
2529 vn_walk_kind = kind;
2530 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2531 *vnresult =
2532 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2533 vn_reference_lookup_2,
2534 vn_reference_lookup_3,
2535 vuse_ssa_val, &vr1);
2536 gcc_checking_assert (vr1.operands == shared_lookup_references);
2539 if (*vnresult)
2540 return (*vnresult)->result;
2542 return NULL_TREE;
2545 /* Lookup OP in the current hash table, and return the resulting value
2546 number if it exists in the hash table. Return NULL_TREE if it does
2547 not exist in the hash table or if the result field of the structure
2548 was NULL. VNRESULT will be filled in with the vn_reference_t
2549 stored in the hashtable if one exists. When TBAA_P is false assume
2550 we are looking up a store and treat it as having alias-set zero. */
2552 tree
2553 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2554 vn_reference_t *vnresult, bool tbaa_p)
2556 vec<vn_reference_op_s> operands;
2557 struct vn_reference_s vr1;
2558 tree cst;
2559 bool valuezied_anything;
2561 if (vnresult)
2562 *vnresult = NULL;
2564 vr1.vuse = vuse_ssa_val (vuse);
2565 vr1.operands = operands
2566 = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
2567 vr1.type = TREE_TYPE (op);
2568 vr1.set = tbaa_p ? get_alias_set (op) : 0;
2569 vr1.hashcode = vn_reference_compute_hash (&vr1);
2570 if ((cst = fully_constant_vn_reference_p (&vr1)))
2571 return cst;
2573 if (kind != VN_NOWALK
2574 && vr1.vuse)
2576 vn_reference_t wvnresult;
2577 ao_ref r;
2578 /* Make sure to use a valueized reference if we valueized anything.
2579 Otherwise preserve the full reference for advanced TBAA. */
2580 if (!valuezied_anything
2581 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2582 vr1.operands))
2583 ao_ref_init (&r, op);
2584 if (! tbaa_p)
2585 r.ref_alias_set = r.base_alias_set = 0;
2586 vn_walk_kind = kind;
2587 wvnresult =
2588 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2589 vn_reference_lookup_2,
2590 vn_reference_lookup_3,
2591 vuse_ssa_val, &vr1);
2592 gcc_checking_assert (vr1.operands == shared_lookup_references);
2593 if (wvnresult)
2595 if (vnresult)
2596 *vnresult = wvnresult;
2597 return wvnresult->result;
2600 return NULL_TREE;
2603 return vn_reference_lookup_1 (&vr1, vnresult);
2606 /* Lookup CALL in the current hash table and return the entry in
2607 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2609 void
2610 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2611 vn_reference_t vr)
2613 if (vnresult)
2614 *vnresult = NULL;
2616 tree vuse = gimple_vuse (call);
2618 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2619 vr->operands = valueize_shared_reference_ops_from_call (call);
2620 vr->type = gimple_expr_type (call);
2621 vr->set = 0;
2622 vr->hashcode = vn_reference_compute_hash (vr);
2623 vn_reference_lookup_1 (vr, vnresult);
2626 /* Insert OP into the current hash table with a value number of
2627 RESULT, and return the resulting reference structure we created. */
2629 static vn_reference_t
2630 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2632 vn_reference_s **slot;
2633 vn_reference_t vr1;
2634 bool tem;
2636 vr1 = current_info->references_pool->allocate ();
2637 if (TREE_CODE (result) == SSA_NAME)
2638 vr1->value_id = VN_INFO (result)->value_id;
2639 else
2640 vr1->value_id = get_or_alloc_constant_value_id (result);
2641 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2642 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2643 vr1->type = TREE_TYPE (op);
2644 vr1->set = get_alias_set (op);
2645 vr1->hashcode = vn_reference_compute_hash (vr1);
2646 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2647 vr1->result_vdef = vdef;
2649 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2650 INSERT);
2652 /* Because we lookup stores using vuses, and value number failures
2653 using the vdefs (see visit_reference_op_store for how and why),
2654 it's possible that on failure we may try to insert an already
2655 inserted store. This is not wrong; there is no SSA name for a
2656 store that we could use as a differentiator anyway. Thus, unlike
2657 the other lookup functions, you cannot gcc_assert (!*slot)
2658 here. */
2660 /* But free the old slot in case of a collision. */
2661 if (*slot)
2662 free_reference (*slot);
2664 *slot = vr1;
2665 return vr1;
2668 /* Insert a reference by its pieces into the current hash table with
2669 a value number of RESULT. Return the resulting reference
2670 structure we created. */
2672 vn_reference_t
2673 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2674 vec<vn_reference_op_s> operands,
2675 tree result, unsigned int value_id)
2678 vn_reference_s **slot;
2679 vn_reference_t vr1;
2681 vr1 = current_info->references_pool->allocate ();
2682 vr1->value_id = value_id;
2683 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2684 vr1->operands = valueize_refs (operands);
2685 vr1->type = type;
2686 vr1->set = set;
2687 vr1->hashcode = vn_reference_compute_hash (vr1);
2688 if (result && TREE_CODE (result) == SSA_NAME)
2689 result = SSA_VAL (result);
2690 vr1->result = result;
2692 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2693 INSERT);
2695 /* At this point we should have all the things inserted that we have
2696 seen before, and we should never try inserting something that
2697 already exists. */
2698 gcc_assert (!*slot);
2699 if (*slot)
2700 free_reference (*slot);
2702 *slot = vr1;
2703 return vr1;
2706 /* Compute and return the hash value for nary operation VBO1. */
2708 static hashval_t
2709 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2711 inchash::hash hstate;
2712 unsigned i;
2714 for (i = 0; i < vno1->length; ++i)
2715 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2716 vno1->op[i] = SSA_VAL (vno1->op[i]);
2718 if (((vno1->length == 2
2719 && commutative_tree_code (vno1->opcode))
2720 || (vno1->length == 3
2721 && commutative_ternary_tree_code (vno1->opcode)))
2722 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2723 std::swap (vno1->op[0], vno1->op[1]);
2724 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2725 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2727 std::swap (vno1->op[0], vno1->op[1]);
2728 vno1->opcode = swap_tree_comparison (vno1->opcode);
2731 hstate.add_int (vno1->opcode);
2732 for (i = 0; i < vno1->length; ++i)
2733 inchash::add_expr (vno1->op[i], hstate);
2735 return hstate.end ();
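/* For illustration: because operands of commutative codes are put into a
   canonical order before hashing, hypothetical statements like

     c_1 = a_2 + b_3;
     d_4 = b_3 + a_2;

   hash and compare equal, and likewise a_2 < b_3 versus b_3 > a_2 via
   swap_tree_comparison.  */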
2738 /* Compare nary operations VNO1 and VNO2 and return true if they are
2739 equivalent. */
2741 bool
2742 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2744 unsigned i;
2746 if (vno1->hashcode != vno2->hashcode)
2747 return false;
2749 if (vno1->length != vno2->length)
2750 return false;
2752 if (vno1->opcode != vno2->opcode
2753 || !types_compatible_p (vno1->type, vno2->type))
2754 return false;
2756 for (i = 0; i < vno1->length; ++i)
2757 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2758 return false;
2760 /* BIT_INSERT_EXPR has an implicit operand as the type precision
2761 of op1. Need to check to make sure they are the same. */
2762 if (vno1->opcode == BIT_INSERT_EXPR
2763 && TREE_CODE (vno1->op[1]) == INTEGER_CST
2764 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
2765 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
2766 return false;
2768 return true;
2771 /* Initialize VNO from the pieces provided. */
2773 static void
2774 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2775 enum tree_code code, tree type, tree *ops)
2777 vno->opcode = code;
2778 vno->length = length;
2779 vno->type = type;
2780 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2783 /* Initialize VNO from OP. */
2785 static void
2786 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2788 unsigned i;
2790 vno->opcode = TREE_CODE (op);
2791 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2792 vno->type = TREE_TYPE (op);
2793 for (i = 0; i < vno->length; ++i)
2794 vno->op[i] = TREE_OPERAND (op, i);
2797 /* Return the number of operands for a vn_nary ops structure from STMT. */
2799 static unsigned int
2800 vn_nary_length_from_stmt (gimple *stmt)
2802 switch (gimple_assign_rhs_code (stmt))
2804 case REALPART_EXPR:
2805 case IMAGPART_EXPR:
2806 case VIEW_CONVERT_EXPR:
2807 return 1;
2809 case BIT_FIELD_REF:
2810 return 3;
2812 case CONSTRUCTOR:
2813 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2815 default:
2816 return gimple_num_ops (stmt) - 1;
2820 /* Initialize VNO from STMT. */
2822 static void
2823 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2825 unsigned i;
2827 vno->opcode = gimple_assign_rhs_code (stmt);
2828 vno->type = gimple_expr_type (stmt);
2829 switch (vno->opcode)
2831 case REALPART_EXPR:
2832 case IMAGPART_EXPR:
2833 case VIEW_CONVERT_EXPR:
2834 vno->length = 1;
2835 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2836 break;
2838 case BIT_FIELD_REF:
2839 vno->length = 3;
2840 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2841 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2842 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2843 break;
2845 case CONSTRUCTOR:
2846 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2847 for (i = 0; i < vno->length; ++i)
2848 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2849 break;
2851 default:
2852 gcc_checking_assert (!gimple_assign_single_p (stmt));
2853 vno->length = gimple_num_ops (stmt) - 1;
2854 for (i = 0; i < vno->length; ++i)
2855 vno->op[i] = gimple_op (stmt, i + 1);
2859 /* Compute the hashcode for VNO and look for it in the hash table;
2860 return the resulting value number if it exists in the hash table.
2861 Return NULL_TREE if it does not exist in the hash table or if the
2862 result field of the operation is NULL. VNRESULT will contain the
2863 vn_nary_op_t from the hashtable if it exists. */
2865 static tree
2866 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2868 vn_nary_op_s **slot;
2870 if (vnresult)
2871 *vnresult = NULL;
2873 vno->hashcode = vn_nary_op_compute_hash (vno);
2874 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2875 NO_INSERT);
2876 if (!slot && current_info == optimistic_info)
2877 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2878 NO_INSERT);
2879 if (!slot)
2880 return NULL_TREE;
2881 if (vnresult)
2882 *vnresult = *slot;
2883 return (*slot)->result;
2886 /* Lookup an n-ary operation by its pieces and return the resulting value
2887 number if it exists in the hash table. Return NULL_TREE if it does
2888 not exist in the hash table or if the result field of the operation
2889 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2890 if it exists. */
2892 tree
2893 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2894 tree type, tree *ops, vn_nary_op_t *vnresult)
2896 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2897 sizeof_vn_nary_op (length));
2898 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2899 return vn_nary_op_lookup_1 (vno1, vnresult);
2902 /* Lookup OP in the current hash table, and return the resulting value
2903 number if it exists in the hash table. Return NULL_TREE if it does
2904 not exist in the hash table or if the result field of the operation
2905 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2906 if it exists. */
2908 tree
2909 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2911 vn_nary_op_t vno1
2912 = XALLOCAVAR (struct vn_nary_op_s,
2913 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2914 init_vn_nary_op_from_op (vno1, op);
2915 return vn_nary_op_lookup_1 (vno1, vnresult);
2918 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2919 value number if it exists in the hash table. Return NULL_TREE if
2920 it does not exist in the hash table. VNRESULT will contain the
2921 vn_nary_op_t from the hashtable if it exists. */
2923 tree
2924 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2926 vn_nary_op_t vno1
2927 = XALLOCAVAR (struct vn_nary_op_s,
2928 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2929 init_vn_nary_op_from_stmt (vno1, stmt);
2930 return vn_nary_op_lookup_1 (vno1, vnresult);
2933 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2935 static vn_nary_op_t
2936 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2938 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2941 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2942 obstack. */
2944 static vn_nary_op_t
2945 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2947 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2948 &current_info->nary_obstack);
2950 vno1->value_id = value_id;
2951 vno1->length = length;
2952 vno1->result = result;
2954 return vno1;
2957 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2958 VNO->HASHCODE first. */
2960 static vn_nary_op_t
2961 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2962 bool compute_hash)
2964 vn_nary_op_s **slot;
2966 if (compute_hash)
2967 vno->hashcode = vn_nary_op_compute_hash (vno);
2969 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2970 /* While we do not want to insert things twice, it's awkward to
2971 avoid it in the case where visit_nary_op pattern-matches stuff
2972 and ends up simplifying the replacement to itself. We then
2973 get two inserts, one from visit_nary_op and one from
2974 vn_nary_build_or_lookup.
2975 So allow inserts with the same value number. */
2976 if (*slot && (*slot)->result == vno->result)
2977 return *slot;
2979 gcc_assert (!*slot);
2981 *slot = vno;
2982 return vno;
2985 /* Insert an n-ary operation into the current hash table using its
2986 pieces. Return the vn_nary_op_t structure we created and put in
2987 the hashtable. */
2989 vn_nary_op_t
2990 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2991 tree type, tree *ops,
2992 tree result, unsigned int value_id)
2994 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2995 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2996 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2999 /* Insert OP into the current hash table with a value number of
3000 RESULT. Return the vn_nary_op_t structure we created and put in
3001 the hashtable. */
3003 vn_nary_op_t
3004 vn_nary_op_insert (tree op, tree result)
3006 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
3007 vn_nary_op_t vno1;
3009 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
3010 init_vn_nary_op_from_op (vno1, op);
3011 return vn_nary_op_insert_into (vno1, current_info->nary, true);
3014 /* Insert the rhs of STMT into the current hash table with a value number of
3015 RESULT. */
3017 static vn_nary_op_t
3018 vn_nary_op_insert_stmt (gimple *stmt, tree result)
3020 vn_nary_op_t vno1
3021 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
3022 result, VN_INFO (result)->value_id);
3023 init_vn_nary_op_from_stmt (vno1, stmt);
3024 return vn_nary_op_insert_into (vno1, current_info->nary, true);
3027 /* Compute a hashcode for PHI operation VP1 and return it. */
3029 static inline hashval_t
3030 vn_phi_compute_hash (vn_phi_t vp1)
3032 inchash::hash hstate (vp1->phiargs.length () > 2
3033 ? vp1->block->index : vp1->phiargs.length ());
3034 tree phi1op;
3035 tree type;
3036 edge e;
3037 edge_iterator ei;
3039 /* If all PHI arguments are constants we need to distinguish
3040 the PHI node via its type. */
3041 type = vp1->type;
3042 hstate.merge_hash (vn_hash_type (type));
3044 FOR_EACH_EDGE (e, ei, vp1->block->preds)
3046 /* Don't hash backedge values; they need to be handled as VN_TOP
3047 for optimistic value-numbering. */
3048 if (e->flags & EDGE_DFS_BACK)
3049 continue;
3051 phi1op = vp1->phiargs[e->dest_idx];
3052 if (phi1op == VN_TOP)
3053 continue;
3054 inchash::add_expr (phi1op, hstate);
3057 return hstate.end ();
3061 /* Return true if COND1 and COND2 represent the same condition, set
3062 *INVERTED_P if one needs to be inverted to make it the same as
3063 the other. */
3065 static bool
3066 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
3067 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
3069 enum tree_code code1 = gimple_cond_code (cond1);
3070 enum tree_code code2 = gimple_cond_code (cond2);
3072 *inverted_p = false;
3073 if (code1 == code2)
3075 else if (code1 == swap_tree_comparison (code2))
3076 std::swap (lhs2, rhs2);
3077 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
3078 *inverted_p = true;
3079 else if (code1 == invert_tree_comparison
3080 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
3082 std::swap (lhs2, rhs2);
3083 *inverted_p = true;
3085 else
3086 return false;
3088 return ((expressions_equal_p (lhs1, lhs2)
3089 && expressions_equal_p (rhs1, rhs2))
3090 || (commutative_tree_code (code1)
3091 && expressions_equal_p (lhs1, rhs2)
3092 && expressions_equal_p (rhs1, lhs2)));
3095 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
3097 static int
3098 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
3100 if (vp1->hashcode != vp2->hashcode)
3101 return false;
3103 if (vp1->block != vp2->block)
3105 if (vp1->phiargs.length () != vp2->phiargs.length ())
3106 return false;
3108 switch (vp1->phiargs.length ())
3110 case 1:
3111 /* Single-arg PHIs are just copies. */
3112 break;
3114 case 2:
3116 /* Rule out backedges into the PHI. */
3117 if (vp1->block->loop_father->header == vp1->block
3118 || vp2->block->loop_father->header == vp2->block)
3119 return false;
3121 /* If the PHI nodes do not have compatible types
3122 they are not the same. */
3123 if (!types_compatible_p (vp1->type, vp2->type))
3124 return false;
3126 basic_block idom1
3127 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3128 basic_block idom2
3129 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
3130 /* If the immediate dominators end in switch stmts, multiple
3131 values may end up in the same PHI arg via intermediate
3132 CFG merges. */
3133 if (EDGE_COUNT (idom1->succs) != 2
3134 || EDGE_COUNT (idom2->succs) != 2)
3135 return false;
3137 /* Verify the controlling stmt is the same. */
3138 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3139 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3140 if (! last1 || ! last2)
3141 return false;
3142 bool inverted_p;
3143 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3144 last2, vp2->cclhs, vp2->ccrhs,
3145 &inverted_p))
3146 return false;
3148 /* Get at true/false controlled edges into the PHI. */
3149 edge te1, te2, fe1, fe2;
3150 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3151 &te1, &fe1)
3152 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3153 &te2, &fe2))
3154 return false;
3156 /* Swap edges if the second condition is the inverted of the
3157 first. */
3158 if (inverted_p)
3159 std::swap (te2, fe2);
3161 /* ??? Handle VN_TOP specially. */
3162 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3163 vp2->phiargs[te2->dest_idx])
3164 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3165 vp2->phiargs[fe2->dest_idx]))
3166 return false;
3168 return true;
3171 default:
3172 return false;
3176 /* If the PHI nodes do not have compatible types
3177 they are not the same. */
3178 if (!types_compatible_p (vp1->type, vp2->type))
3179 return false;
3181 /* Any phi in the same block will have its arguments in the
3182 same edge order, because of how we store phi nodes. */
3183 int i;
3184 tree phi1op;
3185 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
3187 tree phi2op = vp2->phiargs[i];
3188 if (phi1op == VN_TOP || phi2op == VN_TOP)
3189 continue;
3190 if (!expressions_equal_p (phi1op, phi2op))
3191 return false;
3194 return true;
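/* For illustration, two hypothetical two-argument PHIs that compare equal
   under the rule above:

     # x_1 = PHI <a_2(3), b_4(4)>    // controlled by if (p_5 != 0)
     # y_6 = PHI <b_4(5), a_2(6)>    // controlled by if (p_5 == 0)

   The controlling conditions are inverses of each other, so after swapping
   the true and false edges the arguments match and both PHIs receive the
   same value number.  */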
3197 static vec<tree> shared_lookup_phiargs;
3199 /* Lookup PHI in the current hash table, and return the resulting
3200 value number if it exists in the hash table. Return NULL_TREE if
3201 it does not exist in the hash table. */
3203 static tree
3204 vn_phi_lookup (gimple *phi)
3206 vn_phi_s **slot;
3207 struct vn_phi_s vp1;
3208 edge e;
3209 edge_iterator ei;
3211 shared_lookup_phiargs.truncate (0);
3212 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
3214 /* Canonicalize the SSA_NAME's to their value number. */
3215 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3217 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3218 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3219 shared_lookup_phiargs[e->dest_idx] = def;
3221 vp1.type = TREE_TYPE (gimple_phi_result (phi));
3222 vp1.phiargs = shared_lookup_phiargs;
3223 vp1.block = gimple_bb (phi);
3224 /* Extract values of the controlling condition. */
3225 vp1.cclhs = NULL_TREE;
3226 vp1.ccrhs = NULL_TREE;
3227 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1.block);
3228 if (EDGE_COUNT (idom1->succs) == 2)
3229 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3231 vp1.cclhs = vn_valueize (gimple_cond_lhs (last1));
3232 vp1.ccrhs = vn_valueize (gimple_cond_rhs (last1));
3234 vp1.hashcode = vn_phi_compute_hash (&vp1);
3235 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3236 NO_INSERT);
3237 if (!slot && current_info == optimistic_info)
3238 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3239 NO_INSERT);
3240 if (!slot)
3241 return NULL_TREE;
3242 return (*slot)->result;
3245 /* Insert PHI into the current hash table with a value number of
3246 RESULT. */
3248 static vn_phi_t
3249 vn_phi_insert (gimple *phi, tree result)
3251 vn_phi_s **slot;
3252 vn_phi_t vp1 = current_info->phis_pool->allocate ();
3253 vec<tree> args = vNULL;
3254 edge e;
3255 edge_iterator ei;
3257 args.safe_grow (gimple_phi_num_args (phi));
3259 /* Canonicalize the SSA_NAME's to their value number. */
3260 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3262 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3263 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3264 args[e->dest_idx] = def;
3266 vp1->value_id = VN_INFO (result)->value_id;
3267 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3268 vp1->phiargs = args;
3269 vp1->block = gimple_bb (phi);
3270 /* Extract values of the controlling condition. */
3271 vp1->cclhs = NULL_TREE;
3272 vp1->ccrhs = NULL_TREE;
3273 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3274 if (EDGE_COUNT (idom1->succs) == 2)
3275 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3277 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3278 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3280 vp1->result = result;
3281 vp1->hashcode = vn_phi_compute_hash (vp1);
3283 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3285 /* Because we iterate over phi operations more than once, it's
3286 possible the slot might already exist here, hence no assert. */
3287 *slot = vp1;
3288 return vp1;
3292 /* Print set of components in strongly connected component SCC to OUT. */
3294 static void
3295 print_scc (FILE *out, vec<tree> scc)
3297 tree var;
3298 unsigned int i;
3300 fprintf (out, "SCC consists of %u:", scc.length ());
3301 FOR_EACH_VEC_ELT (scc, i, var)
3303 fprintf (out, " ");
3304 print_generic_expr (out, var);
3306 fprintf (out, "\n");
3309 /* Return true if BB1 is dominated by BB2 taking into account edges
3310 that are not executable. */
3312 static bool
3313 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3315 edge_iterator ei;
3316 edge e;
3318 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3319 return true;
3321 /* Before iterating we'd like to know if there exists an
3322 (executable) path from bb2 to bb1 at all; if not we can
3323 directly return false. For now simply iterate once. */
3325 /* Iterate to the single executable bb1 predecessor. */
3326 if (EDGE_COUNT (bb1->preds) > 1)
3328 edge prede = NULL;
3329 FOR_EACH_EDGE (e, ei, bb1->preds)
3330 if (e->flags & EDGE_EXECUTABLE)
3332 if (prede)
3334 prede = NULL;
3335 break;
3337 prede = e;
3339 if (prede)
3341 bb1 = prede->src;
3343 /* Re-do the dominance check with changed bb1. */
3344 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3345 return true;
3349 /* Iterate to the single executable bb2 successor. */
3350 edge succe = NULL;
3351 FOR_EACH_EDGE (e, ei, bb2->succs)
3352 if (e->flags & EDGE_EXECUTABLE)
3354 if (succe)
3356 succe = NULL;
3357 break;
3359 succe = e;
3361 if (succe)
3363 /* Verify the reached block is only reached through succe.
3364 If there is only one edge we can spare us the dominator
3365 check and iterate directly. */
3366 if (EDGE_COUNT (succe->dest->preds) > 1)
3368 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3369 if (e != succe
3370 && (e->flags & EDGE_EXECUTABLE))
3372 succe = NULL;
3373 break;
3376 if (succe)
3378 bb2 = succe->dest;
3380 /* Re-do the dominance check with changed bb2. */
3381 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3382 return true;
3386 /* We could now iterate updating bb1 / bb2. */
3387 return false;
3390 /* Set the value number of FROM to TO, return true if it has changed
3391 as a result. */
3393 static inline bool
3394 set_ssa_val_to (tree from, tree to)
3396 tree currval = SSA_VAL (from);
3397 poly_int64 toff, coff;
3399 /* The only thing we allow as value numbers are ssa_names
3400 and invariants. So assert that here. We don't allow VN_TOP
3401 as visiting a stmt should produce a value-number other than
3402 that.
3403 ??? Still VN_TOP can happen for unreachable code, so force
3404 it to varying in that case. Not all code is prepared to
3405 get VN_TOP on valueization. */
3406 if (to == VN_TOP)
3408 if (dump_file && (dump_flags & TDF_DETAILS))
3409 fprintf (dump_file, "Forcing value number to varying on "
3410 "receiving VN_TOP\n");
3411 to = from;
3414 gcc_assert (to != NULL_TREE
3415 && ((TREE_CODE (to) == SSA_NAME
3416 && (to == from || SSA_VAL (to) == to))
3417 || is_gimple_min_invariant (to)));
3419 if (from != to)
3421 if (currval == from)
3423 if (dump_file && (dump_flags & TDF_DETAILS))
3425 fprintf (dump_file, "Not changing value number of ");
3426 print_generic_expr (dump_file, from);
3427 fprintf (dump_file, " from VARYING to ");
3428 print_generic_expr (dump_file, to);
3429 fprintf (dump_file, "\n");
3431 return false;
3433 else if (currval != VN_TOP
3434 && ! is_gimple_min_invariant (currval)
3435 && is_gimple_min_invariant (to))
3437 if (dump_file && (dump_flags & TDF_DETAILS))
3439 fprintf (dump_file, "Forcing VARYING instead of changing "
3440 "value number of ");
3441 print_generic_expr (dump_file, from);
3442 fprintf (dump_file, " from ");
3443 print_generic_expr (dump_file, currval);
3444 fprintf (dump_file, " (non-constant) to ");
3445 print_generic_expr (dump_file, to);
3446 fprintf (dump_file, " (constant)\n");
3448 to = from;
3450 else if (TREE_CODE (to) == SSA_NAME
3451 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3452 to = from;
3455 if (dump_file && (dump_flags & TDF_DETAILS))
3457 fprintf (dump_file, "Setting value number of ");
3458 print_generic_expr (dump_file, from);
3459 fprintf (dump_file, " to ");
3460 print_generic_expr (dump_file, to);
3463 if (currval != to
3464 && !operand_equal_p (currval, to, 0)
3465 /* Different undefined SSA names are not actually different. See
3466 PR82320 for a testcase where we'd otherwise not terminate iteration. */
3467 && !(TREE_CODE (currval) == SSA_NAME
3468 && TREE_CODE (to) == SSA_NAME
3469 && ssa_undefined_value_p (currval, false)
3470 && ssa_undefined_value_p (to, false))
3471 /* ??? For addresses involving volatile objects or types operand_equal_p
3472 does not reliably detect ADDR_EXPRs as equal. We know we are only
3473 getting invariant gimple addresses here, so can use
3474 get_addr_base_and_unit_offset to do this comparison. */
3475 && !(TREE_CODE (currval) == ADDR_EXPR
3476 && TREE_CODE (to) == ADDR_EXPR
3477 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3478 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3479 && known_eq (coff, toff)))
3481 if (dump_file && (dump_flags & TDF_DETAILS))
3482 fprintf (dump_file, " (changed)\n");
3484 /* If we equate two SSA names we have to make the side-band info
3485 of the leader conservative (and remember whatever original value
3486 was present). */
3487 if (TREE_CODE (to) == SSA_NAME)
3489 if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3490 && SSA_NAME_RANGE_INFO (to))
3492 if (SSA_NAME_IS_DEFAULT_DEF (to)
3493 || dominated_by_p_w_unex
3494 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3495 gimple_bb (SSA_NAME_DEF_STMT (to))))
3496 /* Keep the info from the dominator. */
3498 else
3500 /* Save old info. */
3501 if (! VN_INFO (to)->info.range_info)
3503 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3504 VN_INFO (to)->range_info_anti_range_p
3505 = SSA_NAME_ANTI_RANGE_P (to);
3507 /* Rather than allocating memory and unioning the info
3508 just clear it. */
3509 if (dump_file && (dump_flags & TDF_DETAILS))
3511 fprintf (dump_file, "clearing range info of ");
3512 print_generic_expr (dump_file, to);
3513 fprintf (dump_file, "\n");
3515 SSA_NAME_RANGE_INFO (to) = NULL;
3518 else if (POINTER_TYPE_P (TREE_TYPE (to))
3519 && SSA_NAME_PTR_INFO (to))
3521 if (SSA_NAME_IS_DEFAULT_DEF (to)
3522 || dominated_by_p_w_unex
3523 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3524 gimple_bb (SSA_NAME_DEF_STMT (to))))
3525 /* Keep the info from the dominator. */
3527 else if (! SSA_NAME_PTR_INFO (from)
3528 /* Handle the case of trivially equivalent info. */
3529 || memcmp (SSA_NAME_PTR_INFO (to),
3530 SSA_NAME_PTR_INFO (from),
3531 sizeof (ptr_info_def)) != 0)
3533 /* Save old info. */
3534 if (! VN_INFO (to)->info.ptr_info)
3535 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3536 /* Rather than allocating memory and unioning the info
3537 just clear it. */
3538 if (dump_file && (dump_flags & TDF_DETAILS))
3540 fprintf (dump_file, "clearing points-to info of ");
3541 print_generic_expr (dump_file, to);
3542 fprintf (dump_file, "\n");
3544 SSA_NAME_PTR_INFO (to) = NULL;
3549 VN_INFO (from)->valnum = to;
3550 return true;
3552 if (dump_file && (dump_flags & TDF_DETAILS))
3553 fprintf (dump_file, "\n");
3554 return false;
3557 /* Mark as processed all the definitions in the defining stmt of USE, or
3558 the USE itself. */
3560 static void
3561 mark_use_processed (tree use)
3563 ssa_op_iter iter;
3564 def_operand_p defp;
3565 gimple *stmt = SSA_NAME_DEF_STMT (use);
3567 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3569 VN_INFO (use)->use_processed = true;
3570 return;
3573 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3575 tree def = DEF_FROM_PTR (defp);
3577 VN_INFO (def)->use_processed = true;
3581 /* Set all definitions in STMT to value number to themselves.
3582 Return true if a value number changed. */
3584 static bool
3585 defs_to_varying (gimple *stmt)
3587 bool changed = false;
3588 ssa_op_iter iter;
3589 def_operand_p defp;
3591 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3593 tree def = DEF_FROM_PTR (defp);
3594 changed |= set_ssa_val_to (def, def);
3596 return changed;
3599 /* Visit a copy between LHS and RHS, return true if the value number
3600 changed. */
3602 static bool
3603 visit_copy (tree lhs, tree rhs)
3605 /* Valueize. */
3606 rhs = SSA_VAL (rhs);
3608 return set_ssa_val_to (lhs, rhs);
3611 /* Lookup a value of type WIDE_TYPE for OP, i.e. one that yields the value
3612 of OP when converted back to the type of OP. */
3614 static tree
3615 valueized_wider_op (tree wide_type, tree op)
3617 if (TREE_CODE (op) == SSA_NAME)
3618 op = SSA_VAL (op);
3620 /* Either we have the op widened available. */
3621 tree ops[3] = {};
3622 ops[0] = op;
3623 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
3624 wide_type, ops, NULL);
3625 if (tem)
3626 return tem;
3628 /* Or the op is truncated from some existing value. */
3629 if (TREE_CODE (op) == SSA_NAME)
3631 gimple *def = SSA_NAME_DEF_STMT (op);
3632 if (is_gimple_assign (def)
3633 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3635 tem = gimple_assign_rhs1 (def);
3636 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
3638 if (TREE_CODE (tem) == SSA_NAME)
3639 tem = SSA_VAL (tem);
3640 return tem;
3645 /* For constants simply extend it. */
3646 if (TREE_CODE (op) == INTEGER_CST)
3647 return wide_int_to_tree (wide_type, wi::to_wide (op));
3649 return NULL_TREE;
3652 /* Visit a nary operator RHS, value number it, and return true if the
3653 value number of LHS has changed as a result. */
3655 static bool
3656 visit_nary_op (tree lhs, gassign *stmt)
3658 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3659 if (result)
3660 return set_ssa_val_to (lhs, result);
3662 /* Do some special pattern matching for redundancies of operations
3663 in different types. */
3664 enum tree_code code = gimple_assign_rhs_code (stmt);
3665 tree type = TREE_TYPE (lhs);
3666 tree rhs1 = gimple_assign_rhs1 (stmt);
3667 switch (code)
3669 CASE_CONVERT:
3670 /* Match arithmetic done in a different type where we can easily
3671 substitute the result from some earlier sign-changed or widened
3672 operation. */
3673 if (INTEGRAL_TYPE_P (type)
3674 && TREE_CODE (rhs1) == SSA_NAME
3675 /* We only handle sign-changes or zero-extension -> & mask. */
3676 && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
3677 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
3678 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
3680 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
3681 if (def
3682 && (gimple_assign_rhs_code (def) == PLUS_EXPR
3683 || gimple_assign_rhs_code (def) == MINUS_EXPR
3684 || gimple_assign_rhs_code (def) == MULT_EXPR))
3686 tree ops[3] = {};
3687 /* Either we have the op widened available. */
3688 ops[0] = valueized_wider_op (type,
3689 gimple_assign_rhs1 (def));
3690 if (ops[0])
3691 ops[1] = valueized_wider_op (type,
3692 gimple_assign_rhs2 (def));
3693 if (ops[0] && ops[1])
3695 ops[0] = vn_nary_op_lookup_pieces
3696 (2, gimple_assign_rhs_code (def), type, ops, NULL);
3697 /* We have wider operation available. */
3698 if (ops[0])
3700 unsigned lhs_prec = TYPE_PRECISION (type);
3701 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
3702 if (lhs_prec == rhs_prec)
3704 gimple_match_op match_op (NOP_EXPR, type, ops[0]);
3705 result = vn_nary_build_or_lookup (&match_op);
3706 if (result)
3708 bool changed = set_ssa_val_to (lhs, result);
3709 vn_nary_op_insert_stmt (stmt, result);
3710 return changed;
3713 else
3715 tree mask = wide_int_to_tree
3716 (type, wi::mask (rhs_prec, false, lhs_prec));
3717 gimple_match_op match_op (BIT_AND_EXPR,
3718 TREE_TYPE (lhs),
3719 ops[0], mask);
3720 result = vn_nary_build_or_lookup (&match_op);
3721 if (result)
3723 bool changed = set_ssa_val_to (lhs, result);
3724 vn_nary_op_insert_stmt (stmt, result);
3725 return changed;
3732 default:;
3735 bool changed = set_ssa_val_to (lhs, lhs);
3736 vn_nary_op_insert_stmt (stmt, lhs);
3737 return changed;
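/* For illustration, a hypothetical fragment the CASE_CONVERT matching above
   can handle:

     unsigned long f (unsigned a, unsigned b, unsigned long *p)
     {
       *p = (unsigned long) a + (unsigned long) b;   // wider PLUS available
       unsigned t = a + b;
       return (unsigned long) t;   // reuses the wider PLUS, masked with
                                   // wi::mask to the 32-bit precision
     }
*/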
3740 /* Visit a call STMT storing into LHS. Return true if the value number
3741 of the LHS has changed as a result. */
3743 static bool
3744 visit_reference_op_call (tree lhs, gcall *stmt)
3746 bool changed = false;
3747 struct vn_reference_s vr1;
3748 vn_reference_t vnresult = NULL;
3749 tree vdef = gimple_vdef (stmt);
3751 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3752 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3753 lhs = NULL_TREE;
3755 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3756 if (vnresult)
3758 if (vnresult->result_vdef && vdef)
3759 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3760 else if (vdef)
3761 /* If the call was discovered to be pure or const reflect
3762 that as far as possible. */
3763 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
3765 if (!vnresult->result && lhs)
3766 vnresult->result = lhs;
3768 if (vnresult->result && lhs)
3769 changed |= set_ssa_val_to (lhs, vnresult->result);
3771 else
3773 vn_reference_t vr2;
3774 vn_reference_s **slot;
3775 tree vdef_val = vdef;
3776 if (vdef)
3778 /* If we value numbered an indirect call's function to
3779 one not clobbering memory, value number its VDEF to its
3780 VUSE. */
3781 tree fn = gimple_call_fn (stmt);
3782 if (fn && TREE_CODE (fn) == SSA_NAME)
3784 fn = SSA_VAL (fn);
3785 if (TREE_CODE (fn) == ADDR_EXPR
3786 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3787 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
3788 & (ECF_CONST | ECF_PURE)))
3789 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
3791 changed |= set_ssa_val_to (vdef, vdef_val);
3793 if (lhs)
3794 changed |= set_ssa_val_to (lhs, lhs);
3795 vr2 = current_info->references_pool->allocate ();
3796 vr2->vuse = vr1.vuse;
3797 /* As we are not walking the virtual operand chain we know the
3798 shared_lookup_references are still original so we can re-use
3799 them here. */
3800 vr2->operands = vr1.operands.copy ();
3801 vr2->type = vr1.type;
3802 vr2->set = vr1.set;
3803 vr2->hashcode = vr1.hashcode;
3804 vr2->result = lhs;
3805 vr2->result_vdef = vdef_val;
3806 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3807 INSERT);
3808 gcc_assert (!*slot);
3809 *slot = vr2;
3812 return changed;
3815 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3816 and return true if the value number of the LHS has changed as a result. */
3818 static bool
3819 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3821 bool changed = false;
3822 tree last_vuse;
3823 tree result;
3825 last_vuse = gimple_vuse (stmt);
3826 last_vuse_ptr = &last_vuse;
3827 result = vn_reference_lookup (op, gimple_vuse (stmt),
3828 default_vn_walk_kind, NULL, true);
3829 last_vuse_ptr = NULL;
3831 /* We handle type-punning through unions by value-numbering based
3832 on offset and size of the access. Be prepared to handle a
3833 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
3834 if (result
3835 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3837 /* We will be setting the value number of lhs to the value number
3838 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3839 So first simplify and lookup this expression to see if it
3840 is already available. */
3841 gimple_match_op res_op (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
3842 result = vn_nary_build_or_lookup (&res_op);
3845 if (result)
3846 changed = set_ssa_val_to (lhs, result);
3847 else
3849 changed = set_ssa_val_to (lhs, lhs);
3850 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3853 return changed;
3857 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3858 and return true if the value number of the LHS has changed as a result. */
3860 static bool
3861 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3863 bool changed = false;
3864 vn_reference_t vnresult = NULL;
3865 tree assign;
3866 bool resultsame = false;
3867 tree vuse = gimple_vuse (stmt);
3868 tree vdef = gimple_vdef (stmt);
3870 if (TREE_CODE (op) == SSA_NAME)
3871 op = SSA_VAL (op);
3873 /* First we want to look up using the *vuses* from the store and see
3874 whether the last store to this location with the same address
3875 had the same value.
3877 The vuses represent the memory state before the store. If the
3878 memory state, address, and value of the store are the same as for the
3879 last store to this location, then this store will produce the
3880 same memory state as that store.
3882 In this case the vdef versions for this store are value numbered to those
3883 vuse versions, since they represent the same memory state after
3884 this store.
3886 Otherwise, the vdefs for the store are used when inserting into
3887 the table, since the store generates a new memory state. */
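/* A minimal illustrative sketch (added; SSA and VDEF names are made up):
     *p_1 = x_2;    <-- VDEF .MEM_3
     ...            <-- nothing clobbers *p_1
     *p_1 = x_2;    <-- VUSE .MEM_3, VDEF .MEM_4
   The lookup below finds the earlier store of the same value, so .MEM_4
   is value numbered to .MEM_3 and the second store does not create a
   distinct memory state.  */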
3889 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
3890 if (vnresult
3891 && vnresult->result)
3893 tree result = vnresult->result;
3894 if (TREE_CODE (result) == SSA_NAME)
3895 result = SSA_VAL (result);
3896 resultsame = expressions_equal_p (result, op);
3897 if (resultsame)
3899 /* If the TBAA state isn't compatible for downstream reads
3900 we cannot value-number the VDEFs the same. */
3901 alias_set_type set = get_alias_set (lhs);
3902 if (vnresult->set != set
3903 && ! alias_set_subset_of (set, vnresult->set))
3904 resultsame = false;
3908 if (!resultsame)
3910 /* Only perform the following when being called from PRE
3911 which embeds tail merging. */
3912 if (default_vn_walk_kind == VN_WALK)
3914 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3915 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3916 if (vnresult)
3918 VN_INFO (vdef)->use_processed = true;
3919 return set_ssa_val_to (vdef, vnresult->result_vdef);
3923 if (dump_file && (dump_flags & TDF_DETAILS))
3925 fprintf (dump_file, "No store match\n");
3926 fprintf (dump_file, "Value numbering store ");
3927 print_generic_expr (dump_file, lhs);
3928 fprintf (dump_file, " to ");
3929 print_generic_expr (dump_file, op);
3930 fprintf (dump_file, "\n");
3932 /* Have to set value numbers before insert, since insert is
3933 going to valueize the references in-place. */
3934 if (vdef)
3935 changed |= set_ssa_val_to (vdef, vdef);
3937 /* Do not insert structure copies into the tables. */
3938 if (is_gimple_min_invariant (op)
3939 || is_gimple_reg (op))
3940 vn_reference_insert (lhs, op, vdef, NULL);
3942 /* Only perform the following when being called from PRE
3943 which embeds tail merging. */
3944 if (default_vn_walk_kind == VN_WALK)
3946 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3947 vn_reference_insert (assign, lhs, vuse, vdef);
3950 else
3952 /* We had a match, so value number the vdef to have the value
3953 number of the vuse it came from. */
3955 if (dump_file && (dump_flags & TDF_DETAILS))
3956 fprintf (dump_file, "Store matched earlier value, "
3957 "value numbering store vdefs to matching vuses.\n");
3959 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3962 return changed;
3965 /* Visit and value number PHI, return true if the value number
3966 changed. */
3968 static bool
3969 visit_phi (gimple *phi)
3971 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
3972 unsigned n_executable = 0;
3973 bool allsame = true;
3974 edge_iterator ei;
3975 edge e;
3977 /* TODO: We could check for this in init_sccvn, and replace this
3978 with a gcc_assert. */
3979 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3980 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3982 /* See if all non-TOP arguments have the same value. TOP is
3983 equivalent to everything, so we can ignore it. */
3984 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3985 if (e->flags & EDGE_EXECUTABLE)
3987 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3989 ++n_executable;
3990 if (TREE_CODE (def) == SSA_NAME)
3991 def = SSA_VAL (def);
3992 if (def == VN_TOP)
3994 /* Ignore undefined defs for sameval but record one. */
3995 else if (TREE_CODE (def) == SSA_NAME
3996 && ssa_undefined_value_p (def, false))
3997 seen_undef = def;
3998 else if (sameval == VN_TOP)
3999 sameval = def;
4000 else if (!expressions_equal_p (def, sameval))
4002 allsame = false;
4003 break;
4008 /* If none of the edges was executable keep the value-number at VN_TOP;
4009 if only a single edge is executable use its value. */
4010 if (n_executable <= 1)
4011 result = seen_undef ? seen_undef : sameval;
4012 /* If we saw only undefined values and VN_TOP use one of the
4013 undefined values. */
4014 else if (sameval == VN_TOP)
4015 result = seen_undef ? seen_undef : sameval;
4016 /* First see if it is equivalent to a phi node in this block. We prefer
4017 this as it allows IV elimination - see PRs 66502 and 67167. */
4018 else if ((result = vn_phi_lookup (phi)))
4020 /* If all values are the same use that, unless we've seen undefined
4021 values as well and the value isn't constant.
4022 CCP/copyprop have the same restriction to not remove uninit warnings. */
4023 else if (allsame
4024 && (! seen_undef || is_gimple_min_invariant (sameval)))
4025 result = sameval;
4026 else
4028 result = PHI_RESULT (phi);
4029 /* Only insert PHIs that are varying, for constant value numbers
4030 we mess up equivalences otherwise as we are only comparing
4031 the immediate controlling predicates. */
4032 vn_phi_insert (phi, result);
4035 return set_ssa_val_to (PHI_RESULT (phi), result);
4038 /* Try to simplify RHS using equivalences and constant folding. */
4040 static tree
4041 try_to_simplify (gassign *stmt)
4043 enum tree_code code = gimple_assign_rhs_code (stmt);
4044 tree tem;
4046 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
4047 in this case; there is no point in doing extra work. */
4048 if (code == SSA_NAME)
4049 return NULL_TREE;
4051 /* First try constant folding based on our current lattice. */
4052 mprts_hook = vn_lookup_simplify_result;
4053 mprts_hook_cnt = 9;
4054 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
4055 mprts_hook = NULL;
4056 if (tem
4057 && (TREE_CODE (tem) == SSA_NAME
4058 || is_gimple_min_invariant (tem)))
4059 return tem;
4061 return NULL_TREE;
4064 /* Visit and value number USE, return true if the value number
4065 changed. */
4067 static bool
4068 visit_use (tree use)
4070 bool changed = false;
4071 gimple *stmt = SSA_NAME_DEF_STMT (use);
4073 mark_use_processed (use);
4075 gcc_assert (!SSA_NAME_IN_FREE_LIST (use)
4076 && !SSA_NAME_IS_DEFAULT_DEF (use));
4078 if (dump_file && (dump_flags & TDF_DETAILS))
4080 fprintf (dump_file, "Value numbering ");
4081 print_generic_expr (dump_file, use);
4082 fprintf (dump_file, " stmt = ");
4083 print_gimple_stmt (dump_file, stmt, 0);
4086 if (gimple_code (stmt) == GIMPLE_PHI)
4087 changed = visit_phi (stmt);
4088 else if (gimple_has_volatile_ops (stmt))
4089 changed = defs_to_varying (stmt);
4090 else if (gassign *ass = dyn_cast <gassign *> (stmt))
4092 enum tree_code code = gimple_assign_rhs_code (ass);
4093 tree lhs = gimple_assign_lhs (ass);
4094 tree rhs1 = gimple_assign_rhs1 (ass);
4095 tree simplified;
4097 /* Shortcut for copies. Simplifying copies is pointless,
4098 since we copy the expression and value they represent. */
4099 if (code == SSA_NAME
4100 && TREE_CODE (lhs) == SSA_NAME)
4102 changed = visit_copy (lhs, rhs1);
4103 goto done;
4105 simplified = try_to_simplify (ass);
4106 if (simplified)
4108 if (dump_file && (dump_flags & TDF_DETAILS))
4110 fprintf (dump_file, "RHS ");
4111 print_gimple_expr (dump_file, ass, 0);
4112 fprintf (dump_file, " simplified to ");
4113 print_generic_expr (dump_file, simplified);
4114 fprintf (dump_file, "\n");
4117 /* Setting value numbers to constants will occasionally
4118 screw up phi congruence because constants are not
4119 uniquely associated with a single ssa name that can be
4120 looked up. */
4121 if (simplified
4122 && is_gimple_min_invariant (simplified)
4123 && TREE_CODE (lhs) == SSA_NAME)
4125 changed = set_ssa_val_to (lhs, simplified);
4126 goto done;
4128 else if (simplified
4129 && TREE_CODE (simplified) == SSA_NAME
4130 && TREE_CODE (lhs) == SSA_NAME)
4132 changed = visit_copy (lhs, simplified);
4133 goto done;
4136 if ((TREE_CODE (lhs) == SSA_NAME
4137 /* We can substitute SSA_NAMEs that are live over
4138 abnormal edges with their constant value. */
4139 && !(gimple_assign_copy_p (ass)
4140 && is_gimple_min_invariant (rhs1))
4141 && !(simplified
4142 && is_gimple_min_invariant (simplified))
4143 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4144 /* Stores or copies from SSA_NAMEs that are live over
4145 abnormal edges are a problem. */
4146 || (code == SSA_NAME
4147 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4148 changed = defs_to_varying (ass);
4149 else if (REFERENCE_CLASS_P (lhs)
4150 || DECL_P (lhs))
4151 changed = visit_reference_op_store (lhs, rhs1, ass);
4152 else if (TREE_CODE (lhs) == SSA_NAME)
4154 if ((gimple_assign_copy_p (ass)
4155 && is_gimple_min_invariant (rhs1))
4156 || (simplified
4157 && is_gimple_min_invariant (simplified)))
4159 if (simplified)
4160 changed = set_ssa_val_to (lhs, simplified);
4161 else
4162 changed = set_ssa_val_to (lhs, rhs1);
4164 else
4166 /* Visit the original statement. */
4167 switch (vn_get_stmt_kind (ass))
4169 case VN_NARY:
4170 changed = visit_nary_op (lhs, ass);
4171 break;
4172 case VN_REFERENCE:
4173 changed = visit_reference_op_load (lhs, rhs1, ass);
4174 break;
4175 default:
4176 changed = defs_to_varying (ass);
4177 break;
4181 else
4182 changed = defs_to_varying (ass);
4184 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4186 tree lhs = gimple_call_lhs (call_stmt);
4187 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4189 /* Try constant folding based on our current lattice. */
4190 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4191 vn_valueize);
4192 if (simplified)
4194 if (dump_file && (dump_flags & TDF_DETAILS))
4196 fprintf (dump_file, "call ");
4197 print_gimple_expr (dump_file, call_stmt, 0);
4198 fprintf (dump_file, " simplified to ");
4199 print_generic_expr (dump_file, simplified);
4200 fprintf (dump_file, "\n");
4203 /* Setting value numbers to constants will occasionally
4204 screw up phi congruence because constants are not
4205 uniquely associated with a single ssa name that can be
4206 looked up. */
4207 if (simplified
4208 && is_gimple_min_invariant (simplified))
4210 changed = set_ssa_val_to (lhs, simplified);
4211 if (gimple_vdef (call_stmt))
4212 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4213 SSA_VAL (gimple_vuse (call_stmt)));
4214 goto done;
4216 else if (simplified
4217 && TREE_CODE (simplified) == SSA_NAME)
4219 changed = visit_copy (lhs, simplified);
4220 if (gimple_vdef (call_stmt))
4221 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4222 SSA_VAL (gimple_vuse (call_stmt)));
4223 goto done;
4225 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4227 changed = defs_to_varying (call_stmt);
4228 goto done;
4232 /* Pick up flags from a devirtualization target. */
4233 tree fn = gimple_call_fn (stmt);
4234 int extra_fnflags = 0;
4235 if (fn && TREE_CODE (fn) == SSA_NAME)
4237 fn = SSA_VAL (fn);
4238 if (TREE_CODE (fn) == ADDR_EXPR
4239 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4240 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4242 if (!gimple_call_internal_p (call_stmt)
4243 && (/* Calls to the same function with the same vuse
4244 and the same operands do not necessarily return the same
4245 value, unless they're pure or const. */
4246 ((gimple_call_flags (call_stmt) | extra_fnflags)
4247 & (ECF_PURE | ECF_CONST))
4248 /* If calls have a vdef, subsequent calls won't have
4249 the same incoming vuse. So, if 2 calls with vdef have the
4250 same vuse, we know they're not subsequent.
4251 We can give 2 calls to the same function with the
4252 same vuse and the same operands the same value
4253 number, because there is no code in the program that can
4254 compare the 2 values... */
4255 || (gimple_vdef (call_stmt)
4256 /* ... unless the call returns a pointer which does
4257 not alias with anything else, in which case the
4258 information that the values are distinct is encoded
4259 in the IL. */
4260 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4261 /* Only perform the following when being called from PRE
4262 which embeds tail merging. */
4263 && default_vn_walk_kind == VN_WALK)))
4264 changed = visit_reference_op_call (lhs, call_stmt);
4265 else
4266 changed = defs_to_varying (call_stmt);
4268 else
4269 changed = defs_to_varying (stmt);
4270 done:
4271 return changed;
4274 /* Compare two operands by reverse postorder index */
4276 static int
4277 compare_ops (const void *pa, const void *pb)
4279 const tree opa = *((const tree *)pa);
4280 const tree opb = *((const tree *)pb);
4281 gimple *opstmta = SSA_NAME_DEF_STMT (opa);
4282 gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
4283 basic_block bba;
4284 basic_block bbb;
4286 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
4287 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4288 else if (gimple_nop_p (opstmta))
4289 return -1;
4290 else if (gimple_nop_p (opstmtb))
4291 return 1;
4293 bba = gimple_bb (opstmta);
4294 bbb = gimple_bb (opstmtb);
4296 if (!bba && !bbb)
4297 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4298 else if (!bba)
4299 return -1;
4300 else if (!bbb)
4301 return 1;
4303 if (bba == bbb)
4305 if (gimple_code (opstmta) == GIMPLE_PHI
4306 && gimple_code (opstmtb) == GIMPLE_PHI)
4307 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4308 else if (gimple_code (opstmta) == GIMPLE_PHI)
4309 return -1;
4310 else if (gimple_code (opstmtb) == GIMPLE_PHI)
4311 return 1;
4312 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
4313 return gimple_uid (opstmta) - gimple_uid (opstmtb);
4314 else
4315 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4317 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
4320 /* Sort an array containing members of a strongly connected component
4321 SCC so that the members are ordered by RPO number.
4322 This means that when the sort is complete, iterating through the
4323 array will give you the members in RPO order. */
4325 static void
4326 sort_scc (vec<tree> scc)
4328 scc.qsort (compare_ops);
4331 /* Insert the no longer used nary ONARY into the hash table of INFO. */
4333 static void
4334 copy_nary (vn_nary_op_t onary, vn_tables_t info)
4336 size_t size = sizeof_vn_nary_op (onary->length);
4337 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
4338 &info->nary_obstack);
4339 memcpy (nary, onary, size);
4340 vn_nary_op_insert_into (nary, info->nary, false);
4343 /* Insert the no longer used phi OPHI into the hash table of INFO. */
4345 static void
4346 copy_phi (vn_phi_t ophi, vn_tables_t info)
4348 vn_phi_t phi = info->phis_pool->allocate ();
4349 vn_phi_s **slot;
4350 memcpy (phi, ophi, sizeof (*phi));
4351 ophi->phiargs.create (0);
4352 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
4353 gcc_assert (!*slot);
4354 *slot = phi;
4357 /* Insert the no longer used reference OREF into the hash table of INFO. */
4359 static void
4360 copy_reference (vn_reference_t oref, vn_tables_t info)
4362 vn_reference_t ref;
4363 vn_reference_s **slot;
4364 ref = info->references_pool->allocate ();
4365 memcpy (ref, oref, sizeof (*ref));
4366 oref->operands.create (0);
4367 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
4368 if (*slot)
4369 free_reference (*slot);
4370 *slot = ref;
4373 /* Process a strongly connected component in the SSA graph. */
4375 static void
4376 process_scc (vec<tree> scc)
4378 tree var;
4379 unsigned int i;
4380 unsigned int iterations = 0;
4381 bool changed = true;
4382 vn_nary_op_iterator_type hin;
4383 vn_phi_iterator_type hip;
4384 vn_reference_iterator_type hir;
4385 vn_nary_op_t nary;
4386 vn_phi_t phi;
4387 vn_reference_t ref;
4389 /* If the SCC has a single member, just visit it. */
4390 if (scc.length () == 1)
4392 tree use = scc[0];
4393 if (VN_INFO (use)->use_processed)
4394 return;
4395 /* We need to make sure it doesn't form a cycle itself, which can
4396 happen for self-referential PHI nodes. In that case we would
4397 end up inserting an expression with VN_TOP operands into the
4398 valid table which makes us derive bogus equivalences later.
4399 The cheapest way to check this is to assume it for all PHI nodes. */
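/* Illustrative example (added; names are made up): a PHI such as
     x_1 = PHI <0 (entry), x_1 (latch)>
   uses its own result, so even a size-one "SCC" containing it must be
   iterated rather than visited a single time.  */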
4400 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
4401 /* Fallthru to iteration. */ ;
4402 else
4404 visit_use (use);
4405 return;
4409 if (dump_file && (dump_flags & TDF_DETAILS))
4410 print_scc (dump_file, scc);
4412 /* Iterate over the SCC with the optimistic table until it stops
4413 changing. */
4414 current_info = optimistic_info;
4415 while (changed)
4417 changed = false;
4418 iterations++;
4419 if (dump_file && (dump_flags & TDF_DETAILS))
4420 fprintf (dump_file, "Starting iteration %d\n", iterations);
4421 /* As we are value-numbering optimistically we have to
4422 clear the expression tables and the simplified expressions
4423 in each iteration until we converge. */
4424 optimistic_info->nary->empty ();
4425 optimistic_info->phis->empty ();
4426 optimistic_info->references->empty ();
4427 obstack_free (&optimistic_info->nary_obstack, NULL);
4428 gcc_obstack_init (&optimistic_info->nary_obstack);
4429 optimistic_info->phis_pool->release ();
4430 optimistic_info->references_pool->release ();
4431 FOR_EACH_VEC_ELT (scc, i, var)
4432 gcc_assert (!VN_INFO (var)->needs_insertion
4433 && VN_INFO (var)->expr == NULL);
4434 FOR_EACH_VEC_ELT (scc, i, var)
4435 changed |= visit_use (var);
4438 if (dump_file && (dump_flags & TDF_DETAILS))
4439 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4440 statistics_histogram_event (cfun, "SCC iterations", iterations);
4442 /* Finally, copy the contents of the no longer used optimistic
4443 table to the valid table. */
4444 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
4445 copy_nary (nary, valid_info);
4446 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
4447 copy_phi (phi, valid_info);
4448 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
4449 ref, vn_reference_t, hir)
4450 copy_reference (ref, valid_info);
4452 current_info = valid_info;
4456 /* Pop the components of the found SCC for NAME off the SCC stack
4457 and process them. SCCs larger than PARAM_SCCVN_MAX_SCC_SIZE are
4458 not iterated; their members are dropped to varying instead. */
4460 static void
4461 extract_and_process_scc_for_name (tree name)
4463 auto_vec<tree> scc;
4464 tree x;
4466 /* Found an SCC, pop the components off the SCC stack and
4467 process them. */
4470 x = sccstack.pop ();
4472 VN_INFO (x)->on_sccstack = false;
4473 scc.safe_push (x);
4474 } while (x != name);
4476 /* Drop all defs in the SCC to varying in case a SCC turns out to be
4477 incredibly large.
4478 ??? Just switch to a non-optimistic mode that avoids any iteration. */
4479 if (scc.length () > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4481 if (dump_file)
4483 print_scc (dump_file, scc);
4484 fprintf (dump_file, "WARNING: Giving up value-numbering SCC due to "
4485 "size %u exceeding %u\n", scc.length (),
4486 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4488 tree var;
4489 unsigned i;
4490 FOR_EACH_VEC_ELT (scc, i, var)
4492 gimple *def = SSA_NAME_DEF_STMT (var);
4493 mark_use_processed (var);
4494 if (SSA_NAME_IS_DEFAULT_DEF (var)
4495 || gimple_code (def) == GIMPLE_PHI)
4496 set_ssa_val_to (var, var);
4497 else
4498 defs_to_varying (def);
4500 return;
4503 if (scc.length () > 1)
4504 sort_scc (scc);
4506 process_scc (scc);
4509 /* Depth first search on NAME to discover and process SCC's in the SSA
4510 graph.
4511 Execution of this algorithm relies on the fact that the SCC's are
4512 popped off the stack in topological order. */
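/* Added note: this is an iterative, explicit-stack variant of Tarjan's
   SCC algorithm; dfsnum/low are the usual discovery and low-link numbers
   and on_sccstack tracks membership of the SCC stack.  */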
4516 static void
4517 DFS (tree name)
4519 auto_vec<ssa_op_iter> itervec;
4520 auto_vec<tree> namevec;
4521 use_operand_p usep = NULL;
4522 gimple *defstmt;
4523 tree use;
4524 ssa_op_iter iter;
4526 start_over:
4527 /* SCC info */
4528 VN_INFO (name)->dfsnum = next_dfs_num++;
4529 VN_INFO (name)->visited = true;
4530 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4532 sccstack.safe_push (name);
4533 VN_INFO (name)->on_sccstack = true;
4534 defstmt = SSA_NAME_DEF_STMT (name);
4536 /* Recursively DFS on our operands, looking for SCC's. */
4537 if (!gimple_nop_p (defstmt))
4539 /* Push a new iterator. */
4540 if (gphi *phi = dyn_cast <gphi *> (defstmt))
4541 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4542 else
4543 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4545 else
4546 clear_and_done_ssa_iter (&iter);
4548 while (1)
4550 /* If we are done processing uses of a name, go up the stack
4551 of iterators and process SCCs as we found them. */
4552 if (op_iter_done (&iter))
4554 /* See if we found an SCC. */
4555 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4556 extract_and_process_scc_for_name (name);
4558 /* Check if we are done. */
4559 if (namevec.is_empty ())
4560 return;
4562 /* Restore the last use walker and continue walking there. */
4563 use = name;
4564 name = namevec.pop ();
4565 memcpy (&iter, &itervec.last (),
4566 sizeof (ssa_op_iter));
4567 itervec.pop ();
4568 goto continue_walking;
4571 use = USE_FROM_PTR (usep);
4573 /* Since we handle phi nodes, we will sometimes get
4574 invariants in the use expression. */
4575 if (TREE_CODE (use) == SSA_NAME)
4577 if (! (VN_INFO (use)->visited))
4579 /* Recurse by pushing the current use walking state on
4580 the stack and starting over. */
4581 itervec.safe_push (iter);
4582 namevec.safe_push (name);
4583 name = use;
4584 goto start_over;
4586 continue_walking:
4587 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4588 VN_INFO (use)->low);
4590 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4591 && VN_INFO (use)->on_sccstack)
4593 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4594 VN_INFO (name)->low);
4598 usep = op_iter_next_use (&iter);
4602 /* Allocate a value number table. */
4604 static void
4605 allocate_vn_table (vn_tables_t table)
4607 table->phis = new vn_phi_table_type (23);
4608 table->nary = new vn_nary_op_table_type (23);
4609 table->references = new vn_reference_table_type (23);
4611 gcc_obstack_init (&table->nary_obstack);
4612 table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4613 table->references_pool = new object_allocator<vn_reference_s>
4614 ("VN references");
4617 /* Free a value number table. */
4619 static void
4620 free_vn_table (vn_tables_t table)
4622 delete table->phis;
4623 table->phis = NULL;
4624 delete table->nary;
4625 table->nary = NULL;
4626 delete table->references;
4627 table->references = NULL;
4628 obstack_free (&table->nary_obstack, NULL);
4629 delete table->phis_pool;
4630 delete table->references_pool;
4633 static void
4634 init_scc_vn (void)
4636 int j;
4637 int *rpo_numbers_temp;
4639 calculate_dominance_info (CDI_DOMINATORS);
4640 mark_dfs_back_edges ();
4642 sccstack.create (0);
4643 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4645 constant_value_ids = BITMAP_ALLOC (NULL);
4647 next_dfs_num = 1;
4648 next_value_id = 1;
4650 vn_ssa_aux_table.create (num_ssa_names + 1);
4651 /* VEC_alloc doesn't actually grow it to the right size; it just
4652 preallocates the space to do so. */
4653 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4654 gcc_obstack_init (&vn_ssa_aux_obstack);
4656 shared_lookup_phiargs.create (0);
4657 shared_lookup_references.create (0);
4658 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4659 rpo_numbers_temp =
4660 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4661 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4663 /* rpo_numbers_temp is an array in RPO order: rpo[i] = bb means that
4664 the i'th block in RPO order is bb. We want to map bb's to RPO
4665 numbers, so we need to invert this array. */
4666 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4667 rpo_numbers[rpo_numbers_temp[j]] = j;
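/* For example (illustrative): if rpo_numbers_temp = { 3, 5, 4 } then
   after this loop rpo_numbers[3] = 0, rpo_numbers[5] = 1 and
   rpo_numbers[4] = 2, i.e. each block index maps to its position in
   the reverse postorder.  */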
4669 XDELETE (rpo_numbers_temp);
4671 VN_TOP = build_decl (UNKNOWN_LOCATION, VAR_DECL,
4672 get_identifier ("VN_TOP"), error_mark_node);
4674 renumber_gimple_stmt_uids ();
4676 /* Create the valid and optimistic value numbering tables. */
4677 valid_info = XCNEW (struct vn_tables_s);
4678 allocate_vn_table (valid_info);
4679 optimistic_info = XCNEW (struct vn_tables_s);
4680 allocate_vn_table (optimistic_info);
4681 current_info = valid_info;
4683 /* Create the VN_INFO structures, and initialize value numbers to
4684 TOP or VARYING for parameters. */
4685 size_t i;
4686 tree name;
4688 FOR_EACH_SSA_NAME (i, name, cfun)
4690 VN_INFO_GET (name)->valnum = VN_TOP;
4691 VN_INFO (name)->needs_insertion = false;
4692 VN_INFO (name)->expr = NULL;
4693 VN_INFO (name)->value_id = 0;
4695 if (!SSA_NAME_IS_DEFAULT_DEF (name))
4696 continue;
4698 switch (TREE_CODE (SSA_NAME_VAR (name)))
4700 case VAR_DECL:
4701 /* All undefined vars are VARYING. */
4702 VN_INFO (name)->valnum = name;
4703 VN_INFO (name)->visited = true;
4704 break;
4706 case PARM_DECL:
4707 /* Parameters are VARYING but we can record a condition
4708 if we know it is a non-NULL pointer. */
4709 VN_INFO (name)->visited = true;
4710 VN_INFO (name)->valnum = name;
4711 if (POINTER_TYPE_P (TREE_TYPE (name))
4712 && nonnull_arg_p (SSA_NAME_VAR (name)))
4714 tree ops[2];
4715 ops[0] = name;
4716 ops[1] = build_int_cst (TREE_TYPE (name), 0);
4717 vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4718 boolean_true_node, 0);
4719 if (dump_file && (dump_flags & TDF_DETAILS))
4721 fprintf (dump_file, "Recording ");
4722 print_generic_expr (dump_file, name, TDF_SLIM);
4723 fprintf (dump_file, " != 0\n");
4726 break;
4728 case RESULT_DECL:
4729 /* If the result is passed by invisible reference the default
4730 def is initialized, otherwise it's uninitialized. Either way
4731 it is treated as varying. */
4732 VN_INFO (name)->visited = true;
4733 VN_INFO (name)->valnum = name;
4734 break;
4736 default:
4737 gcc_unreachable ();
4742 /* Restore SSA info that has been reset on value leaders. */
4744 void
4745 scc_vn_restore_ssa_info (void)
4747 unsigned i;
4748 tree name;
4750 FOR_EACH_SSA_NAME (i, name, cfun)
4752 if (has_VN_INFO (name))
4754 if (VN_INFO (name)->needs_insertion)
4756 else if (POINTER_TYPE_P (TREE_TYPE (name))
4757 && VN_INFO (name)->info.ptr_info)
4758 SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4759 else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4760 && VN_INFO (name)->info.range_info)
4762 SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4763 SSA_NAME_ANTI_RANGE_P (name)
4764 = VN_INFO (name)->range_info_anti_range_p;
4770 void
4771 free_scc_vn (void)
4773 size_t i;
4774 tree name;
4776 delete constant_to_value_id;
4777 constant_to_value_id = NULL;
4778 BITMAP_FREE (constant_value_ids);
4779 shared_lookup_phiargs.release ();
4780 shared_lookup_references.release ();
4781 XDELETEVEC (rpo_numbers);
4783 FOR_EACH_SSA_NAME (i, name, cfun)
4785 if (has_VN_INFO (name)
4786 && VN_INFO (name)->needs_insertion)
4787 release_ssa_name (name);
4789 obstack_free (&vn_ssa_aux_obstack, NULL);
4790 vn_ssa_aux_table.release ();
4792 sccstack.release ();
4793 free_vn_table (valid_info);
4794 XDELETE (valid_info);
4795 free_vn_table (optimistic_info);
4796 XDELETE (optimistic_info);
4798 BITMAP_FREE (const_parms);
4801 /* Set *ID according to RESULT. */
4803 static void
4804 set_value_id_for_result (tree result, unsigned int *id)
4806 if (result && TREE_CODE (result) == SSA_NAME)
4807 *id = VN_INFO (result)->value_id;
4808 else if (result && is_gimple_min_invariant (result))
4809 *id = get_or_alloc_constant_value_id (result);
4810 else
4811 *id = get_next_value_id ();
4814 /* Set the value ids in the valid hash tables. */
4816 static void
4817 set_hashtable_value_ids (void)
4819 vn_nary_op_iterator_type hin;
4820 vn_phi_iterator_type hip;
4821 vn_reference_iterator_type hir;
4822 vn_nary_op_t vno;
4823 vn_reference_t vr;
4824 vn_phi_t vp;
4826 /* Now set the value ids of the things we had put in the hash
4827 table. */
4829 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4830 set_value_id_for_result (vno->result, &vno->value_id);
4832 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4833 set_value_id_for_result (vp->result, &vp->value_id);
4835 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4836 hir)
4837 set_value_id_for_result (vr->result, &vr->value_id);
4840 class sccvn_dom_walker : public dom_walker
4842 public:
4843 sccvn_dom_walker ()
4844 : dom_walker (CDI_DOMINATORS, REACHABLE_BLOCKS), cond_stack (0) {}
4846 virtual edge before_dom_children (basic_block);
4847 virtual void after_dom_children (basic_block);
4849 void record_cond (basic_block,
4850 enum tree_code code, tree lhs, tree rhs, bool value);
4851 void record_conds (basic_block,
4852 enum tree_code code, tree lhs, tree rhs, bool value);
4854 auto_vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4855 cond_stack;
4858 /* Record a temporary condition for the BB and its dominated blocks. */
4860 void
4861 sccvn_dom_walker::record_cond (basic_block bb,
4862 enum tree_code code, tree lhs, tree rhs,
4863 bool value)
4865 tree ops[2] = { lhs, rhs };
4866 vn_nary_op_t old = NULL;
4867 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4868 current_info->nary->remove_elt_with_hash (old, old->hashcode);
4869 vn_nary_op_t cond
4870 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4871 value
4872 ? boolean_true_node
4873 : boolean_false_node, 0);
4874 if (dump_file && (dump_flags & TDF_DETAILS))
4876 fprintf (dump_file, "Recording temporarily ");
4877 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4878 fprintf (dump_file, " %s ", get_tree_code_name (code));
4879 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4880 fprintf (dump_file, " == %s%s\n",
4881 value ? "true" : "false",
4882 old ? " (old entry saved)" : "");
4884 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4887 /* Record temporary conditions for the BB and its dominated blocks
4888 according to LHS CODE RHS == VALUE, together with the conditions it implies. */
4890 void
4891 sccvn_dom_walker::record_conds (basic_block bb,
4892 enum tree_code code, tree lhs, tree rhs,
4893 bool value)
4895 /* Record the original condition. */
4896 record_cond (bb, code, lhs, rhs, value);
4898 if (!value)
4899 return;
4901 /* Record dominated conditions if the condition is true. Note that
4902 the inversion is already recorded. */
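/* Illustrative example (added): recording "a < b" as true below also
   records "a <= b" and "a != b" as true and "a == b" as false.  */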
4903 switch (code)
4905 case LT_EXPR:
4906 case GT_EXPR:
4907 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4908 record_cond (bb, NE_EXPR, lhs, rhs, true);
4909 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4910 break;
4912 case EQ_EXPR:
4913 record_cond (bb, LE_EXPR, lhs, rhs, true);
4914 record_cond (bb, GE_EXPR, lhs, rhs, true);
4915 record_cond (bb, LT_EXPR, lhs, rhs, false);
4916 record_cond (bb, GT_EXPR, lhs, rhs, false);
4917 break;
4919 default:
4920 break;
4924 /* Restore expressions and values derived from conditionals. */
4926 void
4927 sccvn_dom_walker::after_dom_children (basic_block bb)
4929 while (!cond_stack.is_empty ()
4930 && cond_stack.last ().first == bb)
4932 vn_nary_op_t cond = cond_stack.last ().second.first;
4933 vn_nary_op_t old = cond_stack.last ().second.second;
4934 current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4935 if (old)
4936 vn_nary_op_insert_into (old, current_info->nary, false);
4937 cond_stack.pop ();
4941 /* Value number all statements in BB. */
4943 edge
4944 sccvn_dom_walker::before_dom_children (basic_block bb)
4946 if (dump_file && (dump_flags & TDF_DETAILS))
4947 fprintf (dump_file, "Visiting BB %d\n", bb->index);
4949 /* If we have a single predecessor record the equivalence from a
4950 possible condition on the predecessor edge. */
4951 edge pred_e = single_pred_edge_ignoring_loop_edges (bb, false);
4952 if (pred_e)
4954 /* Check if there are multiple executable successor edges in
4955 the source block. Otherwise there is no additional info
4956 to be recorded. */
4957 edge_iterator ei;
4958 edge e2;
4959 FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4960 if (e2 != pred_e
4961 && e2->flags & EDGE_EXECUTABLE)
4962 break;
4963 if (e2 && (e2->flags & EDGE_EXECUTABLE))
4965 gimple *stmt = last_stmt (pred_e->src);
4966 if (stmt
4967 && gimple_code (stmt) == GIMPLE_COND)
4969 enum tree_code code = gimple_cond_code (stmt);
4970 tree lhs = gimple_cond_lhs (stmt);
4971 tree rhs = gimple_cond_rhs (stmt);
4972 record_conds (bb, code, lhs, rhs,
4973 (pred_e->flags & EDGE_TRUE_VALUE) != 0);
4974 code = invert_tree_comparison (code, HONOR_NANS (lhs));
4975 if (code != ERROR_MARK)
4976 record_conds (bb, code, lhs, rhs,
4977 (pred_e->flags & EDGE_TRUE_VALUE) == 0);
4982 /* Value-number all defs in the basic-block. */
4983 for (gphi_iterator gsi = gsi_start_phis (bb);
4984 !gsi_end_p (gsi); gsi_next (&gsi))
4986 gphi *phi = gsi.phi ();
4987 tree res = PHI_RESULT (phi);
4988 if (!VN_INFO (res)->visited)
4989 DFS (res);
4991 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4992 !gsi_end_p (gsi); gsi_next (&gsi))
4994 ssa_op_iter i;
4995 tree op;
4996 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4997 if (!VN_INFO (op)->visited)
4998 DFS (op);
5001 /* Finally look at the last stmt. */
5002 gimple *stmt = last_stmt (bb);
5003 if (!stmt)
5004 return NULL;
5006 enum gimple_code code = gimple_code (stmt);
5007 if (code != GIMPLE_COND
5008 && code != GIMPLE_SWITCH
5009 && code != GIMPLE_GOTO)
5010 return NULL;
5012 if (dump_file && (dump_flags & TDF_DETAILS))
5014 fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
5015 print_gimple_stmt (dump_file, stmt, 0);
5018 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
5019 if value-numbering can prove they are not reachable. Handling
5020 computed gotos is also possible. */
5021 tree val;
5022 switch (code)
5024 case GIMPLE_COND:
5026 tree lhs = vn_valueize (gimple_cond_lhs (stmt));
5027 tree rhs = vn_valueize (gimple_cond_rhs (stmt));
5028 val = gimple_simplify (gimple_cond_code (stmt),
5029 boolean_type_node, lhs, rhs,
5030 NULL, vn_valueize);
5031 /* If that didn't simplify to a constant see if we have recorded
5032 temporary expressions from taken edges. */
5033 if (!val || TREE_CODE (val) != INTEGER_CST)
5035 tree ops[2];
5036 ops[0] = lhs;
5037 ops[1] = rhs;
5038 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
5039 boolean_type_node, ops, NULL);
5041 break;
5043 case GIMPLE_SWITCH:
5044 val = gimple_switch_index (as_a <gswitch *> (stmt));
5045 break;
5046 case GIMPLE_GOTO:
5047 val = gimple_goto_dest (stmt);
5048 break;
5049 default:
5050 gcc_unreachable ();
5052 if (!val)
5053 return NULL;
5055 edge taken = find_taken_edge (bb, vn_valueize (val));
5056 if (!taken)
5057 return NULL;
5059 if (dump_file && (dump_flags & TDF_DETAILS))
5060 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
5061 "not executable\n", bb->index, bb->index, taken->dest->index);
5063 return taken;
5066 /* Do SCCVN. DEFAULT_VN_WALK_KIND_ specifies how we use the alias
5067 oracle walking during the VN process. */
5070 void
5071 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
5073 size_t i;
5075 default_vn_walk_kind = default_vn_walk_kind_;
5077 init_scc_vn ();
5079 /* Collect pointers we know point to readonly memory. */
5080 const_parms = BITMAP_ALLOC (NULL);
5081 tree fnspec = lookup_attribute ("fn spec",
5082 TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
5083 if (fnspec)
5085 fnspec = TREE_VALUE (TREE_VALUE (fnspec));
5086 i = 1;
5087 for (tree arg = DECL_ARGUMENTS (cfun->decl);
5088 arg; arg = DECL_CHAIN (arg), ++i)
5090 if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
5091 break;
5092 if (TREE_STRING_POINTER (fnspec)[i] == 'R'
5093 || TREE_STRING_POINTER (fnspec)[i] == 'r')
5095 tree name = ssa_default_def (cfun, arg);
5096 if (name)
5097 bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
5102 /* Walk all blocks in dominator order, value-numbering the stmts'
5103 SSA defs and deciding whether outgoing edges are not executable. */
5104 sccvn_dom_walker walker;
5105 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5107 /* Initialize the value ids and prune out remaining VN_TOPs
5108 from dead code. */
5109 tree name;
5110 FOR_EACH_SSA_NAME (i, name, cfun)
5112 vn_ssa_aux_t info = VN_INFO (name);
5113 if (!info->visited
5114 || info->valnum == VN_TOP)
5115 info->valnum = name;
5116 if (info->valnum == name)
5117 info->value_id = get_next_value_id ();
5118 else if (is_gimple_min_invariant (info->valnum))
5119 info->value_id = get_or_alloc_constant_value_id (info->valnum);
5122 /* Propagate. */
5123 FOR_EACH_SSA_NAME (i, name, cfun)
5125 vn_ssa_aux_t info = VN_INFO (name);
5126 if (TREE_CODE (info->valnum) == SSA_NAME
5127 && info->valnum != name
5128 && info->value_id != VN_INFO (info->valnum)->value_id)
5129 info->value_id = VN_INFO (info->valnum)->value_id;
5132 set_hashtable_value_ids ();
5134 if (dump_file && (dump_flags & TDF_DETAILS))
5136 fprintf (dump_file, "Value numbers:\n");
5137 FOR_EACH_SSA_NAME (i, name, cfun)
5139 if (VN_INFO (name)->visited
5140 && SSA_VAL (name) != name)
5142 print_generic_expr (dump_file, name);
5143 fprintf (dump_file, " = ");
5144 print_generic_expr (dump_file, SSA_VAL (name));
5145 fprintf (dump_file, "\n");
5151 /* Return the maximum value id we have ever seen. */
5153 unsigned int
5154 get_max_value_id (void)
5156 return next_value_id;
5159 /* Return the next unique value id. */
5161 unsigned int
5162 get_next_value_id (void)
5164 return next_value_id++;
5168 /* Compare two expressions E1 and E2 and return true if they are equal. */
5170 bool
5171 expressions_equal_p (tree e1, tree e2)
5173 /* The obvious case. */
5174 if (e1 == e2)
5175 return true;
5177 /* If either one is VN_TOP consider them equal. */
5178 if (e1 == VN_TOP || e2 == VN_TOP)
5179 return true;
5181 /* If only one of them is null, they cannot be equal. */
5182 if (!e1 || !e2)
5183 return false;
5185 /* Now perform the actual comparison. */
5186 if (TREE_CODE (e1) == TREE_CODE (e2)
5187 && operand_equal_p (e1, e2, OEP_PURE_SAME))
5188 return true;
5190 return false;
5194 /* Return true if the nary operation NARY may trap. This is a copy
5195 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
5197 bool
5198 vn_nary_may_trap (vn_nary_op_t nary)
5200 tree type;
5201 tree rhs2 = NULL_TREE;
5202 bool honor_nans = false;
5203 bool honor_snans = false;
5204 bool fp_operation = false;
5205 bool honor_trapv = false;
5206 bool handled, ret;
5207 unsigned i;
5209 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5210 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5211 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5213 type = nary->type;
5214 fp_operation = FLOAT_TYPE_P (type);
5215 if (fp_operation)
5217 honor_nans = flag_trapping_math && !flag_finite_math_only;
5218 honor_snans = flag_signaling_nans != 0;
5220 else if (INTEGRAL_TYPE_P (type)
5221 && TYPE_OVERFLOW_TRAPS (type))
5222 honor_trapv = true;
5224 if (nary->length >= 2)
5225 rhs2 = nary->op[1];
5226 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5227 honor_trapv,
5228 honor_nans, honor_snans, rhs2,
5229 &handled);
5230 if (handled
5231 && ret)
5232 return true;
5234 for (i = 0; i < nary->length; ++i)
5235 if (tree_could_trap_p (nary->op[i]))
5236 return true;
5238 return false;
5242 class eliminate_dom_walker : public dom_walker
5244 public:
5245 eliminate_dom_walker (cdi_direction, bitmap);
5246 ~eliminate_dom_walker ();
5248 virtual edge before_dom_children (basic_block);
5249 virtual void after_dom_children (basic_block);
5251 tree eliminate_avail (tree op);
5252 void eliminate_push_avail (tree op);
5253 tree eliminate_insert (gimple_stmt_iterator *gsi, tree val);
5255 bool do_pre;
5256 unsigned int el_todo;
5257 unsigned int eliminations;
5258 unsigned int insertions;
5260 /* SSA names that had their defs inserted by PRE if do_pre. */
5261 bitmap inserted_exprs;
5263 /* Blocks with statements that have had their EH properties changed. */
5264 bitmap need_eh_cleanup;
5266 /* Blocks with statements that have had their AB properties changed. */
5267 bitmap need_ab_cleanup;
5269 auto_vec<gimple *> to_remove;
5270 auto_vec<gimple *> to_fixup;
5271 auto_vec<tree> avail;
5272 auto_vec<tree> avail_stack;
5275 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
5276 bitmap inserted_exprs_)
5277 : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
5278 el_todo (0), eliminations (0), insertions (0),
5279 inserted_exprs (inserted_exprs_)
5281 need_eh_cleanup = BITMAP_ALLOC (NULL);
5282 need_ab_cleanup = BITMAP_ALLOC (NULL);
5285 eliminate_dom_walker::~eliminate_dom_walker ()
5287 BITMAP_FREE (need_eh_cleanup);
5288 BITMAP_FREE (need_ab_cleanup);
5291 /* Return a leader for OP that is available at the current point of the
5292 eliminate domwalk. */
5294 tree
5295 eliminate_dom_walker::eliminate_avail (tree op)
5297 tree valnum = VN_INFO (op)->valnum;
5298 if (TREE_CODE (valnum) == SSA_NAME)
5300 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5301 return valnum;
5302 if (avail.length () > SSA_NAME_VERSION (valnum))
5303 return avail[SSA_NAME_VERSION (valnum)];
5305 else if (is_gimple_min_invariant (valnum))
5306 return valnum;
5307 return NULL_TREE;
5310 /* At the current point of the eliminate domwalk make OP available. */
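/* Added note: AVAIL is indexed by the SSA version of OP's value number and
   holds the current leader; the previous leader (or OP itself on the first
   push) is saved on AVAIL_STACK, presumably so the change can be undone
   when the domwalk leaves this block's dominator subtree.  */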
5312 void
5313 eliminate_dom_walker::eliminate_push_avail (tree op)
5315 tree valnum = VN_INFO (op)->valnum;
5316 if (TREE_CODE (valnum) == SSA_NAME)
5318 if (avail.length () <= SSA_NAME_VERSION (valnum))
5319 avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
5320 tree pushop = op;
5321 if (avail[SSA_NAME_VERSION (valnum)])
5322 pushop = avail[SSA_NAME_VERSION (valnum)];
5323 avail_stack.safe_push (pushop);
5324 avail[SSA_NAME_VERSION (valnum)] = op;
5328 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
5329 the leader for the expression if insertion was successful. */
5331 tree
5332 eliminate_dom_walker::eliminate_insert (gimple_stmt_iterator *gsi, tree val)
5334 /* We can insert a sequence with a single assignment only. */
5335 gimple_seq stmts = VN_INFO (val)->expr;
5336 if (!gimple_seq_singleton_p (stmts))
5337 return NULL_TREE;
5338 gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
5339 if (!stmt
5340 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
5341 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
5342 && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
5343 && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
5344 || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
5345 return NULL_TREE;
5347 tree op = gimple_assign_rhs1 (stmt);
5348 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
5349 || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5350 op = TREE_OPERAND (op, 0);
5351 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
5352 if (!leader)
5353 return NULL_TREE;
5355 tree res;
5356 stmts = NULL;
5357 if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5358 res = gimple_build (&stmts, BIT_FIELD_REF,
5359 TREE_TYPE (val), leader,
5360 TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
5361 TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
5362 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
5363 res = gimple_build (&stmts, BIT_AND_EXPR,
5364 TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
5365 else
5366 res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
5367 TREE_TYPE (val), leader);
5368 if (TREE_CODE (res) != SSA_NAME
5369 || SSA_NAME_IS_DEFAULT_DEF (res)
5370 || gimple_bb (SSA_NAME_DEF_STMT (res)))
5372 gimple_seq_discard (stmts);
5374 /* During propagation we have to treat SSA info conservatively
5375 and thus we can end up simplifying the inserted expression
5376 at elimination time to something not defined in stmts. */
5377 /* But then this is a redundancy we failed to detect. Which means
5378 res now has two values. That doesn't play well with how
5379 we track availability here, so give up. */
5380 if (dump_file && (dump_flags & TDF_DETAILS))
5382 if (TREE_CODE (res) == SSA_NAME)
5383 res = eliminate_avail (res);
5384 if (res)
5386 fprintf (dump_file, "Failed to insert expression for value ");
5387 print_generic_expr (dump_file, val);
5388 fprintf (dump_file, " which is really fully redundant to ");
5389 print_generic_expr (dump_file, res);
5390 fprintf (dump_file, "\n");
5394 return NULL_TREE;
5396 else
5398 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
5399 VN_INFO_GET (res)->valnum = val;
5402 insertions++;
5403 if (dump_file && (dump_flags & TDF_DETAILS))
5405 fprintf (dump_file, "Inserted ");
5406 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
5409 return res;
5414 /* Perform elimination for the basic-block B during the domwalk. */
5416 edge
5417 eliminate_dom_walker::before_dom_children (basic_block b)
5419 /* Mark new bb. */
5420 avail_stack.safe_push (NULL_TREE);
5422 /* Skip blocks marked unreachable during the SCCVN domwalk. */
5423 edge_iterator ei;
5424 edge e;
5425 FOR_EACH_EDGE (e, ei, b->preds)
5426 if (e->flags & EDGE_EXECUTABLE)
5427 break;
5428 if (! e)
5429 return NULL;
5431 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
5433 gphi *phi = gsi.phi ();
5434 tree res = PHI_RESULT (phi);
5436 if (virtual_operand_p (res))
5438 gsi_next (&gsi);
5439 continue;
5442 tree sprime = eliminate_avail (res);
5443 if (sprime
5444 && sprime != res)
5446 if (dump_file && (dump_flags & TDF_DETAILS))
5448 fprintf (dump_file, "Replaced redundant PHI node defining ");
5449 print_generic_expr (dump_file, res);
5450 fprintf (dump_file, " with ");
5451 print_generic_expr (dump_file, sprime);
5452 fprintf (dump_file, "\n");
5455 /* If we inserted this PHI node ourselves, it's not an elimination. */
5456 if (! inserted_exprs
5457 || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
5458 eliminations++;
5460 /* If we will propagate into all uses don't bother to do
5461 anything. */
5462 if (may_propagate_copy (res, sprime))
5464 /* Mark the PHI for removal. */
5465 to_remove.safe_push (phi);
5466 gsi_next (&gsi);
5467 continue;
5470 remove_phi_node (&gsi, false);
5472 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
5473 sprime = fold_convert (TREE_TYPE (res), sprime);
5474 gimple *stmt = gimple_build_assign (res, sprime);
5475 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
5476 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
5477 continue;
5480 eliminate_push_avail (res);
5481 gsi_next (&gsi);
5484 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
5485 !gsi_end_p (gsi);
5486 gsi_next (&gsi))
5488 tree sprime = NULL_TREE;
5489 gimple *stmt = gsi_stmt (gsi);
5490 tree lhs = gimple_get_lhs (stmt);
5491 if (lhs && TREE_CODE (lhs) == SSA_NAME
5492 && !gimple_has_volatile_ops (stmt)
5493 /* See PR43491. Do not replace a global register variable when
5494 it is the RHS of an assignment. Do replace local register
5495 variables since gcc does not guarantee a local variable will
5496 be allocated in a register.
5497 ??? The fix isn't effective here. This should instead
5498 be ensured by not value-numbering them the same but treating
5499 them like volatiles? */
5500 && !(gimple_assign_single_p (stmt)
5501 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
5502 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
5503 && is_global_var (gimple_assign_rhs1 (stmt)))))
5505 sprime = eliminate_avail (lhs);
5506 if (!sprime)
5508 /* If there is no existing usable leader but SCCVN thinks
5509 it has an expression it wants to use as replacement,
5510 insert that. */
5511 tree val = VN_INFO (lhs)->valnum;
5512 if (val != VN_TOP
5513 && TREE_CODE (val) == SSA_NAME
5514 && VN_INFO (val)->needs_insertion
5515 && VN_INFO (val)->expr != NULL
5516 && (sprime = eliminate_insert (&gsi, val)) != NULL_TREE)
5517 eliminate_push_avail (sprime);
5520 /* If this now constitutes a copy, duplicate points-to
5521 and range info appropriately. This is especially
5522 important for inserted code. See tree-ssa-copy.c
5523 for similar code. */
5524 if (sprime
5525 && TREE_CODE (sprime) == SSA_NAME)
5527 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
5528 if (POINTER_TYPE_P (TREE_TYPE (lhs))
5529 && VN_INFO_PTR_INFO (lhs)
5530 && ! VN_INFO_PTR_INFO (sprime))
5532 duplicate_ssa_name_ptr_info (sprime,
5533 VN_INFO_PTR_INFO (lhs));
5534 if (b != sprime_b)
5535 mark_ptr_info_alignment_unknown
5536 (SSA_NAME_PTR_INFO (sprime));
5538 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5539 && VN_INFO_RANGE_INFO (lhs)
5540 && ! VN_INFO_RANGE_INFO (sprime)
5541 && b == sprime_b)
5542 duplicate_ssa_name_range_info (sprime,
5543 VN_INFO_RANGE_TYPE (lhs),
5544 VN_INFO_RANGE_INFO (lhs));
5547 /* Inhibit the use of an inserted PHI on a loop header when
5548 the address of the memory reference is a simple induction
5549 variable. In other cases the vectorizer won't do anything
5550 anyway (either it's loop invariant or a complicated
5551 expression). */
5552 if (sprime
5553 && TREE_CODE (sprime) == SSA_NAME
5554 && do_pre
5555 && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
5556 && loop_outer (b->loop_father)
5557 && has_zero_uses (sprime)
5558 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
5559 && gimple_assign_load_p (stmt))
5561 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
5562 basic_block def_bb = gimple_bb (def_stmt);
5563 if (gimple_code (def_stmt) == GIMPLE_PHI
5564 && def_bb->loop_father->header == def_bb)
5566 loop_p loop = def_bb->loop_father;
5567 ssa_op_iter iter;
5568 tree op;
5569 bool found = false;
5570 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5572 affine_iv iv;
5573 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
5574 if (def_bb
5575 && flow_bb_inside_loop_p (loop, def_bb)
5576 && simple_iv (loop, loop, op, &iv, true))
5578 found = true;
5579 break;
5582 if (found)
5584 if (dump_file && (dump_flags & TDF_DETAILS))
5586 fprintf (dump_file, "Not replacing ");
5587 print_gimple_expr (dump_file, stmt, 0);
5588 fprintf (dump_file, " with ");
5589 print_generic_expr (dump_file, sprime);
5590 fprintf (dump_file, " which would add a loop"
5591 " carried dependence to loop %d\n",
5592 loop->num);
5594 /* Don't keep sprime available. */
5595 sprime = NULL_TREE;
5600 if (sprime)
5602 /* If we can propagate the value computed for LHS into
5603 all uses don't bother doing anything with this stmt. */
5604 if (may_propagate_copy (lhs, sprime))
5606 /* Mark it for removal. */
5607 to_remove.safe_push (stmt);
5609 /* ??? Don't count copy/constant propagations. */
5610 if (gimple_assign_single_p (stmt)
5611 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5612 || gimple_assign_rhs1 (stmt) == sprime))
5613 continue;
5615 if (dump_file && (dump_flags & TDF_DETAILS))
5617 fprintf (dump_file, "Replaced ");
5618 print_gimple_expr (dump_file, stmt, 0);
5619 fprintf (dump_file, " with ");
5620 print_generic_expr (dump_file, sprime);
5621 fprintf (dump_file, " in all uses of ");
5622 print_gimple_stmt (dump_file, stmt, 0);
5625 eliminations++;
5626 continue;
5629 /* If this is an assignment from our leader (which
5630 happens when the value-number is a constant)
5631 then there is nothing to do. */
5632 if (gimple_assign_single_p (stmt)
5633 && sprime == gimple_assign_rhs1 (stmt))
5634 continue;
5636 /* Else replace its RHS. */
5637 bool can_make_abnormal_goto
5638 = is_gimple_call (stmt)
5639 && stmt_can_make_abnormal_goto (stmt);
5641 if (dump_file && (dump_flags & TDF_DETAILS))
5643 fprintf (dump_file, "Replaced ");
5644 print_gimple_expr (dump_file, stmt, 0);
5645 fprintf (dump_file, " with ");
5646 print_generic_expr (dump_file, sprime);
5647 fprintf (dump_file, " in ");
5648 print_gimple_stmt (dump_file, stmt, 0);
5651 eliminations++;
5652 gimple *orig_stmt = stmt;
5653 if (!useless_type_conversion_p (TREE_TYPE (lhs),
5654 TREE_TYPE (sprime)))
5655 sprime = fold_convert (TREE_TYPE (lhs), sprime);
5656 tree vdef = gimple_vdef (stmt);
5657 tree vuse = gimple_vuse (stmt);
5658 propagate_tree_value_into_stmt (&gsi, sprime);
5659 stmt = gsi_stmt (gsi);
5660 update_stmt (stmt);
5661 if (vdef != gimple_vdef (stmt))
5662 VN_INFO (vdef)->valnum = vuse;
5664 /* If we removed EH side-effects from the statement, clean
5665 its EH information. */
5666 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
5668 bitmap_set_bit (need_eh_cleanup,
5669 gimple_bb (stmt)->index);
5670 if (dump_file && (dump_flags & TDF_DETAILS))
5671 fprintf (dump_file, " Removed EH side-effects.\n");
5674 /* Likewise for AB side-effects. */
5675 if (can_make_abnormal_goto
5676 && !stmt_can_make_abnormal_goto (stmt))
5678 bitmap_set_bit (need_ab_cleanup,
5679 gimple_bb (stmt)->index);
5680 if (dump_file && (dump_flags & TDF_DETAILS))
5681 fprintf (dump_file, " Removed AB side-effects.\n");
5684 continue;
5688 /* If the statement is a scalar store, see if the expression
5689 has the same value number as its rhs. If so, the store is
5690 dead. */
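/* Illustrative example (added; names are made up):
     x_1 = *p_2;   ...   *p_2 = x_1;
   If nothing clobbers *p_2 in between, the store writes back the value
   that is already in memory and can be removed as redundant.  */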
5691 if (gimple_assign_single_p (stmt)
5692 && !gimple_has_volatile_ops (stmt)
5693 && !is_gimple_reg (gimple_assign_lhs (stmt))
5694 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5695 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
5697 tree val;
5698 tree rhs = gimple_assign_rhs1 (stmt);
5699 vn_reference_t vnresult;
5700 val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
5701 &vnresult, false);
5702 if (TREE_CODE (rhs) == SSA_NAME)
5703 rhs = VN_INFO (rhs)->valnum;
5704 if (val
5705 && operand_equal_p (val, rhs, 0))
5707 /* We can only remove the later store if the former aliases
5708 at least all accesses the later one does or if the store
5709 was to readonly memory storing the same value. */
5710 alias_set_type set = get_alias_set (lhs);
5711 if (! vnresult
5712 || vnresult->set == set
5713 || alias_set_subset_of (set, vnresult->set))
5715 if (dump_file && (dump_flags & TDF_DETAILS))
5717 fprintf (dump_file, "Deleted redundant store ");
5718 print_gimple_stmt (dump_file, stmt, 0);
5721 /* Queue stmt for removal. */
5722 to_remove.safe_push (stmt);
5723 continue;
5728 /* If this is a control statement and value numbering left one of
5729 its outgoing edges unexecutable, force the condition to a constant
5730 consistent with that. */
5731 if (gcond *cond = dyn_cast <gcond *> (stmt))
5733 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
5734 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
5736 if (dump_file && (dump_flags & TDF_DETAILS))
5738 fprintf (dump_file, "Removing unexecutable edge from ");
5739 print_gimple_stmt (dump_file, stmt, 0);
5741 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
5742 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
5743 gimple_cond_make_true (cond);
5744 else
5745 gimple_cond_make_false (cond);
5746 update_stmt (cond);
5747 el_todo |= TODO_cleanup_cfg;
5748 continue;
5752 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
5753 bool was_noreturn = (is_gimple_call (stmt)
5754 && gimple_call_noreturn_p (stmt));
5755 tree vdef = gimple_vdef (stmt);
5756 tree vuse = gimple_vuse (stmt);
5758 /* If we didn't replace the whole stmt (or propagate the result
5759 into all uses), replace all uses on this stmt with their
5760 leaders. */
5761 bool modified = false;
5762 use_operand_p use_p;
5763 ssa_op_iter iter;
5764 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5766 tree use = USE_FROM_PTR (use_p);
5767 /* ??? The call code above leaves stmt operands un-updated. */
5768 if (TREE_CODE (use) != SSA_NAME)
5769 continue;
5770 tree sprime = eliminate_avail (use);
5771 if (sprime && sprime != use
5772 && may_propagate_copy (use, sprime)
5773 /* We substitute into debug stmts to avoid excessive
5774 debug temporaries created by removed stmts, but we need
5775 to avoid doing so for inserted sprimes as we never want
5776 to create debug temporaries for them. */
5777 && (!inserted_exprs
5778 || TREE_CODE (sprime) != SSA_NAME
5779 || !is_gimple_debug (stmt)
5780 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
5782 propagate_value (use_p, sprime);
5783 modified = true;
5787 /* Fold the stmt if modified; this canonicalizes MEM_REFs we propagated
5788 into, which is a requirement for the IPA devirt machinery. */
5789 gimple *old_stmt = stmt;
5790 if (modified)
5792 /* If a formerly non-invariant ADDR_EXPR was turned into an
5793 invariant one, it was on a separate stmt. */
5794 if (gimple_assign_single_p (stmt)
5795 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
5796 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
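/* Remember the position just before STMT so stmts that fold_stmt
inserts in front of it can be found and processed below. */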
5797 gimple_stmt_iterator prev = gsi;
5798 gsi_prev (&prev);
5799 if (fold_stmt (&gsi))
5801 /* fold_stmt may have created new stmts in between
5802 the previous stmt and the folded stmt. Mark
5803 all defs created there as varying to not confuse
5804 the SCCVN machinery as we're using that even during
5805 elimination. */
5806 if (gsi_end_p (prev))
5807 prev = gsi_start_bb (b);
5808 else
5809 gsi_next (&prev);
5810 if (gsi_stmt (prev) != gsi_stmt (gsi))
5813 tree def;
5814 ssa_op_iter dit;
5815 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
5816 dit, SSA_OP_ALL_DEFS)
5817 /* As existing DEFs may move between stmts
5818 we have to guard VN_INFO_GET. */
5819 if (! has_VN_INFO (def))
5820 VN_INFO_GET (def)->valnum = def;
5821 if (gsi_stmt (prev) == gsi_stmt (gsi))
5822 break;
5823 gsi_next (&prev);
5825 while (1);
5827 stmt = gsi_stmt (gsi);
5828 /* In case we folded the stmt away, schedule the NOP for removal. */
5829 if (gimple_nop_p (stmt))
5830 to_remove.safe_push (stmt);
5833 /* Visit indirect calls and turn them into direct calls if
5834 possible using the devirtualization machinery. Do this before
5835 checking for required EH/abnormal/noreturn cleanup as devirt
5836 may expose more of those. */
5837 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5839 tree fn = gimple_call_fn (call_stmt);
5840 if (fn
5841 && flag_devirtualize
5842 && virtual_method_call_p (fn))
5844 tree otr_type = obj_type_ref_class (fn);
5845 unsigned HOST_WIDE_INT otr_tok
5846 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
5847 tree instance;
5848 ipa_polymorphic_call_context context (current_function_decl,
5849 fn, stmt, &instance);
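/* Try to determine the dynamic type of the instance to refine
the polymorphic call context. */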
5850 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
5851 otr_type, stmt);
5852 bool final;
5853 vec <cgraph_node *> targets
5854 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
5855 otr_tok, context, &final);
5856 if (dump_file)
5857 dump_possible_polymorphic_call_targets (dump_file,
5858 obj_type_ref_class (fn),
5859 otr_tok, context);
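/* With a complete (final) target list of at most one entry we can
devirtualize: a single target yields a direct call, no targets
means the call is unreachable and is redirected to
__builtin_unreachable below. */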
5860 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5862 tree fn;
5863 if (targets.length () == 1)
5864 fn = targets[0]->decl;
5865 else
5866 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5867 if (dump_enabled_p ())
5869 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
5870 "converting indirect call to "
5871 "function %s\n",
5872 lang_hooks.decl_printable_name (fn, 2));
5874 gimple_call_set_fndecl (call_stmt, fn);
5875 /* If changing the call to __builtin_unreachable
5876 or a similar noreturn function, adjust gimple_call_fntype
5877 too. */
5878 if (gimple_call_noreturn_p (call_stmt)
5879 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
5880 && TYPE_ARG_TYPES (TREE_TYPE (fn))
5881 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
5882 == void_type_node))
5883 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
5884 maybe_remove_unused_call_args (cfun, call_stmt);
5885 modified = true;
5890 if (modified)
5892 /* When changing a call into a noreturn call, cfg cleanup
5893 is needed to fix up the noreturn call. */
5894 if (!was_noreturn
5895 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
5896 to_fixup.safe_push (stmt);
5897 /* When changing a condition or switch into one for which we know
5898 which edge will be executed, schedule a cfg cleanup. */
5899 if ((gimple_code (stmt) == GIMPLE_COND
5900 && (gimple_cond_true_p (as_a <gcond *> (stmt))
5901 || gimple_cond_false_p (as_a <gcond *> (stmt))))
5902 || (gimple_code (stmt) == GIMPLE_SWITCH
5903 && TREE_CODE (gimple_switch_index
5904 (as_a <gswitch *> (stmt))) == INTEGER_CST))
5905 el_todo |= TODO_cleanup_cfg;
5906 /* If we removed EH side-effects from the statement, clean
5907 its EH information. */
5908 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
5910 bitmap_set_bit (need_eh_cleanup,
5911 gimple_bb (stmt)->index);
5912 if (dump_file && (dump_flags & TDF_DETAILS))
5913 fprintf (dump_file, " Removed EH side-effects.\n");
5915 /* Likewise for AB side-effects. */
5916 if (can_make_abnormal_goto
5917 && !stmt_can_make_abnormal_goto (stmt))
5919 bitmap_set_bit (need_ab_cleanup,
5920 gimple_bb (stmt)->index);
5921 if (dump_file && (dump_flags & TDF_DETAILS))
5922 fprintf (dump_file, " Removed AB side-effects.\n");
5924 update_stmt (stmt);
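/* If folding removed the virtual definition of the stmt, value
number the old VDEF to the incoming VUSE so value numbering
sees through the no longer existing definition. */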
5925 if (vdef != gimple_vdef (stmt))
5926 VN_INFO (vdef)->valnum = vuse;
5929 /* Make new values available; for a fully redundant LHS we
5930 continue with the next stmt above and skip this. */
5931 def_operand_p defp;
5932 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
5933 eliminate_push_avail (DEF_FROM_PTR (defp));
5936 /* Replace destination PHI arguments. */
5937 FOR_EACH_EDGE (e, ei, b->succs)
5938 if (e->flags & EDGE_EXECUTABLE)
5939 for (gphi_iterator gsi = gsi_start_phis (e->dest);
5940 !gsi_end_p (gsi);
5941 gsi_next (&gsi))
5943 gphi *phi = gsi.phi ();
5944 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
5945 tree arg = USE_FROM_PTR (use_p);
5946 if (TREE_CODE (arg) != SSA_NAME
5947 || virtual_operand_p (arg))
5948 continue;
5949 tree sprime = eliminate_avail (arg);
5950 if (sprime && may_propagate_copy (arg, sprime))
5951 propagate_value (use_p, sprime);
5953 return NULL;
5956 /* Unwind the avail stack, making leaders that are no longer valid (we are leaving their dominating block) unavailable again. */
5958 void
5959 eliminate_dom_walker::after_dom_children (basic_block)
5961 tree entry;
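/* Pop entries down to the NULL_TREE marker pushed when entering
the block. A popped name that is still the recorded leader for
its value is simply invalidated; otherwise it is an earlier
leader that got shadowed and is reinstated. */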
5962 while ((entry = avail_stack.pop ()) != NULL_TREE)
5964 tree valnum = VN_INFO (entry)->valnum;
5965 tree old = avail[SSA_NAME_VERSION (valnum)];
5966 if (old == entry)
5967 avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
5968 else
5969 avail[SSA_NAME_VERSION (valnum)] = entry;
5973 /* Eliminate fully redundant computations. */
5975 unsigned int
5976 vn_eliminate (bitmap inserted_exprs)
5978 eliminate_dom_walker el (CDI_DOMINATORS, inserted_exprs);
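/* The leader table is indexed by SSA name version, so reserve
room for all existing names up front. */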
5979 el.avail.reserve (num_ssa_names);
5981 el.walk (cfun->cfg->x_entry_block_ptr);
5983 /* We cannot remove stmts during the BB walk, especially not release SSA
5984 names there, as this confuses the VN machinery. The stmts ending
5985 up in to_remove are either stores or simple copies.
5986 Remove stmts in reverse order to make debug stmt creation possible. */
5987 while (!el.to_remove.is_empty ())
5989 gimple *stmt = el.to_remove.pop ();
5991 if (dump_file && (dump_flags & TDF_DETAILS))
5993 fprintf (dump_file, "Removing dead stmt ");
5994 print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
5997 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5998 if (gimple_code (stmt) == GIMPLE_PHI)
5999 remove_phi_node (&gsi, true);
6000 else
6002 basic_block bb = gimple_bb (stmt);
6003 unlink_stmt_vdef (stmt);
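/* If removing the stmt required EH information to be updated,
schedule purging of now dead EH edges for this block. */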
6004 if (gsi_remove (&gsi, true))
6005 bitmap_set_bit (el.need_eh_cleanup, bb->index);
6006 if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
6007 bitmap_set_bit (el.need_ab_cleanup, bb->index);
6008 release_defs (stmt);
6011 /* Removing a stmt may expose a forwarder block. */
6012 el.el_todo |= TODO_cleanup_cfg;
6015 /* Fixup stmts that became noreturn calls. This may require splitting
6016 blocks and thus isn't possible during the dominator walk. Do this
6017 in reverse order so we don't inadvertently remove a stmt we want to
6018 fix up by visiting a dominating now-noreturn call first. */
6019 while (!el.to_fixup.is_empty ())
6021 gimple *stmt = el.to_fixup.pop ();
6023 if (dump_file && (dump_flags & TDF_DETAILS))
6025 fprintf (dump_file, "Fixing up noreturn call ");
6026 print_gimple_stmt (dump_file, stmt, 0);
6029 if (fixup_noreturn_call (stmt))
6030 el.el_todo |= TODO_cleanup_cfg;
6033 bool do_eh_cleanup = !bitmap_empty_p (el.need_eh_cleanup);
6034 bool do_ab_cleanup = !bitmap_empty_p (el.need_ab_cleanup);
6036 if (do_eh_cleanup)
6037 gimple_purge_all_dead_eh_edges (el.need_eh_cleanup);
6039 if (do_ab_cleanup)
6040 gimple_purge_all_dead_abnormal_call_edges (el.need_ab_cleanup);
6042 if (do_eh_cleanup || do_ab_cleanup)
6043 el.el_todo |= TODO_cleanup_cfg;
6045 statistics_counter_event (cfun, "Eliminated", el.eliminations);
6046 statistics_counter_event (cfun, "Insertions", el.insertions);
6048 return el.el_todo;
6052 namespace {
6054 const pass_data pass_data_fre =
6056 GIMPLE_PASS, /* type */
6057 "fre", /* name */
6058 OPTGROUP_NONE, /* optinfo_flags */
6059 TV_TREE_FRE, /* tv_id */
6060 ( PROP_cfg | PROP_ssa ), /* properties_required */
6061 0, /* properties_provided */
6062 0, /* properties_destroyed */
6063 0, /* todo_flags_start */
6064 0, /* todo_flags_finish */
6067 class pass_fre : public gimple_opt_pass
6069 public:
6070 pass_fre (gcc::context *ctxt)
6071 : gimple_opt_pass (pass_data_fre, ctxt)
6074 /* opt_pass methods: */
6075 opt_pass * clone () { return new pass_fre (m_ctxt); }
6076 virtual bool gate (function *) { return flag_tree_fre != 0; }
6077 virtual unsigned int execute (function *);
6079 }; // class pass_fre
6081 unsigned int
6082 pass_fre::execute (function *)
6084 unsigned int todo = 0;
6086 run_scc_vn (VN_WALKREWRITE);
6088 /* Remove all the redundant expressions. */
6089 todo |= vn_eliminate (NULL);
6091 scc_vn_restore_ssa_info ();
6092 free_scc_vn ();
6094 return todo;
6097 } // anon namespace
6099 gimple_opt_pass *
6100 make_pass_fre (gcc::context *ctxt)
6102 return new pass_fre (ctxt);