PR tree-optimization/86415 - strlen() not folded for substrings within constant arrays
[official-gcc.git] / gcc / tree-ssa-sccvn.c
blob 1e16e13cfa16d909c88cd2e0d703cff2c6d6797b
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2018 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimplify.h"
45 #include "flags.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "dumpfile.h"
55 #include "cfgloop.h"
56 #include "params.h"
57 #include "tree-ssa-propagate.h"
58 #include "tree-cfg.h"
59 #include "domwalk.h"
60 #include "gimple-iterator.h"
61 #include "gimple-match.h"
62 #include "stringpool.h"
63 #include "attribs.h"
64 #include "tree-pass.h"
65 #include "statistics.h"
66 #include "langhooks.h"
67 #include "ipa-utils.h"
68 #include "dbgcnt.h"
69 #include "tree-cfgcleanup.h"
70 #include "tree-ssa-loop.h"
71 #include "tree-scalar-evolution.h"
72 #include "tree-ssa-sccvn.h"
74 /* This algorithm is based on the SCC algorithm presented by Keith
75 Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
76 (http://citeseer.ist.psu.edu/41805.html). In
77 straight line code, it is equivalent to a regular hash based value
78 numbering that is performed in reverse postorder.
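   For example, in straight-line code such as

     a_1 = x_2 + y_3;
     b_4 = x_2 + y_3;

   the second occurrence of x_2 + y_3 hashes to the same table entry as
   the first, so a_1 and b_4 receive the same value number (the SSA
   names here are purely illustrative).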
80 For code with cycles, there are two alternatives, both of which
81 require keeping the hashtables separate from the actual list of
82 value numbers for SSA names.
84 1. Iterate value numbering in an RPO walk of the blocks, removing
85 all the entries from the hashtable after each iteration (but
86 keeping the SSA name->value number mapping between iterations).
87 Iterate until it does not change.
89 2. Perform value numbering as part of an SCC walk on the SSA graph,
90 iterating only the cycles in the SSA graph until they do not change
91 (using a separate, optimistic hashtable for value numbering the SCC
92 operands).
94 The second is not just faster in practice (because most SSA graph
95 cycles do not involve all the variables in the graph), it also has
96 some nice properties.
98 One of these nice properties is that when we pop an SCC off the
99 stack, we are guaranteed to have processed all the operands coming from
100 *outside of that SCC*, so we do not need to do anything special to
101 ensure they have value numbers.
103 Another nice property is that the SCC walk is done as part of a DFS
104 of the SSA graph, which makes it easy to perform combining and
105 simplifying operations at the same time.
107 The code below is deliberately written in a way that makes it easy
108 to separate the SCC walk from the other work it does.
110 In order to propagate constants through the code, we track which
111 expressions contain constants, and use those while folding. In
112 theory, we could also track expressions whose value numbers are
113 replaced, in case we end up folding based on expression
114 identities.
116 In order to value number memory, we assign value numbers to vuses.
117 This enables us to note that, for example, stores to the same
118 address of the same value from the same starting memory states are
119 equivalent.
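   For example, two loads

     # VUSE <.MEM_5>
     x_1 = *p_2;
     ...
     # VUSE <.MEM_5>
     y_3 = *p_2;

   read through the same pointer from the same memory state .MEM_5, so
   they receive the same value number and the second load is redundant
   (again with purely illustrative SSA names).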
120 TODO:
122 1. We can iterate only the changing portions of the SCCs, but
123 I have not seen an SCC big enough for this to be a win.
124 2. If you differentiate between phi nodes for loops and phi nodes
125 for if-then-else, you can properly consider phi nodes in different
126 blocks for equivalence.
127 3. We could value number vuses in more cases, particularly, whole
128 structure copies.
132 static tree *last_vuse_ptr;
133 static vn_lookup_kind vn_walk_kind;
134 static vn_lookup_kind default_vn_walk_kind;
135 bitmap const_parms;
137 /* vn_nary_op hashtable helpers. */
139 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
141 typedef vn_nary_op_s *compare_type;
142 static inline hashval_t hash (const vn_nary_op_s *);
143 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
146 /* Return the computed hashcode for nary operation P1. */
148 inline hashval_t
149 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
151 return vno1->hashcode;
154 /* Compare nary operations P1 and P2 and return true if they are
155 equivalent. */
157 inline bool
158 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
160 return vn_nary_op_eq (vno1, vno2);
163 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
164 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
167 /* vn_phi hashtable helpers. */
169 static int
170 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
172 struct vn_phi_hasher : pointer_hash <vn_phi_s>
174 static inline hashval_t hash (const vn_phi_s *);
175 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
176 static inline void remove (vn_phi_s *);
179 /* Return the computed hashcode for phi operation P1. */
181 inline hashval_t
182 vn_phi_hasher::hash (const vn_phi_s *vp1)
184 return vp1->hashcode;
187 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
189 inline bool
190 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
192 return vn_phi_eq (vp1, vp2);
195 /* Free a phi operation structure PHI. */
197 inline void
198 vn_phi_hasher::remove (vn_phi_s *phi)
200 phi->phiargs.release ();
203 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
204 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
207 /* Compare two reference operands P1 and P2 for equality. Return true if
208 they are equal, and false otherwise. */
210 static int
211 vn_reference_op_eq (const void *p1, const void *p2)
213 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
214 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
216 return (vro1->opcode == vro2->opcode
217 /* We do not care for differences in type qualification. */
218 && (vro1->type == vro2->type
219 || (vro1->type && vro2->type
220 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
221 TYPE_MAIN_VARIANT (vro2->type))))
222 && expressions_equal_p (vro1->op0, vro2->op0)
223 && expressions_equal_p (vro1->op1, vro2->op1)
224 && expressions_equal_p (vro1->op2, vro2->op2));
227 /* Free a reference operation structure VR. */
229 static inline void
230 free_reference (vn_reference_s *vr)
232 vr->operands.release ();
236 /* vn_reference hashtable helpers. */
238 struct vn_reference_hasher : pointer_hash <vn_reference_s>
240 static inline hashval_t hash (const vn_reference_s *);
241 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
242 static inline void remove (vn_reference_s *);
245 /* Return the hashcode for a given reference operation P1. */
247 inline hashval_t
248 vn_reference_hasher::hash (const vn_reference_s *vr1)
250 return vr1->hashcode;
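/* Return true if the references V and C are equal. */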
253 inline bool
254 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
256 return vn_reference_eq (v, c);
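/* Free the reference V by releasing its operands. */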
259 inline void
260 vn_reference_hasher::remove (vn_reference_s *v)
262 free_reference (v);
265 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
266 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
269 /* The set of hashtables and alloc_pool's for their items. */
271 typedef struct vn_tables_s
273 vn_nary_op_table_type *nary;
274 vn_phi_table_type *phis;
275 vn_reference_table_type *references;
276 struct obstack nary_obstack;
277 object_allocator<vn_phi_s> *phis_pool;
278 object_allocator<vn_reference_s> *references_pool;
279 } *vn_tables_t;
282 /* vn_constant hashtable helpers. */
284 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
286 static inline hashval_t hash (const vn_constant_s *);
287 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
290 /* Hash table hash function for vn_constant_t. */
292 inline hashval_t
293 vn_constant_hasher::hash (const vn_constant_s *vc1)
295 return vc1->hashcode;
298 /* Hash table equality function for vn_constant_t. */
300 inline bool
301 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
303 if (vc1->hashcode != vc2->hashcode)
304 return false;
306 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
309 static hash_table<vn_constant_hasher> *constant_to_value_id;
310 static bitmap constant_value_ids;
313 /* Valid hashtables storing information we have proven to be
314 correct. */
316 static vn_tables_t valid_info;
318 /* Optimistic hashtables storing information we are making assumptions about
319 during iterations. */
321 static vn_tables_t optimistic_info;
323 /* Pointer to the set of hashtables that is currently being used.
324 Should always point to either the optimistic_info, or the
325 valid_info. */
327 static vn_tables_t current_info;
330 /* Reverse post order index for each basic block. */
332 static int *rpo_numbers;
334 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
336 /* Return the SSA value of the VUSE x, supporting released VDEFs
337 during elimination which will value-number the VDEF to the
338 associated VUSE (but not substitute in the whole lattice). */
340 static inline tree
341 vuse_ssa_val (tree x)
343 if (!x)
344 return NULL_TREE;
348 tree tem = SSA_VAL (x);
349 /* stmt walking can walk over a backedge and reach code we didn't
350 value-number yet. */
351 if (tem == VN_TOP)
352 return x;
353 x = tem;
355 while (SSA_NAME_IN_FREE_LIST (x));
357 return x;
360 /* This represents the top of the VN lattice, which is the universal
361 value. */
363 tree VN_TOP;
365 /* Unique counter for our value ids. */
367 static unsigned int next_value_id;
369 /* Next DFS number and the stack for strongly connected component
370 detection. */
372 static unsigned int next_dfs_num;
373 static vec<tree> sccstack;
377 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
378 are allocated on an obstack for locality reasons, and to free them
379 without looping over the vec. */
381 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
382 static struct obstack vn_ssa_aux_obstack;
384 /* Return whether there is value numbering information for a given SSA name. */
386 bool
387 has_VN_INFO (tree name)
389 if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
390 return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
391 return false;
394 /* Return the value numbering information for a given SSA name. */
396 vn_ssa_aux_t
397 VN_INFO (tree name)
399 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
400 gcc_checking_assert (res);
401 return res;
404 /* Set the value numbering info for a given SSA name to a given
405 value. */
407 static inline void
408 VN_INFO_SET (tree name, vn_ssa_aux_t value)
410 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
413 /* Initialize the value numbering info for a given SSA name.
414 This should be called just once for every SSA name. */
416 vn_ssa_aux_t
417 VN_INFO_GET (tree name)
419 vn_ssa_aux_t newinfo;
421 gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
422 || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
423 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
424 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
425 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
426 vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
427 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
428 return newinfo;
432 /* Return the vn_kind the expression computed by the stmt should be
433 associated with. */
435 enum vn_kind
436 vn_get_stmt_kind (gimple *stmt)
438 switch (gimple_code (stmt))
440 case GIMPLE_CALL:
441 return VN_REFERENCE;
442 case GIMPLE_PHI:
443 return VN_PHI;
444 case GIMPLE_ASSIGN:
446 enum tree_code code = gimple_assign_rhs_code (stmt);
447 tree rhs1 = gimple_assign_rhs1 (stmt);
448 switch (get_gimple_rhs_class (code))
450 case GIMPLE_UNARY_RHS:
451 case GIMPLE_BINARY_RHS:
452 case GIMPLE_TERNARY_RHS:
453 return VN_NARY;
454 case GIMPLE_SINGLE_RHS:
455 switch (TREE_CODE_CLASS (code))
457 case tcc_reference:
458 /* VOP-less references can go through unary case. */
459 if ((code == REALPART_EXPR
460 || code == IMAGPART_EXPR
461 || code == VIEW_CONVERT_EXPR
462 || code == BIT_FIELD_REF)
463 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
464 return VN_NARY;
466 /* Fallthrough. */
467 case tcc_declaration:
468 return VN_REFERENCE;
470 case tcc_constant:
471 return VN_CONSTANT;
473 default:
474 if (code == ADDR_EXPR)
475 return (is_gimple_min_invariant (rhs1)
476 ? VN_CONSTANT : VN_REFERENCE);
477 else if (code == CONSTRUCTOR)
478 return VN_NARY;
479 return VN_NONE;
481 default:
482 return VN_NONE;
485 default:
486 return VN_NONE;
490 /* Lookup a value id for CONSTANT and return it. If it does not
491 exist, return 0. */
493 unsigned int
494 get_constant_value_id (tree constant)
496 vn_constant_s **slot;
497 struct vn_constant_s vc;
499 vc.hashcode = vn_hash_constant_with_type (constant);
500 vc.constant = constant;
501 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
502 if (slot)
503 return (*slot)->value_id;
504 return 0;
507 /* Lookup a value id for CONSTANT, and if it does not exist, create a
508 new one and return it. If it does exist, return it. */
510 unsigned int
511 get_or_alloc_constant_value_id (tree constant)
513 vn_constant_s **slot;
514 struct vn_constant_s vc;
515 vn_constant_t vcp;
517 vc.hashcode = vn_hash_constant_with_type (constant);
518 vc.constant = constant;
519 slot = constant_to_value_id->find_slot (&vc, INSERT);
520 if (*slot)
521 return (*slot)->value_id;
523 vcp = XNEW (struct vn_constant_s);
524 vcp->hashcode = vc.hashcode;
525 vcp->constant = constant;
526 vcp->value_id = get_next_value_id ();
527 *slot = vcp;
528 bitmap_set_bit (constant_value_ids, vcp->value_id);
529 return vcp->value_id;
532 /* Return true if V is a value id for a constant. */
534 bool
535 value_id_constant_p (unsigned int v)
537 return bitmap_bit_p (constant_value_ids, v);
540 /* Compute the hash for a reference operand VRO1. */
542 static void
543 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
545 hstate.add_int (vro1->opcode);
546 if (vro1->op0)
547 inchash::add_expr (vro1->op0, hstate);
548 if (vro1->op1)
549 inchash::add_expr (vro1->op1, hstate);
550 if (vro1->op2)
551 inchash::add_expr (vro1->op2, hstate);
554 /* Compute a hash for the reference operation VR1 and return it. */
556 static hashval_t
557 vn_reference_compute_hash (const vn_reference_t vr1)
559 inchash::hash hstate;
560 hashval_t result;
561 int i;
562 vn_reference_op_t vro;
563 poly_int64 off = -1;
564 bool deref = false;
566 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
568 if (vro->opcode == MEM_REF)
569 deref = true;
570 else if (vro->opcode != ADDR_EXPR)
571 deref = false;
572 if (maybe_ne (vro->off, -1))
574 if (known_eq (off, -1))
575 off = 0;
576 off += vro->off;
578 else
580 if (maybe_ne (off, -1)
581 && maybe_ne (off, 0))
582 hstate.add_poly_int (off);
583 off = -1;
584 if (deref
585 && vro->opcode == ADDR_EXPR)
587 if (vro->op0)
589 tree op = TREE_OPERAND (vro->op0, 0);
590 hstate.add_int (TREE_CODE (op));
591 inchash::add_expr (op, hstate);
594 else
595 vn_reference_op_compute_hash (vro, hstate);
598 result = hstate.end ();
599 /* ??? We would ICE later if we hash instead of adding that in. */
600 if (vr1->vuse)
601 result += SSA_NAME_VERSION (vr1->vuse);
603 return result;
606 /* Return true if reference operations VR1 and VR2 are equivalent. This
607 means they have the same set of operands and vuses. */
609 bool
610 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
612 unsigned i, j;
614 /* Early out if this is not a hash collision. */
615 if (vr1->hashcode != vr2->hashcode)
616 return false;
618 /* The VOP needs to be the same. */
619 if (vr1->vuse != vr2->vuse)
620 return false;
622 /* If the operands are the same we are done. */
623 if (vr1->operands == vr2->operands)
624 return true;
626 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
627 return false;
629 if (INTEGRAL_TYPE_P (vr1->type)
630 && INTEGRAL_TYPE_P (vr2->type))
632 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
633 return false;
635 else if (INTEGRAL_TYPE_P (vr1->type)
636 && (TYPE_PRECISION (vr1->type)
637 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
638 return false;
639 else if (INTEGRAL_TYPE_P (vr2->type)
640 && (TYPE_PRECISION (vr2->type)
641 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
642 return false;
644 i = 0;
645 j = 0;
648 poly_int64 off1 = 0, off2 = 0;
649 vn_reference_op_t vro1, vro2;
650 vn_reference_op_s tem1, tem2;
651 bool deref1 = false, deref2 = false;
652 for (; vr1->operands.iterate (i, &vro1); i++)
654 if (vro1->opcode == MEM_REF)
655 deref1 = true;
656 /* Do not look through a storage order barrier. */
657 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
658 return false;
659 if (known_eq (vro1->off, -1))
660 break;
661 off1 += vro1->off;
663 for (; vr2->operands.iterate (j, &vro2); j++)
665 if (vro2->opcode == MEM_REF)
666 deref2 = true;
667 /* Do not look through a storage order barrier. */
668 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
669 return false;
670 if (known_eq (vro2->off, -1))
671 break;
672 off2 += vro2->off;
674 if (maybe_ne (off1, off2))
675 return false;
676 if (deref1 && vro1->opcode == ADDR_EXPR)
678 memset (&tem1, 0, sizeof (tem1));
679 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
680 tem1.type = TREE_TYPE (tem1.op0);
681 tem1.opcode = TREE_CODE (tem1.op0);
682 vro1 = &tem1;
683 deref1 = false;
685 if (deref2 && vro2->opcode == ADDR_EXPR)
687 memset (&tem2, 0, sizeof (tem2));
688 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
689 tem2.type = TREE_TYPE (tem2.op0);
690 tem2.opcode = TREE_CODE (tem2.op0);
691 vro2 = &tem2;
692 deref2 = false;
694 if (deref1 != deref2)
695 return false;
696 if (!vn_reference_op_eq (vro1, vro2))
697 return false;
698 ++j;
699 ++i;
701 while (vr1->operands.length () != i
702 || vr2->operands.length () != j);
704 return true;
707 /* Copy the operations present in load/store REF into RESULT, a vector of
708 vn_reference_op_s's. */
710 static void
711 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
713 if (TREE_CODE (ref) == TARGET_MEM_REF)
715 vn_reference_op_s temp;
717 result->reserve (3);
719 memset (&temp, 0, sizeof (temp));
720 temp.type = TREE_TYPE (ref);
721 temp.opcode = TREE_CODE (ref);
722 temp.op0 = TMR_INDEX (ref);
723 temp.op1 = TMR_STEP (ref);
724 temp.op2 = TMR_OFFSET (ref);
725 temp.off = -1;
726 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
727 temp.base = MR_DEPENDENCE_BASE (ref);
728 result->quick_push (temp);
730 memset (&temp, 0, sizeof (temp));
731 temp.type = NULL_TREE;
732 temp.opcode = ERROR_MARK;
733 temp.op0 = TMR_INDEX2 (ref);
734 temp.off = -1;
735 result->quick_push (temp);
737 memset (&temp, 0, sizeof (temp));
738 temp.type = NULL_TREE;
739 temp.opcode = TREE_CODE (TMR_BASE (ref));
740 temp.op0 = TMR_BASE (ref);
741 temp.off = -1;
742 result->quick_push (temp);
743 return;
746 /* For non-calls, store the information that makes up the address. */
747 tree orig = ref;
748 while (ref)
750 vn_reference_op_s temp;
752 memset (&temp, 0, sizeof (temp));
753 temp.type = TREE_TYPE (ref);
754 temp.opcode = TREE_CODE (ref);
755 temp.off = -1;
757 switch (temp.opcode)
759 case MODIFY_EXPR:
760 temp.op0 = TREE_OPERAND (ref, 1);
761 break;
762 case WITH_SIZE_EXPR:
763 temp.op0 = TREE_OPERAND (ref, 1);
764 temp.off = 0;
765 break;
766 case MEM_REF:
767 /* The base address gets its own vn_reference_op_s structure. */
768 temp.op0 = TREE_OPERAND (ref, 1);
769 if (!mem_ref_offset (ref).to_shwi (&temp.off))
770 temp.off = -1;
771 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
772 temp.base = MR_DEPENDENCE_BASE (ref);
773 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
774 break;
775 case BIT_FIELD_REF:
776 /* Record bits, position and storage order. */
777 temp.op0 = TREE_OPERAND (ref, 1);
778 temp.op1 = TREE_OPERAND (ref, 2);
779 if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
780 temp.off = -1;
781 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
782 break;
783 case COMPONENT_REF:
784 /* The field decl is enough to unambiguously specify the field,
785 a matching type is not necessary and a mismatching type
786 is always a spurious difference. */
787 temp.type = NULL_TREE;
788 temp.op0 = TREE_OPERAND (ref, 1);
789 temp.op1 = TREE_OPERAND (ref, 2);
791 tree this_offset = component_ref_field_offset (ref);
792 if (this_offset
793 && poly_int_tree_p (this_offset))
795 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
796 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
798 poly_offset_int off
799 = (wi::to_poly_offset (this_offset)
800 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
801 /* Prohibit value-numbering zero offset components
802 of addresses the same before the pass folding
803 __builtin_object_size had a chance to run
804 (checking cfun->after_inlining does the
805 trick here). */
806 if (TREE_CODE (orig) != ADDR_EXPR
807 || maybe_ne (off, 0)
808 || cfun->after_inlining)
809 off.to_shwi (&temp.off);
813 break;
814 case ARRAY_RANGE_REF:
815 case ARRAY_REF:
817 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
818 /* Record index as operand. */
819 temp.op0 = TREE_OPERAND (ref, 1);
820 /* Always record lower bounds and element size. */
821 temp.op1 = array_ref_low_bound (ref);
822 /* But record element size in units of the type alignment. */
823 temp.op2 = TREE_OPERAND (ref, 3);
824 temp.align = eltype->type_common.align;
825 if (! temp.op2)
826 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
827 size_int (TYPE_ALIGN_UNIT (eltype)));
828 if (poly_int_tree_p (temp.op0)
829 && poly_int_tree_p (temp.op1)
830 && TREE_CODE (temp.op2) == INTEGER_CST)
832 poly_offset_int off = ((wi::to_poly_offset (temp.op0)
833 - wi::to_poly_offset (temp.op1))
834 * wi::to_offset (temp.op2)
835 * vn_ref_op_align_unit (&temp));
836 off.to_shwi (&temp.off);
839 break;
840 case VAR_DECL:
841 if (DECL_HARD_REGISTER (ref))
843 temp.op0 = ref;
844 break;
846 /* Fallthru. */
847 case PARM_DECL:
848 case CONST_DECL:
849 case RESULT_DECL:
850 /* Canonicalize decls to MEM[&decl] which is what we end up with
851 when valueizing MEM[ptr] with ptr = &decl. */
852 temp.opcode = MEM_REF;
853 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
854 temp.off = 0;
855 result->safe_push (temp);
856 temp.opcode = ADDR_EXPR;
857 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
858 temp.type = TREE_TYPE (temp.op0);
859 temp.off = -1;
860 break;
861 case STRING_CST:
862 case INTEGER_CST:
863 case COMPLEX_CST:
864 case VECTOR_CST:
865 case REAL_CST:
866 case FIXED_CST:
867 case CONSTRUCTOR:
868 case SSA_NAME:
869 temp.op0 = ref;
870 break;
871 case ADDR_EXPR:
872 if (is_gimple_min_invariant (ref))
874 temp.op0 = ref;
875 break;
877 break;
878 /* These are only interesting for their operands, their
879 existence, and their type. They will never be the last
880 ref in the chain of references (i.e. they require an
881 operand), so we don't have to put anything
882 for op* as it will be handled by the iteration. */
883 case REALPART_EXPR:
884 temp.off = 0;
885 break;
886 case VIEW_CONVERT_EXPR:
887 temp.off = 0;
888 temp.reverse = storage_order_barrier_p (ref);
889 break;
890 case IMAGPART_EXPR:
891 /* This is only interesting for its constant offset. */
892 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
893 break;
894 default:
895 gcc_unreachable ();
897 result->safe_push (temp);
899 if (REFERENCE_CLASS_P (ref)
900 || TREE_CODE (ref) == MODIFY_EXPR
901 || TREE_CODE (ref) == WITH_SIZE_EXPR
902 || (TREE_CODE (ref) == ADDR_EXPR
903 && !is_gimple_min_invariant (ref)))
904 ref = TREE_OPERAND (ref, 0);
905 else
906 ref = NULL_TREE;
910 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
911 operands in *OPS, the reference alias set SET and the reference type TYPE.
912 Return true if something useful was produced. */
914 bool
915 ao_ref_init_from_vn_reference (ao_ref *ref,
916 alias_set_type set, tree type,
917 vec<vn_reference_op_s> ops)
919 vn_reference_op_t op;
920 unsigned i;
921 tree base = NULL_TREE;
922 tree *op0_p = &base;
923 poly_offset_int offset = 0;
924 poly_offset_int max_size;
925 poly_offset_int size = -1;
926 tree size_tree = NULL_TREE;
927 alias_set_type base_alias_set = -1;
929 /* First get the final access size from just the outermost expression. */
930 op = &ops[0];
931 if (op->opcode == COMPONENT_REF)
932 size_tree = DECL_SIZE (op->op0);
933 else if (op->opcode == BIT_FIELD_REF)
934 size_tree = op->op0;
935 else
937 machine_mode mode = TYPE_MODE (type);
938 if (mode == BLKmode)
939 size_tree = TYPE_SIZE (type);
940 else
941 size = GET_MODE_BITSIZE (mode);
943 if (size_tree != NULL_TREE
944 && poly_int_tree_p (size_tree))
945 size = wi::to_poly_offset (size_tree);
947 /* Initially, maxsize is the same as the accessed element size.
948 In the following it will only grow (or become -1). */
949 max_size = size;
951 /* Compute cumulative bit-offset for nested component-refs and array-refs,
952 and find the ultimate containing object. */
953 FOR_EACH_VEC_ELT (ops, i, op)
955 switch (op->opcode)
957 /* These may be in the reference ops, but we cannot do anything
958 sensible with them here. */
959 case ADDR_EXPR:
960 /* Apart from ADDR_EXPR arguments to MEM_REF. */
961 if (base != NULL_TREE
962 && TREE_CODE (base) == MEM_REF
963 && op->op0
964 && DECL_P (TREE_OPERAND (op->op0, 0)))
966 vn_reference_op_t pop = &ops[i-1];
967 base = TREE_OPERAND (op->op0, 0);
968 if (known_eq (pop->off, -1))
970 max_size = -1;
971 offset = 0;
973 else
974 offset += pop->off * BITS_PER_UNIT;
975 op0_p = NULL;
976 break;
978 /* Fallthru. */
979 case CALL_EXPR:
980 return false;
982 /* Record the base objects. */
983 case MEM_REF:
984 base_alias_set = get_deref_alias_set (op->op0);
985 *op0_p = build2 (MEM_REF, op->type,
986 NULL_TREE, op->op0);
987 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
988 MR_DEPENDENCE_BASE (*op0_p) = op->base;
989 op0_p = &TREE_OPERAND (*op0_p, 0);
990 break;
992 case VAR_DECL:
993 case PARM_DECL:
994 case RESULT_DECL:
995 case SSA_NAME:
996 *op0_p = op->op0;
997 op0_p = NULL;
998 break;
1000 /* And now the usual component-reference style ops. */
1001 case BIT_FIELD_REF:
1002 offset += wi::to_poly_offset (op->op1);
1003 break;
1005 case COMPONENT_REF:
1007 tree field = op->op0;
1008 /* We do not have a complete COMPONENT_REF tree here so we
1009 cannot use component_ref_field_offset. Do the interesting
1010 parts manually. */
1011 tree this_offset = DECL_FIELD_OFFSET (field);
1013 if (op->op1 || !poly_int_tree_p (this_offset))
1014 max_size = -1;
1015 else
1017 poly_offset_int woffset = (wi::to_poly_offset (this_offset)
1018 << LOG2_BITS_PER_UNIT);
1019 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1020 offset += woffset;
1022 break;
1025 case ARRAY_RANGE_REF:
1026 case ARRAY_REF:
1027 /* We recorded the lower bound and the element size. */
1028 if (!poly_int_tree_p (op->op0)
1029 || !poly_int_tree_p (op->op1)
1030 || TREE_CODE (op->op2) != INTEGER_CST)
1031 max_size = -1;
1032 else
1034 poly_offset_int woffset
1035 = wi::sext (wi::to_poly_offset (op->op0)
1036 - wi::to_poly_offset (op->op1),
1037 TYPE_PRECISION (TREE_TYPE (op->op0)));
1038 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1039 woffset <<= LOG2_BITS_PER_UNIT;
1040 offset += woffset;
1042 break;
1044 case REALPART_EXPR:
1045 break;
1047 case IMAGPART_EXPR:
1048 offset += size;
1049 break;
1051 case VIEW_CONVERT_EXPR:
1052 break;
1054 case STRING_CST:
1055 case INTEGER_CST:
1056 case COMPLEX_CST:
1057 case VECTOR_CST:
1058 case REAL_CST:
1059 case CONSTRUCTOR:
1060 case CONST_DECL:
1061 return false;
1063 default:
1064 return false;
1068 if (base == NULL_TREE)
1069 return false;
1071 ref->ref = NULL_TREE;
1072 ref->base = base;
1073 ref->ref_alias_set = set;
1074 if (base_alias_set != -1)
1075 ref->base_alias_set = base_alias_set;
1076 else
1077 ref->base_alias_set = get_alias_set (base);
1078 /* We discount volatiles from value-numbering elsewhere. */
1079 ref->volatile_p = false;
1081 if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1083 ref->offset = 0;
1084 ref->size = -1;
1085 ref->max_size = -1;
1086 return true;
1089 if (!offset.to_shwi (&ref->offset))
1091 ref->offset = 0;
1092 ref->max_size = -1;
1093 return true;
1096 if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1097 ref->max_size = -1;
1099 return true;
1102 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1103 vn_reference_op_s's. */
1105 static void
1106 copy_reference_ops_from_call (gcall *call,
1107 vec<vn_reference_op_s> *result)
1109 vn_reference_op_s temp;
1110 unsigned i;
1111 tree lhs = gimple_call_lhs (call);
1112 int lr;
1114 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1115 different. By adding the lhs here in the vector, we ensure that the
1116 hashcode is different, guaranteeing a different value number. */
1117 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1119 memset (&temp, 0, sizeof (temp));
1120 temp.opcode = MODIFY_EXPR;
1121 temp.type = TREE_TYPE (lhs);
1122 temp.op0 = lhs;
1123 temp.off = -1;
1124 result->safe_push (temp);
1127 /* Copy the type, opcode, function, static chain and EH region, if any. */
1128 memset (&temp, 0, sizeof (temp));
1129 temp.type = gimple_call_return_type (call);
1130 temp.opcode = CALL_EXPR;
1131 temp.op0 = gimple_call_fn (call);
1132 temp.op1 = gimple_call_chain (call);
1133 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1134 temp.op2 = size_int (lr);
1135 temp.off = -1;
1136 result->safe_push (temp);
1138 /* Copy the call arguments. As they can be references as well,
1139 just chain them together. */
1140 for (i = 0; i < gimple_call_num_args (call); ++i)
1142 tree callarg = gimple_call_arg (call, i);
1143 copy_reference_ops_from_ref (callarg, result);
1147 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1148 *I_P to point to the last element of the replacement. */
1149 static bool
1150 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1151 unsigned int *i_p)
1153 unsigned int i = *i_p;
1154 vn_reference_op_t op = &(*ops)[i];
1155 vn_reference_op_t mem_op = &(*ops)[i - 1];
1156 tree addr_base;
1157 poly_int64 addr_offset = 0;
1159 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1160 from .foo.bar to the preceding MEM_REF offset and replace the
1161 address with &OBJ. */
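/* E.g. with &s.f as the address and .f at byte offset 4 within s, a
   preceding MEM_REF offset of 8 becomes 12 and the address becomes &s
   (illustrative field and offset values). */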
1162 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1163 &addr_offset);
1164 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1165 if (addr_base != TREE_OPERAND (op->op0, 0))
1167 poly_offset_int off
1168 = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1169 SIGNED)
1170 + addr_offset);
1171 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1172 op->op0 = build_fold_addr_expr (addr_base);
1173 if (tree_fits_shwi_p (mem_op->op0))
1174 mem_op->off = tree_to_shwi (mem_op->op0);
1175 else
1176 mem_op->off = -1;
1177 return true;
1179 return false;
1182 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1183 *I_P to point to the last element of the replacement. */
1184 static bool
1185 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1186 unsigned int *i_p)
1188 unsigned int i = *i_p;
1189 vn_reference_op_t op = &(*ops)[i];
1190 vn_reference_op_t mem_op = &(*ops)[i - 1];
1191 gimple *def_stmt;
1192 enum tree_code code;
1193 poly_offset_int off;
1195 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1196 if (!is_gimple_assign (def_stmt))
1197 return false;
1199 code = gimple_assign_rhs_code (def_stmt);
1200 if (code != ADDR_EXPR
1201 && code != POINTER_PLUS_EXPR)
1202 return false;
1204 off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
1206 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1207 from .foo.bar to the preceding MEM_REF offset and replace the
1208 address with &OBJ. */
1209 if (code == ADDR_EXPR)
1211 tree addr, addr_base;
1212 poly_int64 addr_offset;
1214 addr = gimple_assign_rhs1 (def_stmt);
1215 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1216 &addr_offset);
1217 /* If that didn't work because the address isn't invariant, propagate
1218 the reference tree from the address operation in case the current
1219 dereference isn't offsetted. */
1220 if (!addr_base
1221 && *i_p == ops->length () - 1
1222 && known_eq (off, 0)
1223 /* This makes us disable this transform for PRE where the
1225 reference ops might also be used for code insertion which
1225 is invalid. */
1226 && default_vn_walk_kind == VN_WALKREWRITE)
1228 auto_vec<vn_reference_op_s, 32> tem;
1229 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1230 /* Make sure to preserve TBAA info. The only objects not
1231 wrapped in MEM_REFs that can have their address taken are
1232 STRING_CSTs. */
1233 if (tem.length () >= 2
1234 && tem[tem.length () - 2].opcode == MEM_REF)
1236 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1237 new_mem_op->op0
1238 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1239 wi::to_poly_wide (new_mem_op->op0));
1241 else
1242 gcc_assert (tem.last ().opcode == STRING_CST);
1243 ops->pop ();
1244 ops->pop ();
1245 ops->safe_splice (tem);
1246 --*i_p;
1247 return true;
1249 if (!addr_base
1250 || TREE_CODE (addr_base) != MEM_REF
1251 || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1252 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base, 0))))
1253 return false;
1255 off += addr_offset;
1256 off += mem_ref_offset (addr_base);
1257 op->op0 = TREE_OPERAND (addr_base, 0);
1259 else
1261 tree ptr, ptroff;
1262 ptr = gimple_assign_rhs1 (def_stmt);
1263 ptroff = gimple_assign_rhs2 (def_stmt);
1264 if (TREE_CODE (ptr) != SSA_NAME
1265 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1266 || !poly_int_tree_p (ptroff))
1267 return false;
1269 off += wi::to_poly_offset (ptroff);
1270 op->op0 = ptr;
1273 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1274 if (tree_fits_shwi_p (mem_op->op0))
1275 mem_op->off = tree_to_shwi (mem_op->op0);
1276 else
1277 mem_op->off = -1;
1278 if (TREE_CODE (op->op0) == SSA_NAME)
1279 op->op0 = SSA_VAL (op->op0);
1280 if (TREE_CODE (op->op0) != SSA_NAME)
1281 op->opcode = TREE_CODE (op->op0);
1283 /* And recurse. */
1284 if (TREE_CODE (op->op0) == SSA_NAME)
1285 vn_reference_maybe_forwprop_address (ops, i_p);
1286 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1287 vn_reference_fold_indirect (ops, i_p);
1288 return true;
1291 /* Optimize the reference REF to a constant if possible or return
1292 NULL_TREE if not. */
1294 tree
1295 fully_constant_vn_reference_p (vn_reference_t ref)
1297 vec<vn_reference_op_s> operands = ref->operands;
1298 vn_reference_op_t op;
1300 /* Try to simplify the translated expression if it is
1301 a call to a builtin function with at most two arguments. */
1302 op = &operands[0];
1303 if (op->opcode == CALL_EXPR
1304 && TREE_CODE (op->op0) == ADDR_EXPR
1305 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1306 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1307 && operands.length () >= 2
1308 && operands.length () <= 3)
1310 vn_reference_op_t arg0, arg1 = NULL;
1311 bool anyconst = false;
1312 arg0 = &operands[1];
1313 if (operands.length () > 2)
1314 arg1 = &operands[2];
1315 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1316 || (arg0->opcode == ADDR_EXPR
1317 && is_gimple_min_invariant (arg0->op0)))
1318 anyconst = true;
1319 if (arg1
1320 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1321 || (arg1->opcode == ADDR_EXPR
1322 && is_gimple_min_invariant (arg1->op0))))
1323 anyconst = true;
1324 if (anyconst)
1326 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1327 arg1 ? 2 : 1,
1328 arg0->op0,
1329 arg1 ? arg1->op0 : NULL);
1330 if (folded
1331 && TREE_CODE (folded) == NOP_EXPR)
1332 folded = TREE_OPERAND (folded, 0);
1333 if (folded
1334 && is_gimple_min_invariant (folded))
1335 return folded;
1339 /* Simplify reads from constants or constant initializers. */
1340 else if (BITS_PER_UNIT == 8
1341 && is_gimple_reg_type (ref->type)
1342 && (!INTEGRAL_TYPE_P (ref->type)
1343 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1345 poly_int64 off = 0;
1346 HOST_WIDE_INT size;
1347 if (INTEGRAL_TYPE_P (ref->type))
1348 size = TYPE_PRECISION (ref->type);
1349 else
1350 size = tree_to_shwi (TYPE_SIZE (ref->type));
1351 if (size % BITS_PER_UNIT != 0
1352 || size > MAX_BITSIZE_MODE_ANY_MODE)
1353 return NULL_TREE;
1354 size /= BITS_PER_UNIT;
1355 unsigned i;
1356 for (i = 0; i < operands.length (); ++i)
1358 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1360 ++i;
1361 break;
1363 if (known_eq (operands[i].off, -1))
1364 return NULL_TREE;
1365 off += operands[i].off;
1366 if (operands[i].opcode == MEM_REF)
1368 ++i;
1369 break;
1372 vn_reference_op_t base = &operands[--i];
1373 tree ctor = error_mark_node;
1374 tree decl = NULL_TREE;
1375 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1376 ctor = base->op0;
1377 else if (base->opcode == MEM_REF
1378 && base[1].opcode == ADDR_EXPR
1379 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1380 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1381 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1383 decl = TREE_OPERAND (base[1].op0, 0);
1384 if (TREE_CODE (decl) == STRING_CST)
1385 ctor = decl;
1386 else
1387 ctor = ctor_for_folding (decl);
1389 if (ctor == NULL_TREE)
1390 return build_zero_cst (ref->type);
1391 else if (ctor != error_mark_node)
1393 HOST_WIDE_INT const_off;
1394 if (decl)
1396 tree res = fold_ctor_reference (ref->type, ctor,
1397 off * BITS_PER_UNIT,
1398 size * BITS_PER_UNIT, decl);
1399 if (res)
1401 STRIP_USELESS_TYPE_CONVERSION (res);
1402 if (is_gimple_min_invariant (res))
1403 return res;
1406 else if (off.is_constant (&const_off))
1408 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1409 int len = native_encode_expr (ctor, buf, size, const_off);
1410 if (len > 0)
1411 return native_interpret_expr (ref->type, buf, len);
1416 return NULL_TREE;
1419 /* Return true if OPS contain a storage order barrier. */
1421 static bool
1422 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1424 vn_reference_op_t op;
1425 unsigned i;
1427 FOR_EACH_VEC_ELT (ops, i, op)
1428 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1429 return true;
1431 return false;
1434 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1435 structures into their value numbers. This is done in-place, and
1436 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1437 whether any operands were valueized. */
1439 static vec<vn_reference_op_s>
1440 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1442 vn_reference_op_t vro;
1443 unsigned int i;
1445 *valueized_anything = false;
1447 FOR_EACH_VEC_ELT (orig, i, vro)
1449 if (vro->opcode == SSA_NAME
1450 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1452 tree tem = SSA_VAL (vro->op0);
1453 if (tem != vro->op0)
1455 *valueized_anything = true;
1456 vro->op0 = tem;
1458 /* If it transforms from an SSA_NAME to a constant, update
1459 the opcode. */
1460 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1461 vro->opcode = TREE_CODE (vro->op0);
1463 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1465 tree tem = SSA_VAL (vro->op1);
1466 if (tem != vro->op1)
1468 *valueized_anything = true;
1469 vro->op1 = tem;
1472 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1474 tree tem = SSA_VAL (vro->op2);
1475 if (tem != vro->op2)
1477 *valueized_anything = true;
1478 vro->op2 = tem;
1481 /* If it transforms from an SSA_NAME to an address, fold with
1482 a preceding indirect reference. */
1483 if (i > 0
1484 && vro->op0
1485 && TREE_CODE (vro->op0) == ADDR_EXPR
1486 && orig[i - 1].opcode == MEM_REF)
1488 if (vn_reference_fold_indirect (&orig, &i))
1489 *valueized_anything = true;
1491 else if (i > 0
1492 && vro->opcode == SSA_NAME
1493 && orig[i - 1].opcode == MEM_REF)
1495 if (vn_reference_maybe_forwprop_address (&orig, &i))
1496 *valueized_anything = true;
1498 /* If it transforms a non-constant ARRAY_REF into a constant
1499 one, adjust the constant offset. */
1500 else if (vro->opcode == ARRAY_REF
1501 && known_eq (vro->off, -1)
1502 && poly_int_tree_p (vro->op0)
1503 && poly_int_tree_p (vro->op1)
1504 && TREE_CODE (vro->op2) == INTEGER_CST)
1506 poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1507 - wi::to_poly_offset (vro->op1))
1508 * wi::to_offset (vro->op2)
1509 * vn_ref_op_align_unit (vro));
1510 off.to_shwi (&vro->off);
1514 return orig;
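/* Like valueize_refs_1, but do not report whether anything was valueized. */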
1517 static vec<vn_reference_op_s>
1518 valueize_refs (vec<vn_reference_op_s> orig)
1520 bool tem;
1521 return valueize_refs_1 (orig, &tem);
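/* Shared vector of vn_reference_op_s, reused by the functions below to
   avoid repeated allocations. */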
1524 static vec<vn_reference_op_s> shared_lookup_references;
1526 /* Create a vector of vn_reference_op_s structures from REF, a
1527 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1528 this function. *VALUEIZED_ANYTHING will specify whether any
1529 operands were valueized. */
1531 static vec<vn_reference_op_s>
1532 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1534 if (!ref)
1535 return vNULL;
1536 shared_lookup_references.truncate (0);
1537 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1538 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1539 valueized_anything);
1540 return shared_lookup_references;
1543 /* Create a vector of vn_reference_op_s structures from CALL, a
1544 call statement. The vector is shared among all callers of
1545 this function. */
1547 static vec<vn_reference_op_s>
1548 valueize_shared_reference_ops_from_call (gcall *call)
1550 if (!call)
1551 return vNULL;
1552 shared_lookup_references.truncate (0);
1553 copy_reference_ops_from_call (call, &shared_lookup_references);
1554 shared_lookup_references = valueize_refs (shared_lookup_references);
1555 return shared_lookup_references;
1558 /* Look up an SCCVN reference operation VR in the current hash table.
1559 Returns the resulting value number if it exists in the hash table,
1560 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1561 vn_reference_t stored in the hashtable if something is found. */
1563 static tree
1564 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1566 vn_reference_s **slot;
1567 hashval_t hash;
1569 hash = vr->hashcode;
1570 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1571 if (!slot && current_info == optimistic_info)
1572 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1573 if (slot)
1575 if (vnresult)
1576 *vnresult = (vn_reference_t)*slot;
1577 return ((vn_reference_t)*slot)->result;
1580 return NULL_TREE;
1583 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1584 with the current VUSE and performs the expression lookup. */
1586 static void *
1587 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1588 unsigned int cnt, void *vr_)
1590 vn_reference_t vr = (vn_reference_t)vr_;
1591 vn_reference_s **slot;
1592 hashval_t hash;
1594 /* This bounds the stmt walks we perform on reference lookups
1595 to O(1) instead of O(N) where N is the number of dominating
1596 stores. */
1597 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1598 return (void *)-1;
1600 if (last_vuse_ptr)
1601 *last_vuse_ptr = vuse;
1603 /* Fixup vuse and hash. */
1604 if (vr->vuse)
1605 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1606 vr->vuse = vuse_ssa_val (vuse);
1607 if (vr->vuse)
1608 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1610 hash = vr->hashcode;
1611 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1612 if (!slot && current_info == optimistic_info)
1613 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1614 if (slot)
1615 return *slot;
1617 return NULL;
1620 /* Lookup an existing or insert a new vn_reference entry into the
1621 value table for the VUSE, SET, TYPE, OPERANDS reference which
1622 has the value VALUE, which is either a constant or an SSA name. */
1624 static vn_reference_t
1625 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1626 alias_set_type set,
1627 tree type,
1628 vec<vn_reference_op_s,
1629 va_heap> operands,
1630 tree value)
1632 vn_reference_s vr1;
1633 vn_reference_t result;
1634 unsigned value_id;
1635 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1636 vr1.operands = operands;
1637 vr1.type = type;
1638 vr1.set = set;
1639 vr1.hashcode = vn_reference_compute_hash (&vr1);
1640 if (vn_reference_lookup_1 (&vr1, &result))
1641 return result;
1642 if (TREE_CODE (value) == SSA_NAME)
1643 value_id = VN_INFO (value)->value_id;
1644 else
1645 value_id = get_or_alloc_constant_value_id (value);
1646 return vn_reference_insert_pieces (vuse, set, type,
1647 operands.copy (), value, value_id);
1650 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *stmt, tree result);
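/* Down-counter limiting how often vn_lookup_simplify_result may succeed
   when called as mprts_hook; see the PR80887 note below. */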
1651 static unsigned mprts_hook_cnt;
1653 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
1655 static tree
1656 vn_lookup_simplify_result (gimple_match_op *res_op)
1658 if (!res_op->code.is_tree_code ())
1659 return NULL_TREE;
1660 tree *ops = res_op->ops;
1661 unsigned int length = res_op->num_ops;
1662 if (res_op->code == CONSTRUCTOR
1663 /* ??? We're arriving here with SCCVNs view, decomposed CONSTRUCTOR
1664 and GIMPLEs / match-and-simplifies, CONSTRUCTOR as GENERIC tree. */
1665 && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
1667 length = CONSTRUCTOR_NELTS (res_op->ops[0]);
1668 ops = XALLOCAVEC (tree, length);
1669 for (unsigned i = 0; i < length; ++i)
1670 ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
1672 vn_nary_op_t vnresult = NULL;
1673 tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
1674 res_op->type, ops, &vnresult);
1675 /* We can end up endlessly recursing simplifications if the lookup above
1676 presents us with a def-use chain that mirrors the original simplification.
1677 See PR80887 for an example. Limit successful lookup artificially
1678 to 10 times if we are called as mprts_hook. */
1679 if (res
1680 && mprts_hook
1681 && --mprts_hook_cnt == 0)
1683 if (dump_file && (dump_flags & TDF_DETAILS))
1684 fprintf (dump_file, "Resetting mprts_hook after too many "
1685 "invocations.\n");
1686 mprts_hook = NULL;
1688 return res;
1691 /* Return a value-number for RCODE OPS... either by looking up an existing
1692 value-number for the simplified result or by inserting the operation if
1693 INSERT is true. */
1695 static tree
1696 vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert)
1698 tree result = NULL_TREE;
1699 /* We will be creating a value number for
1700 RCODE (OPS...).
1701 So first simplify and lookup this expression to see if it
1702 is already available. */
1703 mprts_hook = vn_lookup_simplify_result;
1704 mprts_hook_cnt = 9;
1705 bool res = false;
1706 switch (TREE_CODE_LENGTH ((tree_code) res_op->code))
1708 case 1:
1709 res = gimple_resimplify1 (NULL, res_op, vn_valueize);
1710 break;
1711 case 2:
1712 res = gimple_resimplify2 (NULL, res_op, vn_valueize);
1713 break;
1714 case 3:
1715 res = gimple_resimplify3 (NULL, res_op, vn_valueize);
1716 break;
1718 mprts_hook = NULL;
1719 gimple *new_stmt = NULL;
1720 if (res
1721 && gimple_simplified_result_is_gimple_val (res_op))
1722 /* The expression is already available. */
1723 result = res_op->ops[0];
1724 else
1726 tree val = vn_lookup_simplify_result (res_op);
1727 if (!val && insert)
1729 gimple_seq stmts = NULL;
1730 result = maybe_push_res_to_seq (res_op, &stmts);
1731 if (result)
1733 gcc_assert (gimple_seq_singleton_p (stmts));
1734 new_stmt = gimple_seq_first_stmt (stmts);
1737 else
1738 /* The expression is already available. */
1739 result = val;
1741 if (new_stmt)
1743 /* The expression is not yet available, value-number lhs to
1744 the new SSA_NAME we created. */
1745 /* Initialize value-number information properly. */
1746 VN_INFO_GET (result)->valnum = result;
1747 VN_INFO (result)->value_id = get_next_value_id ();
1748 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1749 new_stmt);
1750 VN_INFO (result)->needs_insertion = true;
1751 /* ??? PRE phi-translation inserts NARYs without corresponding
1752 SSA name result. Re-use those but set their result according
1753 to the stmt we just built. */
1754 vn_nary_op_t nary = NULL;
1755 vn_nary_op_lookup_stmt (new_stmt, &nary);
1756 if (nary)
1758 gcc_assert (nary->result == NULL_TREE);
1759 nary->result = gimple_assign_lhs (new_stmt);
1761 /* As all "inserted" statements are singleton SCCs, insert
1762 to the valid table. This is strictly needed to
1763 avoid re-generating new value SSA_NAMEs for the same
1764 expression during SCC iteration over and over (the
1765 optimistic table gets cleared after each iteration).
1766 We do not need to insert into the optimistic table, as
1767 lookups there will fall back to the valid table. */
1768 else if (current_info == optimistic_info)
1770 current_info = valid_info;
1771 vn_nary_op_insert_stmt (new_stmt, result);
1772 current_info = optimistic_info;
1774 else
1775 vn_nary_op_insert_stmt (new_stmt, result);
1776 if (dump_file && (dump_flags & TDF_DETAILS))
1778 fprintf (dump_file, "Inserting name ");
1779 print_generic_expr (dump_file, result);
1780 fprintf (dump_file, " for expression ");
1781 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1782 fprintf (dump_file, "\n");
1785 return result;
1788 /* Return a value-number for RCODE OPS... either by looking up an existing
1789 value-number for the simplified result or by inserting the operation. */
1791 static tree
1792 vn_nary_build_or_lookup (gimple_match_op *res_op)
1794 return vn_nary_build_or_lookup_1 (res_op, true);
1797 /* Try to simplify the nary expression NARY and return
1798 its value if present. */
1800 tree
1801 vn_nary_simplify (vn_nary_op_t nary)
1803 if (nary->length > gimple_match_op::MAX_NUM_OPS)
1804 return NULL_TREE;
1805 gimple_match_op op (nary->opcode, nary->type, nary->length);
1806 memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
1807 return vn_nary_build_or_lookup_1 (&op, false);
1811 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1812 from the statement defining VUSE and if not successful tries to
1813 translate *REFP and VR_ through an aggregate copy at the definition
1814 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1815 of *REF and *VR. If only disambiguation was performed then
1816 *DISAMBIGUATE_ONLY is set to true. */
1818 static void *
1819 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1820 bool *disambiguate_only)
1822 vn_reference_t vr = (vn_reference_t)vr_;
1823 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1824 tree base = ao_ref_base (ref);
1825 HOST_WIDE_INT offseti, maxsizei;
1826 static vec<vn_reference_op_s> lhs_ops;
1827 ao_ref lhs_ref;
1828 bool lhs_ref_ok = false;
1829 poly_int64 copy_size;
1831 /* If the reference is based on a parameter that was determined as
1832 pointing to readonly memory, it doesn't change. */
1833 if (TREE_CODE (base) == MEM_REF
1834 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1835 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1836 && bitmap_bit_p (const_parms,
1837 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1839 *disambiguate_only = true;
1840 return NULL;
1843 /* First try to disambiguate after value-replacing in the definition's LHS. */
1844 if (is_gimple_assign (def_stmt))
1846 tree lhs = gimple_assign_lhs (def_stmt);
1847 bool valueized_anything = false;
1848 /* Avoid re-allocation overhead. */
1849 lhs_ops.truncate (0);
1850 copy_reference_ops_from_ref (lhs, &lhs_ops);
1851 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1852 if (valueized_anything)
1854 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1855 get_alias_set (lhs),
1856 TREE_TYPE (lhs), lhs_ops);
1857 if (lhs_ref_ok
1858 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1860 *disambiguate_only = true;
1861 return NULL;
1864 else
1866 ao_ref_init (&lhs_ref, lhs);
1867 lhs_ref_ok = true;
1870 /* If we reach a clobbering statement try to skip it and see if
1871 we find a VN result with exactly the same value as the
1872 possible clobber. In this case we can ignore the clobber
1873 and return the found value.
1874 Note that we don't need to worry about partial overlapping
1875 accesses as we then can use TBAA to disambiguate against the
1876 clobbering statement when looking up a load (thus the
1877 VN_WALKREWRITE guard). */
1878 if (vn_walk_kind == VN_WALKREWRITE
1879 && is_gimple_reg_type (TREE_TYPE (lhs))
1880 && types_compatible_p (TREE_TYPE (lhs), vr->type))
1882 tree *saved_last_vuse_ptr = last_vuse_ptr;
1883 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
1884 last_vuse_ptr = NULL;
1885 tree saved_vuse = vr->vuse;
1886 hashval_t saved_hashcode = vr->hashcode;
1887 void *res = vn_reference_lookup_2 (ref,
1888 gimple_vuse (def_stmt), 0, vr);
1889 /* Need to restore vr->vuse and vr->hashcode. */
1890 vr->vuse = saved_vuse;
1891 vr->hashcode = saved_hashcode;
1892 last_vuse_ptr = saved_last_vuse_ptr;
1893 if (res && res != (void *)-1)
1895 vn_reference_t vnresult = (vn_reference_t) res;
1896 if (vnresult->result
1897 && operand_equal_p (vnresult->result,
1898 gimple_assign_rhs1 (def_stmt), 0))
1899 return res;
1903 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1904 && gimple_call_num_args (def_stmt) <= 4)
1906 /* For builtin calls valueize its arguments and call the
1907 alias oracle again. Valueization may improve points-to
1908 info of pointers and constify size and position arguments.
1909 Originally this was motivated by PR61034 which has
1910 conditional calls to free falsely clobbering ref because
1911 of imprecise points-to info of the argument. */
1912 tree oldargs[4];
1913 bool valueized_anything = false;
1914 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1916 oldargs[i] = gimple_call_arg (def_stmt, i);
1917 tree val = vn_valueize (oldargs[i]);
1918 if (val != oldargs[i])
1920 gimple_call_set_arg (def_stmt, i, val);
1921 valueized_anything = true;
1924 if (valueized_anything)
1926 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1927 ref);
1928 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1929 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1930 if (!res)
1932 *disambiguate_only = true;
1933 return NULL;
1938 if (*disambiguate_only)
1939 return (void *)-1;
1941 /* If we cannot constrain the size of the reference we cannot
1942 test if anything kills it. */
1943 if (!ref->max_size_known_p ())
1944 return (void *)-1;
1946 poly_int64 offset = ref->offset;
1947 poly_int64 maxsize = ref->max_size;
1949 /* We can't deduce anything useful from clobbers. */
1950 if (gimple_clobber_p (def_stmt))
1951 return (void *)-1;
1953 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1954 from that definition.
1955 1) Memset. */
1956 if (is_gimple_reg_type (vr->type)
1957 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1958 && (integer_zerop (gimple_call_arg (def_stmt, 1))
1959 || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
1960 || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
1961 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1962 && offset.is_constant (&offseti)
1963 && offseti % BITS_PER_UNIT == 0))
1964 && poly_int_tree_p (gimple_call_arg (def_stmt, 2))
1965 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
1966 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
1968 tree base2;
1969 poly_int64 offset2, size2, maxsize2;
1970 bool reverse;
1971 tree ref2 = gimple_call_arg (def_stmt, 0);
1972 if (TREE_CODE (ref2) == SSA_NAME)
1974 ref2 = SSA_VAL (ref2);
1975 if (TREE_CODE (ref2) == SSA_NAME
1976 && (TREE_CODE (base) != MEM_REF
1977 || TREE_OPERAND (base, 0) != ref2))
1979 gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
1980 if (gimple_assign_single_p (def_stmt)
1981 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
1982 ref2 = gimple_assign_rhs1 (def_stmt);
1985 if (TREE_CODE (ref2) == ADDR_EXPR)
1987 ref2 = TREE_OPERAND (ref2, 0);
1988 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1989 &reverse);
1990 if (!known_size_p (maxsize2)
1991 || !known_eq (maxsize2, size2)
1992 || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
1993 return (void *)-1;
1995 else if (TREE_CODE (ref2) == SSA_NAME)
1997 poly_int64 soff;
1998 if (TREE_CODE (base) != MEM_REF
1999 || !(mem_ref_offset (base) << LOG2_BITS_PER_UNIT).to_shwi (&soff))
2000 return (void *)-1;
2001 offset += soff;
2002 offset2 = 0;
2003 if (TREE_OPERAND (base, 0) != ref2)
2005 gimple *def = SSA_NAME_DEF_STMT (ref2);
2006 if (is_gimple_assign (def)
2007 && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
2008 && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
2009 && poly_int_tree_p (gimple_assign_rhs2 (def))
2010 && (wi::to_poly_offset (gimple_assign_rhs2 (def))
2011 << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
2013 ref2 = gimple_assign_rhs1 (def);
2014 if (TREE_CODE (ref2) == SSA_NAME)
2015 ref2 = SSA_VAL (ref2);
2017 else
2018 return (void *)-1;
2021 else
2022 return (void *)-1;
2023 tree len = gimple_call_arg (def_stmt, 2);
2024 if (known_subrange_p (offset, maxsize, offset2,
2025 wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
2027 tree val;
2028 if (integer_zerop (gimple_call_arg (def_stmt, 1)))
2029 val = build_zero_cst (vr->type);
2030 else if (INTEGRAL_TYPE_P (vr->type)
2031 && known_eq (ref->size, 8))
2033 gimple_match_op res_op (NOP_EXPR, vr->type,
2034 gimple_call_arg (def_stmt, 1));
2035 val = vn_nary_build_or_lookup (&res_op);
2036 if (!val
2037 || (TREE_CODE (val) == SSA_NAME
2038 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2039 return (void *)-1;
2041 else
2043 unsigned len = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type));
2044 unsigned char *buf = XALLOCAVEC (unsigned char, len);
2045 memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
2046 len);
2047 val = native_interpret_expr (vr->type, buf, len);
2048 if (!val)
2049 return (void *)-1;
2051 return vn_reference_lookup_or_insert_for_pieces
2052 (vuse, vr->set, vr->type, vr->operands, val);
2056 /* 2) Assignment from an empty CONSTRUCTOR. */
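     /* Editorial example: for the GIMPLE assignment
	  s = {};
	  ... = s.f;
	a read covered by the empty-CONSTRUCTOR store values to zero.  */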
2057 else if (is_gimple_reg_type (vr->type)
2058 && gimple_assign_single_p (def_stmt)
2059 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
2060 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
2062 tree base2;
2063 poly_int64 offset2, size2, maxsize2;
2064 bool reverse;
2065 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2066 &offset2, &size2, &maxsize2, &reverse);
2067 if (known_size_p (maxsize2)
2068 && operand_equal_p (base, base2, 0)
2069 && known_subrange_p (offset, maxsize, offset2, size2))
2071 tree val = build_zero_cst (vr->type);
2072 return vn_reference_lookup_or_insert_for_pieces
2073 (vuse, vr->set, vr->type, vr->operands, val);
2077 /* 3) Assignment from a constant. We can use fold's native encode/interpret
2078 routines to extract the assigned bits. */
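     /* Editorial example (byte values assume little-endian layout): for
	  x = 0x11223344;      <- 4-byte int stored as a constant
	  c = *(char *) &x;    <- 1-byte read at offset 0
	native_encode_expr produces the byte image of the constant and
	native_interpret_expr re-reads the covered bytes, here 0x44.  */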
2079 else if (known_eq (ref->size, maxsize)
2080 && is_gimple_reg_type (vr->type)
2081 && !contains_storage_order_barrier_p (vr->operands)
2082 && gimple_assign_single_p (def_stmt)
2083 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2084 /* native_encode and native_decode operate on arrays of bytes
2085 and so fundamentally need a compile-time size and offset. */
2086 && maxsize.is_constant (&maxsizei)
2087 && maxsizei % BITS_PER_UNIT == 0
2088 && offset.is_constant (&offseti)
2089 && offseti % BITS_PER_UNIT == 0
2090 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2091 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2092 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2094 tree base2;
2095 HOST_WIDE_INT offset2, size2;
2096 bool reverse;
2097 base2 = get_ref_base_and_extent_hwi (gimple_assign_lhs (def_stmt),
2098 &offset2, &size2, &reverse);
2099 if (base2
2100 && !reverse
2101 && size2 % BITS_PER_UNIT == 0
2102 && offset2 % BITS_PER_UNIT == 0
2103 && operand_equal_p (base, base2, 0)
2104 && known_subrange_p (offseti, maxsizei, offset2, size2))
2106 /* We support up to 512-bit values (for V8DFmode). */
2107 unsigned char buffer[64];
2108 int len;
2110 tree rhs = gimple_assign_rhs1 (def_stmt);
2111 if (TREE_CODE (rhs) == SSA_NAME)
2112 rhs = SSA_VAL (rhs);
2113 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
2114 buffer, sizeof (buffer),
2115 (offseti - offset2) / BITS_PER_UNIT);
2116 if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
2118 tree type = vr->type;
2119 /* Make sure to interpret in a type that has a range
2120 covering the whole access size. */
2121 if (INTEGRAL_TYPE_P (vr->type)
2122 && maxsizei != TYPE_PRECISION (vr->type))
2123 type = build_nonstandard_integer_type (maxsizei,
2124 TYPE_UNSIGNED (type));
2125 tree val = native_interpret_expr (type, buffer,
2126 maxsizei / BITS_PER_UNIT);
2127 /* If we chop off bits because the type's precision doesn't
2128 match the memory access size this is ok when optimizing
2129 reads but not when called from the DSE code during
2130 elimination. */
2131 if (val
2132 && type != vr->type)
2134 if (! int_fits_type_p (val, vr->type))
2135 val = NULL_TREE;
2136 else
2137 val = fold_convert (vr->type, val);
2140 if (val)
2141 return vn_reference_lookup_or_insert_for_pieces
2142 (vuse, vr->set, vr->type, vr->operands, val);
2147 /* 4) Assignment from an SSA name whose definition we may be able
2148 to access pieces from. */
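     /* Editorial example: for
	  d = v_2;      <- v_2 is an SSA name, e.g. of a vector type
	  x = d[0];     <- read of one element of d
	the piece is re-expressed as BIT_FIELD_REF <v_2, size, position>
	and built or looked up as an n-ary operation.  */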
2149 else if (known_eq (ref->size, maxsize)
2150 && is_gimple_reg_type (vr->type)
2151 && !contains_storage_order_barrier_p (vr->operands)
2152 && gimple_assign_single_p (def_stmt)
2153 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2155 tree base2;
2156 poly_int64 offset2, size2, maxsize2;
2157 bool reverse;
2158 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2159 &offset2, &size2, &maxsize2,
2160 &reverse);
2161 if (!reverse
2162 && known_size_p (maxsize2)
2163 && known_eq (maxsize2, size2)
2164 && operand_equal_p (base, base2, 0)
2165 && known_subrange_p (offset, maxsize, offset2, size2)
2166 /* ??? We can't handle bitfield precision extracts without
2167 either using an alternate type for the BIT_FIELD_REF and
2168 then doing a conversion or possibly adjusting the offset
2169 according to endianness. */
2170 && (! INTEGRAL_TYPE_P (vr->type)
2171 || known_eq (ref->size, TYPE_PRECISION (vr->type)))
2172 && multiple_p (ref->size, BITS_PER_UNIT))
2174 gimple_match_op op (BIT_FIELD_REF, vr->type,
2175 SSA_VAL (gimple_assign_rhs1 (def_stmt)),
2176 bitsize_int (ref->size),
2177 bitsize_int (offset - offset2));
2178 tree val = vn_nary_build_or_lookup (&op);
2179 if (val
2180 && (TREE_CODE (val) != SSA_NAME
2181 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2183 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2184 (vuse, vr->set, vr->type, vr->operands, val);
2185 return res;
2190 /* 5) For aggregate copies translate the reference through them if
2191 the copy kills ref. */
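     /* Editorial example: for
	  *p_1 = *q_2;      <- aggregate copy that kills the reference
	  ... = p_1->f;     <- component of the copy destination
	the reference is rewritten in terms of q_2->f and the walk
	continues with the translated operands.  */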
2192 else if (vn_walk_kind == VN_WALKREWRITE
2193 && gimple_assign_single_p (def_stmt)
2194 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2195 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2196 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2198 tree base2;
2199 int i, j, k;
2200 auto_vec<vn_reference_op_s> rhs;
2201 vn_reference_op_t vro;
2202 ao_ref r;
2204 if (!lhs_ref_ok)
2205 return (void *)-1;
2207 /* See if the assignment kills REF. */
2208 base2 = ao_ref_base (&lhs_ref);
2209 if (!lhs_ref.max_size_known_p ()
2210 || (base != base2
2211 && (TREE_CODE (base) != MEM_REF
2212 || TREE_CODE (base2) != MEM_REF
2213 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2214 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2215 TREE_OPERAND (base2, 1))))
2216 || !stmt_kills_ref_p (def_stmt, ref))
2217 return (void *)-1;
2219 /* Find the common base of ref and the lhs. lhs_ops already
2220 contains valueized operands for the lhs. */
2221 i = vr->operands.length () - 1;
2222 j = lhs_ops.length () - 1;
2223 while (j >= 0 && i >= 0
2224 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2226 i--;
2227 j--;
2230 /* ??? The innermost op should always be a MEM_REF and we already
2231 checked that the assignment to the lhs kills vr. Thus for
2232 aggregate copies using char[] types the vn_reference_op_eq
2233 may fail when comparing types for compatibility. But we really
2234 don't care here - further lookups with the rewritten operands
2235 will simply fail if we messed up types too badly. */
2236 poly_int64 extra_off = 0;
2237 if (j == 0 && i >= 0
2238 && lhs_ops[0].opcode == MEM_REF
2239 && maybe_ne (lhs_ops[0].off, -1))
2241 if (known_eq (lhs_ops[0].off, vr->operands[i].off))
2242 i--, j--;
2243 else if (vr->operands[i].opcode == MEM_REF
2244 && maybe_ne (vr->operands[i].off, -1))
2246 extra_off = vr->operands[i].off - lhs_ops[0].off;
2247 i--, j--;
2251 /* i now points to the first additional op.
2252 ??? LHS may not be completely contained in VR; one or more
2253 VIEW_CONVERT_EXPRs could be in its way. We could at least
2254 try handling outermost VIEW_CONVERT_EXPRs. */
2255 if (j != -1)
2256 return (void *)-1;
2258 /* Punt if the additional ops contain a storage order barrier. */
2259 for (k = i; k >= 0; k--)
2261 vro = &vr->operands[k];
2262 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2263 return (void *)-1;
2266 /* Now re-write REF to be based on the rhs of the assignment. */
2267 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2269 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2270 if (maybe_ne (extra_off, 0))
2272 if (rhs.length () < 2)
2273 return (void *)-1;
2274 int ix = rhs.length () - 2;
2275 if (rhs[ix].opcode != MEM_REF
2276 || known_eq (rhs[ix].off, -1))
2277 return (void *)-1;
2278 rhs[ix].off += extra_off;
2279 rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
2280 build_int_cst (TREE_TYPE (rhs[ix].op0),
2281 extra_off));
2284 /* We need to pre-pend vr->operands[0..i] to rhs. */
2285 vec<vn_reference_op_s> old = vr->operands;
2286 if (i + 1 + rhs.length () > vr->operands.length ())
2287 vr->operands.safe_grow (i + 1 + rhs.length ());
2288 else
2289 vr->operands.truncate (i + 1 + rhs.length ());
2290 FOR_EACH_VEC_ELT (rhs, j, vro)
2291 vr->operands[i + 1 + j] = *vro;
2292 vr->operands = valueize_refs (vr->operands);
2293 if (old == shared_lookup_references)
2294 shared_lookup_references = vr->operands;
2295 vr->hashcode = vn_reference_compute_hash (vr);
2297 /* Try folding the new reference to a constant. */
2298 tree val = fully_constant_vn_reference_p (vr);
2299 if (val)
2300 return vn_reference_lookup_or_insert_for_pieces
2301 (vuse, vr->set, vr->type, vr->operands, val);
2303 /* Adjust *ref from the new operands. */
2304 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2305 return (void *)-1;
2306 /* This can happen with bitfields. */
2307 if (maybe_ne (ref->size, r.size))
2308 return (void *)-1;
2309 *ref = r;
2311 /* Do not update last seen VUSE after translating. */
2312 last_vuse_ptr = NULL;
2314 /* Keep looking for the adjusted *REF / VR pair. */
2315 return NULL;
2318 /* 6) For memcpy copies translate the reference through them if
2319 the copy kills ref. */
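     /* Editorial example: for
	  memcpy (&d, &s, 16);
	  x_1 = d.f;        <- access contained in the copied range
	the access is re-expressed as a MEM_REF based on &s at the
	corresponding offset and the walk continues with that reference.  */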
2320 else if (vn_walk_kind == VN_WALKREWRITE
2321 && is_gimple_reg_type (vr->type)
2322 /* ??? Handle BCOPY as well. */
2323 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2324 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2325 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2326 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2327 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2328 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2329 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2330 && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size))
2332 tree lhs, rhs;
2333 ao_ref r;
2334 poly_int64 rhs_offset, lhs_offset;
2335 vn_reference_op_s op;
2336 poly_uint64 mem_offset;
2337 poly_int64 at, byte_maxsize;
2339 /* Only handle non-variable, addressable refs. */
2340 if (maybe_ne (ref->size, maxsize)
2341 || !multiple_p (offset, BITS_PER_UNIT, &at)
2342 || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
2343 return (void *)-1;
2345 /* Extract a pointer base and an offset for the destination. */
2346 lhs = gimple_call_arg (def_stmt, 0);
2347 lhs_offset = 0;
2348 if (TREE_CODE (lhs) == SSA_NAME)
2350 lhs = SSA_VAL (lhs);
2351 if (TREE_CODE (lhs) == SSA_NAME)
2353 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2354 if (gimple_assign_single_p (def_stmt)
2355 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2356 lhs = gimple_assign_rhs1 (def_stmt);
2359 if (TREE_CODE (lhs) == ADDR_EXPR)
2361 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2362 &lhs_offset);
2363 if (!tem)
2364 return (void *)-1;
2365 if (TREE_CODE (tem) == MEM_REF
2366 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2368 lhs = TREE_OPERAND (tem, 0);
2369 if (TREE_CODE (lhs) == SSA_NAME)
2370 lhs = SSA_VAL (lhs);
2371 lhs_offset += mem_offset;
2373 else if (DECL_P (tem))
2374 lhs = build_fold_addr_expr (tem);
2375 else
2376 return (void *)-1;
2378 if (TREE_CODE (lhs) != SSA_NAME
2379 && TREE_CODE (lhs) != ADDR_EXPR)
2380 return (void *)-1;
2382 /* Extract a pointer base and an offset for the source. */
2383 rhs = gimple_call_arg (def_stmt, 1);
2384 rhs_offset = 0;
2385 if (TREE_CODE (rhs) == SSA_NAME)
2386 rhs = SSA_VAL (rhs);
2387 if (TREE_CODE (rhs) == ADDR_EXPR)
2389 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2390 &rhs_offset);
2391 if (!tem)
2392 return (void *)-1;
2393 if (TREE_CODE (tem) == MEM_REF
2394 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2396 rhs = TREE_OPERAND (tem, 0);
2397 rhs_offset += mem_offset;
2399 else if (DECL_P (tem)
2400 || TREE_CODE (tem) == STRING_CST)
2401 rhs = build_fold_addr_expr (tem);
2402 else
2403 return (void *)-1;
2405 if (TREE_CODE (rhs) != SSA_NAME
2406 && TREE_CODE (rhs) != ADDR_EXPR)
2407 return (void *)-1;
2409 /* The bases of the destination and the reference have to agree. */
2410 if (TREE_CODE (base) == MEM_REF)
2412 if (TREE_OPERAND (base, 0) != lhs
2413 || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
2414 return (void *) -1;
2415 at += mem_offset;
2417 else if (!DECL_P (base)
2418 || TREE_CODE (lhs) != ADDR_EXPR
2419 || TREE_OPERAND (lhs, 0) != base)
2420 return (void *)-1;
2422 /* If the access is completely outside of the memcpy destination
2423 area there is no aliasing. */
2424 if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
2425 return NULL;
2426 /* And the access has to be contained within the memcpy destination. */
2427 if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
2428 return (void *)-1;
2430 /* Make room for 2 operands in the new reference. */
2431 if (vr->operands.length () < 2)
2433 vec<vn_reference_op_s> old = vr->operands;
2434 vr->operands.safe_grow_cleared (2);
2435 if (old == shared_lookup_references)
2436 shared_lookup_references = vr->operands;
2438 else
2439 vr->operands.truncate (2);
2441 /* The looked-through reference is a simple MEM_REF. */
2442 memset (&op, 0, sizeof (op));
2443 op.type = vr->type;
2444 op.opcode = MEM_REF;
2445 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2446 op.off = at - lhs_offset + rhs_offset;
2447 vr->operands[0] = op;
2448 op.type = TREE_TYPE (rhs);
2449 op.opcode = TREE_CODE (rhs);
2450 op.op0 = rhs;
2451 op.off = -1;
2452 vr->operands[1] = op;
2453 vr->hashcode = vn_reference_compute_hash (vr);
2455 /* Try folding the new reference to a constant. */
2456 tree val = fully_constant_vn_reference_p (vr);
2457 if (val)
2458 return vn_reference_lookup_or_insert_for_pieces
2459 (vuse, vr->set, vr->type, vr->operands, val);
2461 /* Adjust *ref from the new operands. */
2462 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2463 return (void *)-1;
2464 /* This can happen with bitfields. */
2465 if (maybe_ne (ref->size, r.size))
2466 return (void *)-1;
2467 *ref = r;
2469 /* Do not update last seen VUSE after translating. */
2470 last_vuse_ptr = NULL;
2472 /* Keep looking for the adjusted *REF / VR pair. */
2473 return NULL;
2476 /* Bail out and stop walking. */
2477 return (void *)-1;
2480 /* Return a reference op vector from OP that can be used for
2481 vn_reference_lookup_pieces. The caller is responsible for releasing
2482 the vector. */
2484 vec<vn_reference_op_s>
2485 vn_reference_operands_for_lookup (tree op)
2487 bool valueized;
2488 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2491 /* Lookup a reference operation by its parts, in the current hash table.
2492 Returns the resulting value number if it exists in the hash table,
2493 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2494 vn_reference_t stored in the hashtable if something is found. */
2496 tree
2497 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2498 vec<vn_reference_op_s> operands,
2499 vn_reference_t *vnresult, vn_lookup_kind kind)
2501 struct vn_reference_s vr1;
2502 vn_reference_t tmp;
2503 tree cst;
2505 if (!vnresult)
2506 vnresult = &tmp;
2507 *vnresult = NULL;
2509 vr1.vuse = vuse_ssa_val (vuse);
2510 shared_lookup_references.truncate (0);
2511 shared_lookup_references.safe_grow (operands.length ());
2512 memcpy (shared_lookup_references.address (),
2513 operands.address (),
2514 sizeof (vn_reference_op_s)
2515 * operands.length ());
2516 vr1.operands = operands = shared_lookup_references
2517 = valueize_refs (shared_lookup_references);
2518 vr1.type = type;
2519 vr1.set = set;
2520 vr1.hashcode = vn_reference_compute_hash (&vr1);
2521 if ((cst = fully_constant_vn_reference_p (&vr1)))
2522 return cst;
2524 vn_reference_lookup_1 (&vr1, vnresult);
2525 if (!*vnresult
2526 && kind != VN_NOWALK
2527 && vr1.vuse)
2529 ao_ref r;
2530 vn_walk_kind = kind;
2531 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2532 *vnresult =
2533 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2534 vn_reference_lookup_2,
2535 vn_reference_lookup_3,
2536 vuse_ssa_val, &vr1);
2537 gcc_checking_assert (vr1.operands == shared_lookup_references);
2540 if (*vnresult)
2541 return (*vnresult)->result;
2543 return NULL_TREE;
2546 /* Lookup OP in the current hash table, and return the resulting value
2547 number if it exists in the hash table. Return NULL_TREE if it does
2548 not exist in the hash table or if the result field of the structure
2549 was NULL. VNRESULT will be filled in with the vn_reference_t
2550 stored in the hashtable if one exists. When TBAA_P is false assume
2551 we are looking up a store and treat it as having alias-set zero. */
2553 tree
2554 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2555 vn_reference_t *vnresult, bool tbaa_p)
2557 vec<vn_reference_op_s> operands;
2558 struct vn_reference_s vr1;
2559 tree cst;
2560 bool valueized_anything;
2562 if (vnresult)
2563 *vnresult = NULL;
2565 vr1.vuse = vuse_ssa_val (vuse);
2566 vr1.operands = operands
2567 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2568 vr1.type = TREE_TYPE (op);
2569 vr1.set = tbaa_p ? get_alias_set (op) : 0;
2570 vr1.hashcode = vn_reference_compute_hash (&vr1);
2571 if ((cst = fully_constant_vn_reference_p (&vr1)))
2572 return cst;
2574 if (kind != VN_NOWALK
2575 && vr1.vuse)
2577 vn_reference_t wvnresult;
2578 ao_ref r;
2579 /* Make sure to use a valueized reference if we valueized anything.
2580 Otherwise preserve the full reference for advanced TBAA. */
2581 if (!valueized_anything
2582 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2583 vr1.operands))
2584 ao_ref_init (&r, op);
2585 if (! tbaa_p)
2586 r.ref_alias_set = r.base_alias_set = 0;
2587 vn_walk_kind = kind;
2588 wvnresult =
2589 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2590 vn_reference_lookup_2,
2591 vn_reference_lookup_3,
2592 vuse_ssa_val, &vr1);
2593 gcc_checking_assert (vr1.operands == shared_lookup_references);
2594 if (wvnresult)
2596 if (vnresult)
2597 *vnresult = wvnresult;
2598 return wvnresult->result;
2601 return NULL_TREE;
2604 return vn_reference_lookup_1 (&vr1, vnresult);
2607 /* Lookup CALL in the current hash table and return the entry in
2608 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2610 void
2611 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2612 vn_reference_t vr)
2614 if (vnresult)
2615 *vnresult = NULL;
2617 tree vuse = gimple_vuse (call);
2619 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2620 vr->operands = valueize_shared_reference_ops_from_call (call);
2621 vr->type = gimple_expr_type (call);
2622 vr->set = 0;
2623 vr->hashcode = vn_reference_compute_hash (vr);
2624 vn_reference_lookup_1 (vr, vnresult);
2627 /* Insert OP into the current hash table with a value number of
2628 RESULT, and return the resulting reference structure we created. */
2630 static vn_reference_t
2631 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2633 vn_reference_s **slot;
2634 vn_reference_t vr1;
2635 bool tem;
2637 vr1 = current_info->references_pool->allocate ();
2638 if (TREE_CODE (result) == SSA_NAME)
2639 vr1->value_id = VN_INFO (result)->value_id;
2640 else
2641 vr1->value_id = get_or_alloc_constant_value_id (result);
2642 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2643 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2644 vr1->type = TREE_TYPE (op);
2645 vr1->set = get_alias_set (op);
2646 vr1->hashcode = vn_reference_compute_hash (vr1);
2647 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2648 vr1->result_vdef = vdef;
2650 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2651 INSERT);
2653 /* Because we look up stores using vuses, and value number failures
2654 using the vdefs (see visit_reference_op_store for how and why),
2655 it's possible that on failure we may try to insert an already
2656 inserted store. This is not wrong, there is no ssa name for a
2657 store that we could use as a differentiator anyway. Thus, unlike
2658 the other lookup functions, you cannot gcc_assert (!*slot)
2659 here. */
2661 /* But free the old slot in case of a collision. */
2662 if (*slot)
2663 free_reference (*slot);
2665 *slot = vr1;
2666 return vr1;
2669 /* Insert a reference by its pieces into the current hash table with
2670 a value number of RESULT. Return the resulting reference
2671 structure we created. */
2673 vn_reference_t
2674 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2675 vec<vn_reference_op_s> operands,
2676 tree result, unsigned int value_id)
2679 vn_reference_s **slot;
2680 vn_reference_t vr1;
2682 vr1 = current_info->references_pool->allocate ();
2683 vr1->value_id = value_id;
2684 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2685 vr1->operands = valueize_refs (operands);
2686 vr1->type = type;
2687 vr1->set = set;
2688 vr1->hashcode = vn_reference_compute_hash (vr1);
2689 if (result && TREE_CODE (result) == SSA_NAME)
2690 result = SSA_VAL (result);
2691 vr1->result = result;
2693 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2694 INSERT);
2696 /* At this point we should have all the things inserted that we have
2697 seen before, and we should never try inserting something that
2698 already exists. */
2699 gcc_assert (!*slot);
2700 if (*slot)
2701 free_reference (*slot);
2703 *slot = vr1;
2704 return vr1;
2707 /* Compute and return the hash value for nary operation VBO1. */
2709 static hashval_t
2710 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2712 inchash::hash hstate;
2713 unsigned i;
2715 for (i = 0; i < vno1->length; ++i)
2716 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2717 vno1->op[i] = SSA_VAL (vno1->op[i]);
2719 if (((vno1->length == 2
2720 && commutative_tree_code (vno1->opcode))
2721 || (vno1->length == 3
2722 && commutative_ternary_tree_code (vno1->opcode)))
2723 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2724 std::swap (vno1->op[0], vno1->op[1]);
2725 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2726 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2728 std::swap (vno1->op[0], vno1->op[1]);
2729 vno1->opcode = swap_tree_comparison (vno1->opcode);
2732 hstate.add_int (vno1->opcode);
2733 for (i = 0; i < vno1->length; ++i)
2734 inchash::add_expr (vno1->op[i], hstate);
2736 return hstate.end ();
2739 /* Compare nary operations VNO1 and VNO2 and return true if they are
2740 equivalent. */
2742 bool
2743 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2745 unsigned i;
2747 if (vno1->hashcode != vno2->hashcode)
2748 return false;
2750 if (vno1->length != vno2->length)
2751 return false;
2753 if (vno1->opcode != vno2->opcode
2754 || !types_compatible_p (vno1->type, vno2->type))
2755 return false;
2757 for (i = 0; i < vno1->length; ++i)
2758 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2759 return false;
2761 /* BIT_INSERT_EXPR has an implicit operand as the type precision
2762 of op1. Need to check to make sure they are the same. */
2763 if (vno1->opcode == BIT_INSERT_EXPR
2764 && TREE_CODE (vno1->op[1]) == INTEGER_CST
2765 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
2766 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
2767 return false;
2769 return true;
2772 /* Initialize VNO from the pieces provided. */
2774 static void
2775 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2776 enum tree_code code, tree type, tree *ops)
2778 vno->opcode = code;
2779 vno->length = length;
2780 vno->type = type;
2781 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2784 /* Initialize VNO from OP. */
2786 static void
2787 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2789 unsigned i;
2791 vno->opcode = TREE_CODE (op);
2792 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2793 vno->type = TREE_TYPE (op);
2794 for (i = 0; i < vno->length; ++i)
2795 vno->op[i] = TREE_OPERAND (op, i);
2798 /* Return the number of operands for a vn_nary ops structure from STMT. */
2800 static unsigned int
2801 vn_nary_length_from_stmt (gimple *stmt)
2803 switch (gimple_assign_rhs_code (stmt))
2805 case REALPART_EXPR:
2806 case IMAGPART_EXPR:
2807 case VIEW_CONVERT_EXPR:
2808 return 1;
2810 case BIT_FIELD_REF:
2811 return 3;
2813 case CONSTRUCTOR:
2814 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2816 default:
2817 return gimple_num_ops (stmt) - 1;
2821 /* Initialize VNO from STMT. */
2823 static void
2824 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2826 unsigned i;
2828 vno->opcode = gimple_assign_rhs_code (stmt);
2829 vno->type = gimple_expr_type (stmt);
2830 switch (vno->opcode)
2832 case REALPART_EXPR:
2833 case IMAGPART_EXPR:
2834 case VIEW_CONVERT_EXPR:
2835 vno->length = 1;
2836 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2837 break;
2839 case BIT_FIELD_REF:
2840 vno->length = 3;
2841 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2842 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2843 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2844 break;
2846 case CONSTRUCTOR:
2847 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2848 for (i = 0; i < vno->length; ++i)
2849 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2850 break;
2852 default:
2853 gcc_checking_assert (!gimple_assign_single_p (stmt));
2854 vno->length = gimple_num_ops (stmt) - 1;
2855 for (i = 0; i < vno->length; ++i)
2856 vno->op[i] = gimple_op (stmt, i + 1);
2860 /* Compute the hashcode for VNO and look for it in the hash table;
2861 return the resulting value number if it exists in the hash table.
2862 Return NULL_TREE if it does not exist in the hash table or if the
2863 result field of the operation is NULL. VNRESULT will contain the
2864 vn_nary_op_t from the hashtable if it exists. */
2866 static tree
2867 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2869 vn_nary_op_s **slot;
2871 if (vnresult)
2872 *vnresult = NULL;
2874 vno->hashcode = vn_nary_op_compute_hash (vno);
2875 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2876 NO_INSERT);
2877 if (!slot && current_info == optimistic_info)
2878 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2879 NO_INSERT);
2880 if (!slot)
2881 return NULL_TREE;
2882 if (vnresult)
2883 *vnresult = *slot;
2884 return (*slot)->result;
2887 /* Lookup an n-ary operation by its pieces and return the resulting value
2888 number if it exists in the hash table. Return NULL_TREE if it does
2889 not exist in the hash table or if the result field of the operation
2890 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2891 if it exists. */
2893 tree
2894 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2895 tree type, tree *ops, vn_nary_op_t *vnresult)
2897 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2898 sizeof_vn_nary_op (length));
2899 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2900 return vn_nary_op_lookup_1 (vno1, vnresult);
2903 /* Lookup OP in the current hash table, and return the resulting value
2904 number if it exists in the hash table. Return NULL_TREE if it does
2905 not exist in the hash table or if the result field of the operation
2906 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2907 if it exists. */
2909 tree
2910 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2912 vn_nary_op_t vno1
2913 = XALLOCAVAR (struct vn_nary_op_s,
2914 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2915 init_vn_nary_op_from_op (vno1, op);
2916 return vn_nary_op_lookup_1 (vno1, vnresult);
2919 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2920 value number if it exists in the hash table. Return NULL_TREE if
2921 it does not exist in the hash table. VNRESULT will contain the
2922 vn_nary_op_t from the hashtable if it exists. */
2924 tree
2925 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2927 vn_nary_op_t vno1
2928 = XALLOCAVAR (struct vn_nary_op_s,
2929 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2930 init_vn_nary_op_from_stmt (vno1, stmt);
2931 return vn_nary_op_lookup_1 (vno1, vnresult);
2934 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2936 static vn_nary_op_t
2937 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2939 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2942 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2943 obstack. */
2945 static vn_nary_op_t
2946 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2948 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2949 &current_info->nary_obstack);
2951 vno1->value_id = value_id;
2952 vno1->length = length;
2953 vno1->result = result;
2955 return vno1;
2958 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2959 VNO->HASHCODE first. */
2961 static vn_nary_op_t
2962 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2963 bool compute_hash)
2965 vn_nary_op_s **slot;
2967 if (compute_hash)
2968 vno->hashcode = vn_nary_op_compute_hash (vno);
2970 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2971 /* While we do not want to insert things twice it's awkward to
2972 avoid it in the case where visit_nary_op pattern-matches stuff
2973 and ends up simplifying the replacement to itself. We then
2974 get two inserts, one from visit_nary_op and one from
2975 vn_nary_build_or_lookup.
2976 So allow inserts with the same value number. */
2977 if (*slot && (*slot)->result == vno->result)
2978 return *slot;
2980 gcc_assert (!*slot);
2982 *slot = vno;
2983 return vno;
2986 /* Insert an n-ary operation into the current hash table using its
2987 pieces. Return the vn_nary_op_t structure we created and put in
2988 the hashtable. */
2990 vn_nary_op_t
2991 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2992 tree type, tree *ops,
2993 tree result, unsigned int value_id)
2995 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2996 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2997 return vn_nary_op_insert_into (vno1, current_info->nary, true);
3000 /* Insert OP into the current hash table with a value number of
3001 RESULT. Return the vn_nary_op_t structure we created and put in
3002 the hashtable. */
3004 vn_nary_op_t
3005 vn_nary_op_insert (tree op, tree result)
3007 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
3008 vn_nary_op_t vno1;
3010 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
3011 init_vn_nary_op_from_op (vno1, op);
3012 return vn_nary_op_insert_into (vno1, current_info->nary, true);
3015 /* Insert the rhs of STMT into the current hash table with a value number of
3016 RESULT. */
3018 static vn_nary_op_t
3019 vn_nary_op_insert_stmt (gimple *stmt, tree result)
3021 vn_nary_op_t vno1
3022 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
3023 result, VN_INFO (result)->value_id);
3024 init_vn_nary_op_from_stmt (vno1, stmt);
3025 return vn_nary_op_insert_into (vno1, current_info->nary, true);
3028 /* Compute a hashcode for PHI operation VP1 and return it. */
3030 static inline hashval_t
3031 vn_phi_compute_hash (vn_phi_t vp1)
3033 inchash::hash hstate (vp1->phiargs.length () > 2
3034 ? vp1->block->index : vp1->phiargs.length ());
3035 tree phi1op;
3036 tree type;
3037 edge e;
3038 edge_iterator ei;
3040 /* If all PHI arguments are constants we need to distinguish
3041 the PHI node via its type. */
3042 type = vp1->type;
3043 hstate.merge_hash (vn_hash_type (type));
3045 FOR_EACH_EDGE (e, ei, vp1->block->preds)
3047 /* Don't hash backedge values; they need to be handled as VN_TOP
3048 for optimistic value-numbering. */
3049 if (e->flags & EDGE_DFS_BACK)
3050 continue;
3052 phi1op = vp1->phiargs[e->dest_idx];
3053 if (phi1op == VN_TOP)
3054 continue;
3055 inchash::add_expr (phi1op, hstate);
3058 return hstate.end ();
3062 /* Return true if COND1 and COND2 represent the same condition; set
3063 *INVERTED_P if one needs to be inverted to make it the same as
3064 the other. */
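/* Editorial examples: "a < b" matches "b > a" directly (operands swapped);
   it matches "a >= b" with *INVERTED_P set (assuming NaNs are not an issue
   for the inversion); conditions over unrelated operands compare unequal.  */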
3066 static bool
3067 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
3068 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
3070 enum tree_code code1 = gimple_cond_code (cond1);
3071 enum tree_code code2 = gimple_cond_code (cond2);
3073 *inverted_p = false;
3074 if (code1 == code2)
3076 else if (code1 == swap_tree_comparison (code2))
3077 std::swap (lhs2, rhs2);
3078 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
3079 *inverted_p = true;
3080 else if (code1 == invert_tree_comparison
3081 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
3083 std::swap (lhs2, rhs2);
3084 *inverted_p = true;
3086 else
3087 return false;
3089 return ((expressions_equal_p (lhs1, lhs2)
3090 && expressions_equal_p (rhs1, rhs2))
3091 || (commutative_tree_code (code1)
3092 && expressions_equal_p (lhs1, rhs2)
3093 && expressions_equal_p (rhs1, lhs2)));
3096 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
3098 static int
3099 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
3101 if (vp1->hashcode != vp2->hashcode)
3102 return false;
3104 if (vp1->block != vp2->block)
3106 if (vp1->phiargs.length () != vp2->phiargs.length ())
3107 return false;
3109 switch (vp1->phiargs.length ())
3111 case 1:
3112 /* Single-arg PHIs are just copies. */
3113 break;
3115 case 2:
3117 /* Rule out backedges into the PHI. */
3118 if (vp1->block->loop_father->header == vp1->block
3119 || vp2->block->loop_father->header == vp2->block)
3120 return false;
3122 /* If the PHI nodes do not have compatible types
3123 they are not the same. */
3124 if (!types_compatible_p (vp1->type, vp2->type))
3125 return false;
3127 basic_block idom1
3128 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3129 basic_block idom2
3130 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
3131 /* If the immediate dominators end in switch stmts, multiple
3132 values may end up in the same PHI arg via intermediate
3133 CFG merges. */
3134 if (EDGE_COUNT (idom1->succs) != 2
3135 || EDGE_COUNT (idom2->succs) != 2)
3136 return false;
3138 /* Verify the controlling stmt is the same. */
3139 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3140 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3141 if (! last1 || ! last2)
3142 return false;
3143 bool inverted_p;
3144 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3145 last2, vp2->cclhs, vp2->ccrhs,
3146 &inverted_p))
3147 return false;
3149 /* Get at true/false controlled edges into the PHI. */
3150 edge te1, te2, fe1, fe2;
3151 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3152 &te1, &fe1)
3153 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3154 &te2, &fe2))
3155 return false;
3157 /* Swap edges if the second condition is the inverted of the
3158 first. */
3159 if (inverted_p)
3160 std::swap (te2, fe2);
3162 /* ??? Handle VN_TOP specially. */
3163 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3164 vp2->phiargs[te2->dest_idx])
3165 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3166 vp2->phiargs[fe2->dest_idx]))
3167 return false;
3169 return true;
3172 default:
3173 return false;
3177 /* If the PHI nodes do not have compatible types
3178 they are not the same. */
3179 if (!types_compatible_p (vp1->type, vp2->type))
3180 return false;
3182 /* Any phi in the same block will have its arguments in the
3183 same edge order, because of how we store phi nodes. */
3184 int i;
3185 tree phi1op;
3186 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
3188 tree phi2op = vp2->phiargs[i];
3189 if (phi1op == VN_TOP || phi2op == VN_TOP)
3190 continue;
3191 if (!expressions_equal_p (phi1op, phi2op))
3192 return false;
3195 return true;
3198 static vec<tree> shared_lookup_phiargs;
3200 /* Lookup PHI in the current hash table, and return the resulting
3201 value number if it exists in the hash table. Return NULL_TREE if
3202 it does not exist in the hash table. */
3204 static tree
3205 vn_phi_lookup (gimple *phi)
3207 vn_phi_s **slot;
3208 struct vn_phi_s vp1;
3209 edge e;
3210 edge_iterator ei;
3212 shared_lookup_phiargs.truncate (0);
3213 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
3215 /* Canonicalize the SSA_NAME's to their value number. */
3216 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3218 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3219 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3220 shared_lookup_phiargs[e->dest_idx] = def;
3222 vp1.type = TREE_TYPE (gimple_phi_result (phi));
3223 vp1.phiargs = shared_lookup_phiargs;
3224 vp1.block = gimple_bb (phi);
3225 /* Extract values of the controlling condition. */
3226 vp1.cclhs = NULL_TREE;
3227 vp1.ccrhs = NULL_TREE;
3228 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1.block);
3229 if (EDGE_COUNT (idom1->succs) == 2)
3230 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3232 vp1.cclhs = vn_valueize (gimple_cond_lhs (last1));
3233 vp1.ccrhs = vn_valueize (gimple_cond_rhs (last1));
3235 vp1.hashcode = vn_phi_compute_hash (&vp1);
3236 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3237 NO_INSERT);
3238 if (!slot && current_info == optimistic_info)
3239 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3240 NO_INSERT);
3241 if (!slot)
3242 return NULL_TREE;
3243 return (*slot)->result;
3246 /* Insert PHI into the current hash table with a value number of
3247 RESULT. */
3249 static vn_phi_t
3250 vn_phi_insert (gimple *phi, tree result)
3252 vn_phi_s **slot;
3253 vn_phi_t vp1 = current_info->phis_pool->allocate ();
3254 vec<tree> args = vNULL;
3255 edge e;
3256 edge_iterator ei;
3258 args.safe_grow (gimple_phi_num_args (phi));
3260 /* Canonicalize the SSA_NAME's to their value number. */
3261 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3263 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3264 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3265 args[e->dest_idx] = def;
3267 vp1->value_id = VN_INFO (result)->value_id;
3268 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3269 vp1->phiargs = args;
3270 vp1->block = gimple_bb (phi);
3271 /* Extract values of the controlling condition. */
3272 vp1->cclhs = NULL_TREE;
3273 vp1->ccrhs = NULL_TREE;
3274 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3275 if (EDGE_COUNT (idom1->succs) == 2)
3276 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3278 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3279 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3281 vp1->result = result;
3282 vp1->hashcode = vn_phi_compute_hash (vp1);
3284 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3286 /* Because we iterate over phi operations more than once, it's
3287 possible the slot might already exist here, hence no assert. */
3288 *slot = vp1;
3289 return vp1;
3293 /* Print set of components in strongly connected component SCC to OUT. */
3295 static void
3296 print_scc (FILE *out, vec<tree> scc)
3298 tree var;
3299 unsigned int i;
3301 fprintf (out, "SCC consists of %u:", scc.length ());
3302 FOR_EACH_VEC_ELT (scc, i, var)
3304 fprintf (out, " ");
3305 print_generic_expr (out, var);
3307 fprintf (out, "\n");
3310 /* Return true if BB1 is dominated by BB2 taking into account edges
3311 that are not executable. */
3313 static bool
3314 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3316 edge_iterator ei;
3317 edge e;
3319 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3320 return true;
3322 /* Before iterating we'd like to know if there exists an
3323 (executable) path from bb2 to bb1 at all; if not we can
3324 directly return false. For now simply iterate once. */
3326 /* Iterate to the single executable bb1 predecessor. */
3327 if (EDGE_COUNT (bb1->preds) > 1)
3329 edge prede = NULL;
3330 FOR_EACH_EDGE (e, ei, bb1->preds)
3331 if (e->flags & EDGE_EXECUTABLE)
3333 if (prede)
3335 prede = NULL;
3336 break;
3338 prede = e;
3340 if (prede)
3342 bb1 = prede->src;
3344 /* Re-do the dominance check with changed bb1. */
3345 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3346 return true;
3350 /* Iterate to the single executable bb2 successor. */
3351 edge succe = NULL;
3352 FOR_EACH_EDGE (e, ei, bb2->succs)
3353 if (e->flags & EDGE_EXECUTABLE)
3355 if (succe)
3357 succe = NULL;
3358 break;
3360 succe = e;
3362 if (succe)
3364 /* Verify the reached block is only reached through succe.
3365 If there is only one edge we can spare us the dominator
3366 check and iterate directly. */
3367 if (EDGE_COUNT (succe->dest->preds) > 1)
3369 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3370 if (e != succe
3371 && (e->flags & EDGE_EXECUTABLE))
3373 succe = NULL;
3374 break;
3377 if (succe)
3379 bb2 = succe->dest;
3381 /* Re-do the dominance check with changed bb2. */
3382 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3383 return true;
3387 /* We could now iterate updating bb1 / bb2. */
3388 return false;
3391 /* Set the value number of FROM to TO, return true if it has changed
3392 as a result. */
3394 static inline bool
3395 set_ssa_val_to (tree from, tree to)
3397 tree currval = SSA_VAL (from);
3398 poly_int64 toff, coff;
3400 /* The only things we allow as value numbers are ssa_names
3401 and invariants. So assert that here. We don't allow VN_TOP
3402 as visiting a stmt should produce a value-number other than
3403 that.
3404 ??? Still VN_TOP can happen for unreachable code, so force
3405 it to varying in that case. Not all code is prepared to
3406 get VN_TOP on valueization. */
3407 if (to == VN_TOP)
3409 if (dump_file && (dump_flags & TDF_DETAILS))
3410 fprintf (dump_file, "Forcing value number to varying on "
3411 "receiving VN_TOP\n");
3412 to = from;
3415 gcc_assert (to != NULL_TREE
3416 && ((TREE_CODE (to) == SSA_NAME
3417 && (to == from || SSA_VAL (to) == to))
3418 || is_gimple_min_invariant (to)));
3420 if (from != to)
3422 if (currval == from)
3424 if (dump_file && (dump_flags & TDF_DETAILS))
3426 fprintf (dump_file, "Not changing value number of ");
3427 print_generic_expr (dump_file, from);
3428 fprintf (dump_file, " from VARYING to ");
3429 print_generic_expr (dump_file, to);
3430 fprintf (dump_file, "\n");
3432 return false;
3434 else if (currval != VN_TOP
3435 && ! is_gimple_min_invariant (currval)
3436 && is_gimple_min_invariant (to))
3438 if (dump_file && (dump_flags & TDF_DETAILS))
3440 fprintf (dump_file, "Forcing VARYING instead of changing "
3441 "value number of ");
3442 print_generic_expr (dump_file, from);
3443 fprintf (dump_file, " from ");
3444 print_generic_expr (dump_file, currval);
3445 fprintf (dump_file, " (non-constant) to ");
3446 print_generic_expr (dump_file, to);
3447 fprintf (dump_file, " (constant)\n");
3449 to = from;
3451 else if (TREE_CODE (to) == SSA_NAME
3452 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3453 to = from;
3456 if (dump_file && (dump_flags & TDF_DETAILS))
3458 fprintf (dump_file, "Setting value number of ");
3459 print_generic_expr (dump_file, from);
3460 fprintf (dump_file, " to ");
3461 print_generic_expr (dump_file, to);
3464 if (currval != to
3465 && !operand_equal_p (currval, to, 0)
3466 /* Different undefined SSA names are not actually different. See
3467 PR82320 for a testcase where we'd otherwise not terminate iteration. */
3468 && !(TREE_CODE (currval) == SSA_NAME
3469 && TREE_CODE (to) == SSA_NAME
3470 && ssa_undefined_value_p (currval, false)
3471 && ssa_undefined_value_p (to, false))
3472 /* ??? For addresses involving volatile objects or types operand_equal_p
3473 does not reliably detect ADDR_EXPRs as equal. We know we are only
3474 getting invariant gimple addresses here, so can use
3475 get_addr_base_and_unit_offset to do this comparison. */
3476 && !(TREE_CODE (currval) == ADDR_EXPR
3477 && TREE_CODE (to) == ADDR_EXPR
3478 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3479 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3480 && known_eq (coff, toff)))
3482 if (dump_file && (dump_flags & TDF_DETAILS))
3483 fprintf (dump_file, " (changed)\n");
3485 /* If we equate two SSA names we have to make the side-band info
3486 of the leader conservative (and remember whatever original value
3487 was present). */
3488 if (TREE_CODE (to) == SSA_NAME)
3490 if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3491 && SSA_NAME_RANGE_INFO (to))
3493 if (SSA_NAME_IS_DEFAULT_DEF (to)
3494 || dominated_by_p_w_unex
3495 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3496 gimple_bb (SSA_NAME_DEF_STMT (to))))
3497 /* Keep the info from the dominator. */
3499 else
3501 /* Save old info. */
3502 if (! VN_INFO (to)->info.range_info)
3504 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3505 VN_INFO (to)->range_info_anti_range_p
3506 = SSA_NAME_ANTI_RANGE_P (to);
3508 /* Rather than allocating memory and unioning the info
3509 just clear it. */
3510 if (dump_file && (dump_flags & TDF_DETAILS))
3512 fprintf (dump_file, "clearing range info of ");
3513 print_generic_expr (dump_file, to);
3514 fprintf (dump_file, "\n");
3516 SSA_NAME_RANGE_INFO (to) = NULL;
3519 else if (POINTER_TYPE_P (TREE_TYPE (to))
3520 && SSA_NAME_PTR_INFO (to))
3522 if (SSA_NAME_IS_DEFAULT_DEF (to)
3523 || dominated_by_p_w_unex
3524 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3525 gimple_bb (SSA_NAME_DEF_STMT (to))))
3526 /* Keep the info from the dominator. */
3528 else if (! SSA_NAME_PTR_INFO (from)
3529 /* Handle the case of trivially equivalent info. */
3530 || memcmp (SSA_NAME_PTR_INFO (to),
3531 SSA_NAME_PTR_INFO (from),
3532 sizeof (ptr_info_def)) != 0)
3534 /* Save old info. */
3535 if (! VN_INFO (to)->info.ptr_info)
3536 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3537 /* Rather than allocating memory and unioning the info
3538 just clear it. */
3539 if (dump_file && (dump_flags & TDF_DETAILS))
3541 fprintf (dump_file, "clearing points-to info of ");
3542 print_generic_expr (dump_file, to);
3543 fprintf (dump_file, "\n");
3545 SSA_NAME_PTR_INFO (to) = NULL;
3550 VN_INFO (from)->valnum = to;
3551 return true;
3553 if (dump_file && (dump_flags & TDF_DETAILS))
3554 fprintf (dump_file, "\n");
3555 return false;
3558 /* Mark as processed all the definitions in the defining stmt of USE, or
3559 the USE itself. */
3561 static void
3562 mark_use_processed (tree use)
3564 ssa_op_iter iter;
3565 def_operand_p defp;
3566 gimple *stmt = SSA_NAME_DEF_STMT (use);
3568 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3570 VN_INFO (use)->use_processed = true;
3571 return;
3574 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3576 tree def = DEF_FROM_PTR (defp);
3578 VN_INFO (def)->use_processed = true;
3582 /* Value number all definitions in STMT to themselves.
3583 Return true if a value number changed. */
3585 static bool
3586 defs_to_varying (gimple *stmt)
3588 bool changed = false;
3589 ssa_op_iter iter;
3590 def_operand_p defp;
3592 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3594 tree def = DEF_FROM_PTR (defp);
3595 changed |= set_ssa_val_to (def, def);
3597 return changed;
3600 /* Visit a copy between LHS and RHS, return true if the value number
3601 changed. */
3603 static bool
3604 visit_copy (tree lhs, tree rhs)
3606 /* Valueize. */
3607 rhs = SSA_VAL (rhs);
3609 return set_ssa_val_to (lhs, rhs);
3612 /* Lookup a value for OP in type WIDE_TYPE where the value in the type of OP
3613 is the same. */
3615 static tree
3616 valueized_wider_op (tree wide_type, tree op)
3618 if (TREE_CODE (op) == SSA_NAME)
3619 op = SSA_VAL (op);
3621 /* Either we have the op widened available. */
3622 tree ops[3] = {};
3623 ops[0] = op;
3624 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
3625 wide_type, ops, NULL);
3626 if (tem)
3627 return tem;
3629 /* Or the op is truncated from some existing value. */
3630 if (TREE_CODE (op) == SSA_NAME)
3632 gimple *def = SSA_NAME_DEF_STMT (op);
3633 if (is_gimple_assign (def)
3634 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3636 tem = gimple_assign_rhs1 (def);
3637 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
3639 if (TREE_CODE (tem) == SSA_NAME)
3640 tem = SSA_VAL (tem);
3641 return tem;
3646 /* For constants simply extend it. */
3647 if (TREE_CODE (op) == INTEGER_CST)
3648 return wide_int_to_tree (wide_type, wi::to_wide (op));
3650 return NULL_TREE;
3653 /* Visit a nary operator RHS, value number it, and return true if the
3654 value number of LHS has changed as a result. */
3656 static bool
3657 visit_nary_op (tree lhs, gassign *stmt)
3659 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3660 if (result)
3661 return set_ssa_val_to (lhs, result);
3663 /* Do some special pattern matching for redundancies of operations
3664 in different types. */
3665 enum tree_code code = gimple_assign_rhs_code (stmt);
3666 tree type = TREE_TYPE (lhs);
3667 tree rhs1 = gimple_assign_rhs1 (stmt);
3668 switch (code)
3670 CASE_CONVERT:
3671 /* Match arithmetic done in a different type where we can easily
3672 substitute the result from some earlier sign-changed or widened
3673 operation. */
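      /* Editorial example: for
	   tem_1 = a_2 + b_3;    <- addition done in a narrow unsigned type
	   lhs = (int) tem_1;
	 if a_2 + b_3 is also available computed in int, the conversion can
	 reuse that wider result, masking it down where the conversion acts
	 as a zero-extension of the narrow precision.  */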
3674 if (INTEGRAL_TYPE_P (type)
3675 && TREE_CODE (rhs1) == SSA_NAME
3676 /* We only handle sign-changes or zero-extension -> & mask. */
3677 && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
3678 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
3679 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
3681 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
3682 if (def
3683 && (gimple_assign_rhs_code (def) == PLUS_EXPR
3684 || gimple_assign_rhs_code (def) == MINUS_EXPR
3685 || gimple_assign_rhs_code (def) == MULT_EXPR))
3687 tree ops[3] = {};
3688 /* Either we have the op widened available. */
3689 ops[0] = valueized_wider_op (type,
3690 gimple_assign_rhs1 (def));
3691 if (ops[0])
3692 ops[1] = valueized_wider_op (type,
3693 gimple_assign_rhs2 (def));
3694 if (ops[0] && ops[1])
3696 ops[0] = vn_nary_op_lookup_pieces
3697 (2, gimple_assign_rhs_code (def), type, ops, NULL);
3698 /* We have wider operation available. */
3699 if (ops[0])
3701 unsigned lhs_prec = TYPE_PRECISION (type);
3702 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
3703 if (lhs_prec == rhs_prec)
3705 gimple_match_op match_op (NOP_EXPR, type, ops[0]);
3706 result = vn_nary_build_or_lookup (&match_op);
3707 if (result)
3709 bool changed = set_ssa_val_to (lhs, result);
3710 vn_nary_op_insert_stmt (stmt, result);
3711 return changed;
3714 else
3716 tree mask = wide_int_to_tree
3717 (type, wi::mask (rhs_prec, false, lhs_prec));
3718 gimple_match_op match_op (BIT_AND_EXPR,
3719 TREE_TYPE (lhs),
3720 ops[0], mask);
3721 result = vn_nary_build_or_lookup (&match_op);
3722 if (result)
3724 bool changed = set_ssa_val_to (lhs, result);
3725 vn_nary_op_insert_stmt (stmt, result);
3726 return changed;
3733 default:;
3736 bool changed = set_ssa_val_to (lhs, lhs);
3737 vn_nary_op_insert_stmt (stmt, lhs);
3738 return changed;
3741 /* Visit a call STMT storing into LHS. Return true if the value number
3742 of the LHS has changed as a result. */
3744 static bool
3745 visit_reference_op_call (tree lhs, gcall *stmt)
3747 bool changed = false;
3748 struct vn_reference_s vr1;
3749 vn_reference_t vnresult = NULL;
3750 tree vdef = gimple_vdef (stmt);
3752 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3753 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3754 lhs = NULL_TREE;
3756 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3757 if (vnresult)
3759 if (vnresult->result_vdef && vdef)
3760 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3761 else if (vdef)
3762 /* If the call was discovered to be pure or const reflect
3763 that as far as possible. */
3764 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
3766 if (!vnresult->result && lhs)
3767 vnresult->result = lhs;
3769 if (vnresult->result && lhs)
3770 changed |= set_ssa_val_to (lhs, vnresult->result);
3772 else
3774 vn_reference_t vr2;
3775 vn_reference_s **slot;
3776 tree vdef_val = vdef;
3777 if (vdef)
3779 /* If we value numbered an indirect call's function to
3780 one not clobbering memory, value number its VDEF to its
3781 VUSE. */
3782 tree fn = gimple_call_fn (stmt);
3783 if (fn && TREE_CODE (fn) == SSA_NAME)
3785 fn = SSA_VAL (fn);
3786 if (TREE_CODE (fn) == ADDR_EXPR
3787 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3788 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
3789 & (ECF_CONST | ECF_PURE)))
3790 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
3792 changed |= set_ssa_val_to (vdef, vdef_val);
3794 if (lhs)
3795 changed |= set_ssa_val_to (lhs, lhs);
3796 vr2 = current_info->references_pool->allocate ();
3797 vr2->vuse = vr1.vuse;
3798 /* As we are not walking the virtual operand chain we know the
3799 shared_lookup_references are still original so we can re-use
3800 them here. */
3801 vr2->operands = vr1.operands.copy ();
3802 vr2->type = vr1.type;
3803 vr2->set = vr1.set;
3804 vr2->hashcode = vr1.hashcode;
3805 vr2->result = lhs;
3806 vr2->result_vdef = vdef_val;
3807 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3808 INSERT);
3809 gcc_assert (!*slot);
3810 *slot = vr2;
3813 return changed;
3816 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3817 and return true if the value number of the LHS has changed as a result. */
3819 static bool
3820 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3822 bool changed = false;
3823 tree last_vuse;
3824 tree result;
3826 last_vuse = gimple_vuse (stmt);
3827 last_vuse_ptr = &last_vuse;
3828 result = vn_reference_lookup (op, gimple_vuse (stmt),
3829 default_vn_walk_kind, NULL, true);
3830 last_vuse_ptr = NULL;
3832 /* We handle type-punning through unions by value-numbering based
3833 on offset and size of the access. Be prepared to handle a
3834 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
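     /* Editorial example: for a union-based pun
	  u.i = x_1;
	  ... = u.f;
	the lookup finds x_1 by offset/size and the mismatching float read
	is expressed as VIEW_CONVERT_EXPR <float> (x_1).  */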
3835 if (result
3836 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3838 /* We will be setting the value number of lhs to the value number
3839 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3840 So first simplify and lookup this expression to see if it
3841 is already available. */
3842 gimple_match_op res_op (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
3843 result = vn_nary_build_or_lookup (&res_op);
3846 if (result)
3847 changed = set_ssa_val_to (lhs, result);
3848 else
3850 changed = set_ssa_val_to (lhs, lhs);
3851 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3854 return changed;
3858 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3859 and return true if the value number of the LHS has changed as a result. */
3861 static bool
3862 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3864 bool changed = false;
3865 vn_reference_t vnresult = NULL;
3866 tree assign;
3867 bool resultsame = false;
3868 tree vuse = gimple_vuse (stmt);
3869 tree vdef = gimple_vdef (stmt);
3871 if (TREE_CODE (op) == SSA_NAME)
3872 op = SSA_VAL (op);
3874 /* First we want to look up using the *vuses* from the store and see
3875 if the last store to this location with the same address
3876 had the same value.
3878 The vuses represent the memory state before the store. If the
3879 memory state, address, and value of the store are the same as the
3880 last store to this location, then this store will produce the
3881 same memory state as that store.
3883 In this case the vdef versions for this store are value numbered to those
3884 vuse versions, since they represent the same memory state after
3885 this store.
3887 Otherwise, the vdefs for the store are used when inserting into
3888 the table, since the store generates a new memory state. */
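/* For example (illustrative GIMPLE):
       # .MEM_2 = VDEF <.MEM_1>
       a = x_3;
       ...
       # .MEM_4 = VDEF <.MEM_2>
       a = x_3;
   The second store does not change the memory state, so .MEM_4 is
   value numbered to .MEM_2 below. */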
3890 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
3891 if (vnresult
3892 && vnresult->result)
3894 tree result = vnresult->result;
3895 if (TREE_CODE (result) == SSA_NAME)
3896 result = SSA_VAL (result);
3897 resultsame = expressions_equal_p (result, op);
3898 if (resultsame)
3900 /* If the TBAA state isn't compatible for downstream reads
3901 we cannot value-number the VDEFs the same. */
3902 alias_set_type set = get_alias_set (lhs);
3903 if (vnresult->set != set
3904 && ! alias_set_subset_of (set, vnresult->set))
3905 resultsame = false;
3909 if (!resultsame)
3911 /* Only perform the following when being called from PRE
3912 which embeds tail merging. */
3913 if (default_vn_walk_kind == VN_WALK)
3915 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3916 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3917 if (vnresult)
3919 VN_INFO (vdef)->use_processed = true;
3920 return set_ssa_val_to (vdef, vnresult->result_vdef);
3924 if (dump_file && (dump_flags & TDF_DETAILS))
3926 fprintf (dump_file, "No store match\n");
3927 fprintf (dump_file, "Value numbering store ");
3928 print_generic_expr (dump_file, lhs);
3929 fprintf (dump_file, " to ");
3930 print_generic_expr (dump_file, op);
3931 fprintf (dump_file, "\n");
3933 /* Have to set value numbers before insert, since insert is
3934 going to valueize the references in-place. */
3935 if (vdef)
3936 changed |= set_ssa_val_to (vdef, vdef);
3938 /* Do not insert structure copies into the tables. */
3939 if (is_gimple_min_invariant (op)
3940 || is_gimple_reg (op))
3941 vn_reference_insert (lhs, op, vdef, NULL);
3943 /* Only perform the following when being called from PRE
3944 which embeds tail merging. */
3945 if (default_vn_walk_kind == VN_WALK)
3947 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3948 vn_reference_insert (assign, lhs, vuse, vdef);
3951 else
3953 /* We had a match, so value number the vdef to have the value
3954 number of the vuse it came from. */
3956 if (dump_file && (dump_flags & TDF_DETAILS))
3957 fprintf (dump_file, "Store matched earlier value, "
3958 "value numbering store vdefs to matching vuses.\n");
3960 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3963 return changed;
3966 /* Visit and value number PHI, return true if the value number
3967 changed. */
3969 static bool
3970 visit_phi (gimple *phi)
3972 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
3973 unsigned n_executable = 0;
3974 bool allsame = true;
3975 edge_iterator ei;
3976 edge e;
3978 /* TODO: We could check for this in init_sccvn, and replace this
3979 with a gcc_assert. */
3980 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3981 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3983 /* See if all non-TOP arguments have the same value. TOP is
3984 equivalent to everything, so we can ignore it. */
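/* For example (illustrative), x_4 = PHI <7(2), 7(5)> with both edges
   executable ends up with sameval == 7 and x_4 is value numbered to 7;
   VN_TOP arguments are skipped and undefined arguments are only
   recorded in seen_undef. */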
3985 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3986 if (e->flags & EDGE_EXECUTABLE)
3988 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3990 ++n_executable;
3991 if (TREE_CODE (def) == SSA_NAME)
3992 def = SSA_VAL (def);
3993 if (def == VN_TOP)
3995 /* Ignore undefined defs for sameval but record one. */
3996 else if (TREE_CODE (def) == SSA_NAME
3997 && ssa_undefined_value_p (def, false))
3998 seen_undef = def;
3999 else if (sameval == VN_TOP)
4000 sameval = def;
4001 else if (!expressions_equal_p (def, sameval))
4003 allsame = false;
4004 break;
4009 /* If none of the edges was executable keep the value-number at VN_TOP,
4010 if only a single edge is executable use its value. */
4011 if (n_executable <= 1)
4012 result = seen_undef ? seen_undef : sameval;
4013 /* If we saw only undefined values and VN_TOP use one of the
4014 undefined values. */
4015 else if (sameval == VN_TOP)
4016 result = seen_undef ? seen_undef : sameval;
4017 /* First see if it is equivalent to a phi node in this block. We prefer
4018 this as it allows IV elimination - see PRs 66502 and 67167. */
4019 else if ((result = vn_phi_lookup (phi)))
4021 /* If all values are the same use that, unless we've seen undefined
4022 values as well and the value isn't constant.
4023 CCP/copyprop have the same restriction to not remove uninit warnings. */
4024 else if (allsame
4025 && (! seen_undef || is_gimple_min_invariant (sameval)))
4026 result = sameval;
4027 else
4029 result = PHI_RESULT (phi);
4030 /* Only insert PHIs that are varying; for constant value numbers
4031 we would otherwise mess up equivalences, as we are only comparing
4032 the immediate controlling predicates. */
4033 vn_phi_insert (phi, result);
4036 return set_ssa_val_to (PHI_RESULT (phi), result);
4039 /* Try to simplify RHS using equivalences and constant folding. */
4041 static tree
4042 try_to_simplify (gassign *stmt)
4044 enum tree_code code = gimple_assign_rhs_code (stmt);
4045 tree tem;
4047 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
4048 in this case, there is no point in doing extra work. */
4049 if (code == SSA_NAME)
4050 return NULL_TREE;
4052 /* First try constant folding based on our current lattice. */
4053 mprts_hook = vn_lookup_simplify_result;
4054 mprts_hook_cnt = 9;
4055 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
4056 mprts_hook = NULL;
4057 if (tem
4058 && (TREE_CODE (tem) == SSA_NAME
4059 || is_gimple_min_invariant (tem)))
4060 return tem;
4062 return NULL_TREE;
4065 /* Visit and value number USE, return true if the value number
4066 changed. */
4068 static bool
4069 visit_use (tree use)
4071 bool changed = false;
4072 gimple *stmt = SSA_NAME_DEF_STMT (use);
4074 mark_use_processed (use);
4076 gcc_assert (!SSA_NAME_IN_FREE_LIST (use)
4077 && !SSA_NAME_IS_DEFAULT_DEF (use));
4079 if (dump_file && (dump_flags & TDF_DETAILS))
4081 fprintf (dump_file, "Value numbering ");
4082 print_generic_expr (dump_file, use);
4083 fprintf (dump_file, " stmt = ");
4084 print_gimple_stmt (dump_file, stmt, 0);
4087 if (gimple_code (stmt) == GIMPLE_PHI)
4088 changed = visit_phi (stmt);
4089 else if (gimple_has_volatile_ops (stmt))
4090 changed = defs_to_varying (stmt);
4091 else if (gassign *ass = dyn_cast <gassign *> (stmt))
4093 enum tree_code code = gimple_assign_rhs_code (ass);
4094 tree lhs = gimple_assign_lhs (ass);
4095 tree rhs1 = gimple_assign_rhs1 (ass);
4096 tree simplified;
4098 /* Shortcut for copies. Simplifying copies is pointless,
4099 since we copy the expression and value they represent. */
4100 if (code == SSA_NAME
4101 && TREE_CODE (lhs) == SSA_NAME)
4103 changed = visit_copy (lhs, rhs1);
4104 goto done;
4106 simplified = try_to_simplify (ass);
4107 if (simplified)
4109 if (dump_file && (dump_flags & TDF_DETAILS))
4111 fprintf (dump_file, "RHS ");
4112 print_gimple_expr (dump_file, ass, 0);
4113 fprintf (dump_file, " simplified to ");
4114 print_generic_expr (dump_file, simplified);
4115 fprintf (dump_file, "\n");
4118 /* Setting value numbers to constants will occasionally
4119 screw up phi congruence because constants are not
4120 uniquely associated with a single ssa name that can be
4121 looked up. */
4122 if (simplified
4123 && is_gimple_min_invariant (simplified)
4124 && TREE_CODE (lhs) == SSA_NAME)
4126 changed = set_ssa_val_to (lhs, simplified);
4127 goto done;
4129 else if (simplified
4130 && TREE_CODE (simplified) == SSA_NAME
4131 && TREE_CODE (lhs) == SSA_NAME)
4133 changed = visit_copy (lhs, simplified);
4134 goto done;
4137 if ((TREE_CODE (lhs) == SSA_NAME
4138 /* We can substitute SSA_NAMEs that are live over
4139 abnormal edges with their constant value. */
4140 && !(gimple_assign_copy_p (ass)
4141 && is_gimple_min_invariant (rhs1))
4142 && !(simplified
4143 && is_gimple_min_invariant (simplified))
4144 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4145 /* Stores or copies from SSA_NAMEs that are live over
4146 abnormal edges are a problem. */
4147 || (code == SSA_NAME
4148 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4149 changed = defs_to_varying (ass);
4150 else if (REFERENCE_CLASS_P (lhs)
4151 || DECL_P (lhs))
4152 changed = visit_reference_op_store (lhs, rhs1, ass);
4153 else if (TREE_CODE (lhs) == SSA_NAME)
4155 if ((gimple_assign_copy_p (ass)
4156 && is_gimple_min_invariant (rhs1))
4157 || (simplified
4158 && is_gimple_min_invariant (simplified)))
4160 if (simplified)
4161 changed = set_ssa_val_to (lhs, simplified);
4162 else
4163 changed = set_ssa_val_to (lhs, rhs1);
4165 else
4167 /* Visit the original statement. */
4168 switch (vn_get_stmt_kind (ass))
4170 case VN_NARY:
4171 changed = visit_nary_op (lhs, ass);
4172 break;
4173 case VN_REFERENCE:
4174 changed = visit_reference_op_load (lhs, rhs1, ass);
4175 break;
4176 default:
4177 changed = defs_to_varying (ass);
4178 break;
4182 else
4183 changed = defs_to_varying (ass);
4185 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4187 tree lhs = gimple_call_lhs (call_stmt);
4188 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4190 /* Try constant folding based on our current lattice. */
4191 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4192 vn_valueize);
4193 if (simplified)
4195 if (dump_file && (dump_flags & TDF_DETAILS))
4197 fprintf (dump_file, "call ");
4198 print_gimple_expr (dump_file, call_stmt, 0);
4199 fprintf (dump_file, " simplified to ");
4200 print_generic_expr (dump_file, simplified);
4201 fprintf (dump_file, "\n");
4204 /* Setting value numbers to constants will occasionally
4205 screw up phi congruence because constants are not
4206 uniquely associated with a single ssa name that can be
4207 looked up. */
4208 if (simplified
4209 && is_gimple_min_invariant (simplified))
4211 changed = set_ssa_val_to (lhs, simplified);
4212 if (gimple_vdef (call_stmt))
4213 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4214 SSA_VAL (gimple_vuse (call_stmt)));
4215 goto done;
4217 else if (simplified
4218 && TREE_CODE (simplified) == SSA_NAME)
4220 changed = visit_copy (lhs, simplified);
4221 if (gimple_vdef (call_stmt))
4222 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4223 SSA_VAL (gimple_vuse (call_stmt)));
4224 goto done;
4226 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4228 changed = defs_to_varying (call_stmt);
4229 goto done;
4233 /* Pick up flags from a devirtualization target. */
4234 tree fn = gimple_call_fn (stmt);
4235 int extra_fnflags = 0;
4236 if (fn && TREE_CODE (fn) == SSA_NAME)
4238 fn = SSA_VAL (fn);
4239 if (TREE_CODE (fn) == ADDR_EXPR
4240 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4241 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4243 if (!gimple_call_internal_p (call_stmt)
4244 && (/* Calls to the same function with the same vuse
4245 and the same operands do not necessarily return the same
4246 value, unless they're pure or const. */
4247 ((gimple_call_flags (call_stmt) | extra_fnflags)
4248 & (ECF_PURE | ECF_CONST))
4249 /* If calls have a vdef, subsequent calls won't have
4250 the same incoming vuse. So, if 2 calls with vdef have the
4251 same vuse, we know they're not subsequent.
4252 Two calls to the same function with the same vuse and the
4253 same operands that are not subsequent can be value numbered
4254 the same, because there is no code in the program that could
4255 compare the 2 values... */
4256 || (gimple_vdef (call_stmt)
4257 /* ... unless the call returns a pointer which does
4258 not alias with anything else. In that case the
4259 information that the values are distinct is encoded
4260 in the IL. */
4261 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4262 /* Only perform the following when being called from PRE
4263 which embeds tail merging. */
4264 && default_vn_walk_kind == VN_WALK)))
4265 changed = visit_reference_op_call (lhs, call_stmt);
4266 else
4267 changed = defs_to_varying (call_stmt);
4269 else
4270 changed = defs_to_varying (stmt);
4271 done:
4272 return changed;
4275 /* Compare two operands by reverse postorder index */
4277 static int
4278 compare_ops (const void *pa, const void *pb)
4280 const tree opa = *((const tree *)pa);
4281 const tree opb = *((const tree *)pb);
4282 gimple *opstmta = SSA_NAME_DEF_STMT (opa);
4283 gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
4284 basic_block bba;
4285 basic_block bbb;
4287 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
4288 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4289 else if (gimple_nop_p (opstmta))
4290 return -1;
4291 else if (gimple_nop_p (opstmtb))
4292 return 1;
4294 bba = gimple_bb (opstmta);
4295 bbb = gimple_bb (opstmtb);
4297 if (!bba && !bbb)
4298 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4299 else if (!bba)
4300 return -1;
4301 else if (!bbb)
4302 return 1;
4304 if (bba == bbb)
4306 if (gimple_code (opstmta) == GIMPLE_PHI
4307 && gimple_code (opstmtb) == GIMPLE_PHI)
4308 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4309 else if (gimple_code (opstmta) == GIMPLE_PHI)
4310 return -1;
4311 else if (gimple_code (opstmtb) == GIMPLE_PHI)
4312 return 1;
4313 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
4314 return gimple_uid (opstmta) - gimple_uid (opstmtb);
4315 else
4316 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4318 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
4321 /* Sort an array containing members of a strongly connected component
4322 SCC so that the members are ordered by RPO number.
4323 This means that when the sort is complete, iterating through the
4324 array will give you the members in RPO order. */
4326 static void
4327 sort_scc (vec<tree> scc)
4329 scc.qsort (compare_ops);
4332 /* Insert the no longer used nary ONARY into the hash table of INFO. */
4334 static void
4335 copy_nary (vn_nary_op_t onary, vn_tables_t info)
4337 size_t size = sizeof_vn_nary_op (onary->length);
4338 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
4339 &info->nary_obstack);
4340 memcpy (nary, onary, size);
4341 vn_nary_op_insert_into (nary, info->nary, false);
4344 /* Insert the no longer used phi OPHI into the hash table of INFO. */
4346 static void
4347 copy_phi (vn_phi_t ophi, vn_tables_t info)
4349 vn_phi_t phi = info->phis_pool->allocate ();
4350 vn_phi_s **slot;
4351 memcpy (phi, ophi, sizeof (*phi));
4352 ophi->phiargs.create (0);
4353 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
4354 gcc_assert (!*slot);
4355 *slot = phi;
4358 /* Insert the no longer used reference OREF into the hash table of INFO. */
4360 static void
4361 copy_reference (vn_reference_t oref, vn_tables_t info)
4363 vn_reference_t ref;
4364 vn_reference_s **slot;
4365 ref = info->references_pool->allocate ();
4366 memcpy (ref, oref, sizeof (*ref));
4367 oref->operands.create (0);
4368 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
4369 if (*slot)
4370 free_reference (*slot);
4371 *slot = ref;
4374 /* Process a strongly connected component in the SSA graph. */
4376 static void
4377 process_scc (vec<tree> scc)
4379 tree var;
4380 unsigned int i;
4381 unsigned int iterations = 0;
4382 bool changed = true;
4383 vn_nary_op_iterator_type hin;
4384 vn_phi_iterator_type hip;
4385 vn_reference_iterator_type hir;
4386 vn_nary_op_t nary;
4387 vn_phi_t phi;
4388 vn_reference_t ref;
4390 /* If the SCC has a single member, just visit it. */
4391 if (scc.length () == 1)
4393 tree use = scc[0];
4394 if (VN_INFO (use)->use_processed)
4395 return;
4396 /* We need to make sure it doesn't form a cycle itself, which can
4397 happen for self-referential PHI nodes. In that case we would
4398 end up inserting an expression with VN_TOP operands into the
4399 valid table which makes us derive bogus equivalences later.
4400 The cheapest way to check this is to assume it for all PHI nodes. */
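/* For example (illustrative), i_1 = PHI <i_1(3), 0(2)> forms a
   single-node cycle; treating every PHI as potentially
   self-referential is cheaper than checking its operands explicitly. */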
4401 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
4402 /* Fallthru to iteration. */ ;
4403 else
4405 visit_use (use);
4406 return;
4410 if (dump_file && (dump_flags & TDF_DETAILS))
4411 print_scc (dump_file, scc);
4413 /* Iterate over the SCC with the optimistic table until it stops
4414 changing. */
4415 current_info = optimistic_info;
4416 while (changed)
4418 changed = false;
4419 iterations++;
4420 if (dump_file && (dump_flags & TDF_DETAILS))
4421 fprintf (dump_file, "Starting iteration %d\n", iterations);
4422 /* As we are value-numbering optimistically we have to
4423 clear the expression tables and the simplified expressions
4424 in each iteration until we converge. */
4425 optimistic_info->nary->empty ();
4426 optimistic_info->phis->empty ();
4427 optimistic_info->references->empty ();
4428 obstack_free (&optimistic_info->nary_obstack, NULL);
4429 gcc_obstack_init (&optimistic_info->nary_obstack);
4430 optimistic_info->phis_pool->release ();
4431 optimistic_info->references_pool->release ();
4432 FOR_EACH_VEC_ELT (scc, i, var)
4433 gcc_assert (!VN_INFO (var)->needs_insertion
4434 && VN_INFO (var)->expr == NULL);
4435 FOR_EACH_VEC_ELT (scc, i, var)
4436 changed |= visit_use (var);
4439 if (dump_file && (dump_flags & TDF_DETAILS))
4440 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4441 statistics_histogram_event (cfun, "SCC iterations", iterations);
4443 /* Finally, copy the contents of the no longer used optimistic
4444 table to the valid table. */
4445 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
4446 copy_nary (nary, valid_info);
4447 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
4448 copy_phi (phi, valid_info);
4449 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
4450 ref, vn_reference_t, hir)
4451 copy_reference (ref, valid_info);
4453 current_info = valid_info;
4457 /* Pop the components of the found SCC for NAME off the SCC stack
4458 and process them. If the SCC exceeds PARAM_SCCVN_MAX_SCC_SIZE its
4459 members are dropped to varying instead of being iterated. */
4461 static void
4462 extract_and_process_scc_for_name (tree name)
4464 auto_vec<tree> scc;
4465 tree x;
4467 /* Found an SCC, pop the components off the SCC stack and
4468 process them. */
4471 x = sccstack.pop ();
4473 VN_INFO (x)->on_sccstack = false;
4474 scc.safe_push (x);
4475 } while (x != name);
4477 /* Drop all defs in the SCC to varying in case a SCC turns out to be
4478 incredibly large.
4479 ??? Just switch to a non-optimistic mode that avoids any iteration. */
4480 if (scc.length () > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4482 if (dump_file)
4484 print_scc (dump_file, scc);
4485 fprintf (dump_file, "WARNING: Giving up value-numbering SCC due to "
4486 "size %u exceeding %u\n", scc.length (),
4487 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4489 tree var;
4490 unsigned i;
4491 FOR_EACH_VEC_ELT (scc, i, var)
4493 gimple *def = SSA_NAME_DEF_STMT (var);
4494 mark_use_processed (var);
4495 if (SSA_NAME_IS_DEFAULT_DEF (var)
4496 || gimple_code (def) == GIMPLE_PHI)
4497 set_ssa_val_to (var, var);
4498 else
4499 defs_to_varying (def);
4501 return;
4504 if (scc.length () > 1)
4505 sort_scc (scc);
4507 process_scc (scc);
4510 /* Depth first search on NAME to discover and process SCC's in the SSA
4511 graph.
4512 Execution of this algorithm relies on the fact that the SCC's are
4513 popped off the stack in topological order. */
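/* Note: this is an iterative variant of Tarjan's SCC algorithm: DFSNUM
   is the discovery index, LOW the low-link, and an SCC rooted at NAME
   is complete (and popped via extract_and_process_scc_for_name) once
   LOW == DFSNUM. */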
4517 static void
4518 DFS (tree name)
4520 auto_vec<ssa_op_iter> itervec;
4521 auto_vec<tree> namevec;
4522 use_operand_p usep = NULL;
4523 gimple *defstmt;
4524 tree use;
4525 ssa_op_iter iter;
4527 start_over:
4528 /* SCC info */
4529 VN_INFO (name)->dfsnum = next_dfs_num++;
4530 VN_INFO (name)->visited = true;
4531 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4533 sccstack.safe_push (name);
4534 VN_INFO (name)->on_sccstack = true;
4535 defstmt = SSA_NAME_DEF_STMT (name);
4537 /* Recursively DFS on our operands, looking for SCC's. */
4538 if (!gimple_nop_p (defstmt))
4540 /* Push a new iterator. */
4541 if (gphi *phi = dyn_cast <gphi *> (defstmt))
4542 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4543 else
4544 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4546 else
4547 clear_and_done_ssa_iter (&iter);
4549 while (1)
4551 /* If we are done processing uses of a name, go up the stack
4552 of iterators and process SCCs as we found them. */
4553 if (op_iter_done (&iter))
4555 /* See if we found an SCC. */
4556 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4557 extract_and_process_scc_for_name (name);
4559 /* Check if we are done. */
4560 if (namevec.is_empty ())
4561 return;
4563 /* Restore the last use walker and continue walking there. */
4564 use = name;
4565 name = namevec.pop ();
4566 memcpy (&iter, &itervec.last (),
4567 sizeof (ssa_op_iter));
4568 itervec.pop ();
4569 goto continue_walking;
4572 use = USE_FROM_PTR (usep);
4574 /* Since we handle phi nodes, we will sometimes get
4575 invariants in the use expression. */
4576 if (TREE_CODE (use) == SSA_NAME)
4578 if (! (VN_INFO (use)->visited))
4580 /* Recurse by pushing the current use walking state on
4581 the stack and starting over. */
4582 itervec.safe_push (iter);
4583 namevec.safe_push (name);
4584 name = use;
4585 goto start_over;
4587 continue_walking:
4588 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4589 VN_INFO (use)->low);
4591 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4592 && VN_INFO (use)->on_sccstack)
4594 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4595 VN_INFO (name)->low);
4599 usep = op_iter_next_use (&iter);
4603 /* Allocate a value number table. */
4605 static void
4606 allocate_vn_table (vn_tables_t table)
4608 table->phis = new vn_phi_table_type (23);
4609 table->nary = new vn_nary_op_table_type (23);
4610 table->references = new vn_reference_table_type (23);
4612 gcc_obstack_init (&table->nary_obstack);
4613 table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4614 table->references_pool = new object_allocator<vn_reference_s>
4615 ("VN references");
4618 /* Free a value number table. */
4620 static void
4621 free_vn_table (vn_tables_t table)
4623 delete table->phis;
4624 table->phis = NULL;
4625 delete table->nary;
4626 table->nary = NULL;
4627 delete table->references;
4628 table->references = NULL;
4629 obstack_free (&table->nary_obstack, NULL);
4630 delete table->phis_pool;
4631 delete table->references_pool;
4634 static void
4635 init_scc_vn (void)
4637 int j;
4638 int *rpo_numbers_temp;
4640 calculate_dominance_info (CDI_DOMINATORS);
4641 mark_dfs_back_edges ();
4643 sccstack.create (0);
4644 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4646 constant_value_ids = BITMAP_ALLOC (NULL);
4648 next_dfs_num = 1;
4649 next_value_id = 1;
4651 vn_ssa_aux_table.create (num_ssa_names + 1);
4652 /* VEC_alloc doesn't actually grow it to the right size; it just
4653 preallocates the space to do so. */
4654 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4655 gcc_obstack_init (&vn_ssa_aux_obstack);
4657 shared_lookup_phiargs.create (0);
4658 shared_lookup_references.create (0);
4659 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4660 rpo_numbers_temp =
4661 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4662 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4664 /* RPO_NUMBERS_TEMP is an array in RPO order: rpo_numbers_temp[i] = bb
4665 means that the i'th block in RPO order is bb. We want to map bb
4666 indices to RPO numbers, so we need to invert this array. */
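/* For example, if rpo_numbers_temp = { 2, 5, 3 } then afterwards
   rpo_numbers[2] = 0, rpo_numbers[5] = 1 and rpo_numbers[3] = 2. */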
4667 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4668 rpo_numbers[rpo_numbers_temp[j]] = j;
4670 XDELETE (rpo_numbers_temp);
4672 VN_TOP = build_decl (UNKNOWN_LOCATION, VAR_DECL,
4673 get_identifier ("VN_TOP"), error_mark_node);
4675 renumber_gimple_stmt_uids ();
4677 /* Create the valid and optimistic value numbering tables. */
4678 valid_info = XCNEW (struct vn_tables_s);
4679 allocate_vn_table (valid_info);
4680 optimistic_info = XCNEW (struct vn_tables_s);
4681 allocate_vn_table (optimistic_info);
4682 current_info = valid_info;
4684 /* Create the VN_INFO structures, and initialize value numbers to
4685 TOP or VARYING for parameters. */
4686 size_t i;
4687 tree name;
4689 FOR_EACH_SSA_NAME (i, name, cfun)
4691 VN_INFO_GET (name)->valnum = VN_TOP;
4692 VN_INFO (name)->needs_insertion = false;
4693 VN_INFO (name)->expr = NULL;
4694 VN_INFO (name)->value_id = 0;
4696 if (!SSA_NAME_IS_DEFAULT_DEF (name))
4697 continue;
4699 switch (TREE_CODE (SSA_NAME_VAR (name)))
4701 case VAR_DECL:
4702 /* All undefined vars are VARYING. */
4703 VN_INFO (name)->valnum = name;
4704 VN_INFO (name)->visited = true;
4705 break;
4707 case PARM_DECL:
4708 /* Parameters are VARYING but we can record a condition
4709 if we know it is a non-NULL pointer. */
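/* For example (illustrative), for a pointer parameter p known to be
   nonnull this records the expression p_1(D) != 0 as having value
   true, so later tests of the default def against NULL fold. */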
4710 VN_INFO (name)->visited = true;
4711 VN_INFO (name)->valnum = name;
4712 if (POINTER_TYPE_P (TREE_TYPE (name))
4713 && nonnull_arg_p (SSA_NAME_VAR (name)))
4715 tree ops[2];
4716 ops[0] = name;
4717 ops[1] = build_int_cst (TREE_TYPE (name), 0);
4718 vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4719 boolean_true_node, 0);
4720 if (dump_file && (dump_flags & TDF_DETAILS))
4722 fprintf (dump_file, "Recording ");
4723 print_generic_expr (dump_file, name, TDF_SLIM);
4724 fprintf (dump_file, " != 0\n");
4727 break;
4729 case RESULT_DECL:
4730 /* If the result is passed by invisible reference the default
4731 def is initialized, otherwise it's uninitialized. Still,
4732 an undefined result is varying. */
4733 VN_INFO (name)->visited = true;
4734 VN_INFO (name)->valnum = name;
4735 break;
4737 default:
4738 gcc_unreachable ();
4743 /* Restore SSA info that has been reset on value leaders. */
4745 void
4746 scc_vn_restore_ssa_info (void)
4748 unsigned i;
4749 tree name;
4751 FOR_EACH_SSA_NAME (i, name, cfun)
4753 if (has_VN_INFO (name))
4755 if (VN_INFO (name)->needs_insertion)
4757 else if (POINTER_TYPE_P (TREE_TYPE (name))
4758 && VN_INFO (name)->info.ptr_info)
4759 SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4760 else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4761 && VN_INFO (name)->info.range_info)
4763 SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4764 SSA_NAME_ANTI_RANGE_P (name)
4765 = VN_INFO (name)->range_info_anti_range_p;
4771 void
4772 free_scc_vn (void)
4774 size_t i;
4775 tree name;
4777 delete constant_to_value_id;
4778 constant_to_value_id = NULL;
4779 BITMAP_FREE (constant_value_ids);
4780 shared_lookup_phiargs.release ();
4781 shared_lookup_references.release ();
4782 XDELETEVEC (rpo_numbers);
4784 FOR_EACH_SSA_NAME (i, name, cfun)
4786 if (has_VN_INFO (name)
4787 && VN_INFO (name)->needs_insertion)
4788 release_ssa_name (name);
4790 obstack_free (&vn_ssa_aux_obstack, NULL);
4791 vn_ssa_aux_table.release ();
4793 sccstack.release ();
4794 free_vn_table (valid_info);
4795 XDELETE (valid_info);
4796 free_vn_table (optimistic_info);
4797 XDELETE (optimistic_info);
4799 BITMAP_FREE (const_parms);
4802 /* Set *ID according to RESULT. */
4804 static void
4805 set_value_id_for_result (tree result, unsigned int *id)
4807 if (result && TREE_CODE (result) == SSA_NAME)
4808 *id = VN_INFO (result)->value_id;
4809 else if (result && is_gimple_min_invariant (result))
4810 *id = get_or_alloc_constant_value_id (result);
4811 else
4812 *id = get_next_value_id ();
4815 /* Set the value ids in the valid hash tables. */
4817 static void
4818 set_hashtable_value_ids (void)
4820 vn_nary_op_iterator_type hin;
4821 vn_phi_iterator_type hip;
4822 vn_reference_iterator_type hir;
4823 vn_nary_op_t vno;
4824 vn_reference_t vr;
4825 vn_phi_t vp;
4827 /* Now set the value ids of the things we had put in the hash
4828 table. */
4830 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4831 set_value_id_for_result (vno->result, &vno->value_id);
4833 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4834 set_value_id_for_result (vp->result, &vp->value_id);
4836 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4837 hir)
4838 set_value_id_for_result (vr->result, &vr->value_id);
4841 class sccvn_dom_walker : public dom_walker
4843 public:
4844 sccvn_dom_walker ()
4845 : dom_walker (CDI_DOMINATORS, REACHABLE_BLOCKS), cond_stack (0) {}
4847 virtual edge before_dom_children (basic_block);
4848 virtual void after_dom_children (basic_block);
4850 void record_cond (basic_block,
4851 enum tree_code code, tree lhs, tree rhs, bool value);
4852 void record_conds (basic_block,
4853 enum tree_code code, tree lhs, tree rhs, bool value);
4855 auto_vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4856 cond_stack;
4859 /* Record a temporary condition for the BB and its dominated blocks. */
4861 void
4862 sccvn_dom_walker::record_cond (basic_block bb,
4863 enum tree_code code, tree lhs, tree rhs,
4864 bool value)
4866 tree ops[2] = { lhs, rhs };
4867 vn_nary_op_t old = NULL;
4868 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4869 current_info->nary->remove_elt_with_hash (old, old->hashcode);
4870 vn_nary_op_t cond
4871 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4872 value
4873 ? boolean_true_node
4874 : boolean_false_node, 0);
4875 if (dump_file && (dump_flags & TDF_DETAILS))
4877 fprintf (dump_file, "Recording temporarily ");
4878 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4879 fprintf (dump_file, " %s ", get_tree_code_name (code));
4880 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4881 fprintf (dump_file, " == %s%s\n",
4882 value ? "true" : "false",
4883 old ? " (old entry saved)" : "");
4885 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4888 /* Record temporary conditions for the BB and its dominated blocks
4889 according to LHS CODE RHS == VALUE and its dominated conditions. */
4891 void
4892 sccvn_dom_walker::record_conds (basic_block bb,
4893 enum tree_code code, tree lhs, tree rhs,
4894 bool value)
4896 /* Record the original condition. */
4897 record_cond (bb, code, lhs, rhs, value);
4899 if (!value)
4900 return;
4902 /* Record dominated conditions if the condition is true. Note that
4903 the inversion is already recorded. */
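/* For example, recording a_1 < b_2 == true below also records
   a_1 <= b_2 == true, a_1 != b_2 == true and a_1 == b_2 == false
   (names illustrative). */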
4904 switch (code)
4906 case LT_EXPR:
4907 case GT_EXPR:
4908 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4909 record_cond (bb, NE_EXPR, lhs, rhs, true);
4910 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4911 break;
4913 case EQ_EXPR:
4914 record_cond (bb, LE_EXPR, lhs, rhs, true);
4915 record_cond (bb, GE_EXPR, lhs, rhs, true);
4916 record_cond (bb, LT_EXPR, lhs, rhs, false);
4917 record_cond (bb, GT_EXPR, lhs, rhs, false);
4918 break;
4920 default:
4921 break;
4925 /* Restore expressions and values derived from conditionals. */
4927 void
4928 sccvn_dom_walker::after_dom_children (basic_block bb)
4930 while (!cond_stack.is_empty ()
4931 && cond_stack.last ().first == bb)
4933 vn_nary_op_t cond = cond_stack.last ().second.first;
4934 vn_nary_op_t old = cond_stack.last ().second.second;
4935 current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4936 if (old)
4937 vn_nary_op_insert_into (old, current_info->nary, false);
4938 cond_stack.pop ();
4942 /* Value number all statements in BB. */
4944 edge
4945 sccvn_dom_walker::before_dom_children (basic_block bb)
4947 if (dump_file && (dump_flags & TDF_DETAILS))
4948 fprintf (dump_file, "Visiting BB %d\n", bb->index);
4950 /* If we have a single predecessor record the equivalence from a
4951 possible condition on the predecessor edge. */
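/* For example (illustrative), if the single predecessor ends in
       if (x_1 != 0) goto <true bb>; else goto <false bb>;
   and this block is the true destination, x_1 != 0 == true (and the
   inverted comparison == false) is recorded for this block and the
   blocks it dominates. */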
4952 edge pred_e = single_pred_edge_ignoring_loop_edges (bb, false);
4953 if (pred_e)
4955 /* Check if there are multiple executable successor edges in
4956 the source block. Otherwise there is no additional info
4957 to be recorded. */
4958 edge_iterator ei;
4959 edge e2;
4960 FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4961 if (e2 != pred_e
4962 && e2->flags & EDGE_EXECUTABLE)
4963 break;
4964 if (e2 && (e2->flags & EDGE_EXECUTABLE))
4966 gimple *stmt = last_stmt (pred_e->src);
4967 if (stmt
4968 && gimple_code (stmt) == GIMPLE_COND)
4970 enum tree_code code = gimple_cond_code (stmt);
4971 tree lhs = gimple_cond_lhs (stmt);
4972 tree rhs = gimple_cond_rhs (stmt);
4973 record_conds (bb, code, lhs, rhs,
4974 (pred_e->flags & EDGE_TRUE_VALUE) != 0);
4975 code = invert_tree_comparison (code, HONOR_NANS (lhs));
4976 if (code != ERROR_MARK)
4977 record_conds (bb, code, lhs, rhs,
4978 (pred_e->flags & EDGE_TRUE_VALUE) == 0);
4983 /* Value-number all defs in the basic-block. */
4984 for (gphi_iterator gsi = gsi_start_phis (bb);
4985 !gsi_end_p (gsi); gsi_next (&gsi))
4987 gphi *phi = gsi.phi ();
4988 tree res = PHI_RESULT (phi);
4989 if (!VN_INFO (res)->visited)
4990 DFS (res);
4992 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4993 !gsi_end_p (gsi); gsi_next (&gsi))
4995 ssa_op_iter i;
4996 tree op;
4997 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4998 if (!VN_INFO (op)->visited)
4999 DFS (op);
5002 /* Finally look at the last stmt. */
5003 gimple *stmt = last_stmt (bb);
5004 if (!stmt)
5005 return NULL;
5007 enum gimple_code code = gimple_code (stmt);
5008 if (code != GIMPLE_COND
5009 && code != GIMPLE_SWITCH
5010 && code != GIMPLE_GOTO)
5011 return NULL;
5013 if (dump_file && (dump_flags & TDF_DETAILS))
5015 fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
5016 print_gimple_stmt (dump_file, stmt, 0);
5019 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
5020 if value-numbering can prove they are not reachable. Handling
5021 computed gotos is also possible. */
5022 tree val;
5023 switch (code)
5025 case GIMPLE_COND:
5027 tree lhs = vn_valueize (gimple_cond_lhs (stmt));
5028 tree rhs = vn_valueize (gimple_cond_rhs (stmt));
5029 val = gimple_simplify (gimple_cond_code (stmt),
5030 boolean_type_node, lhs, rhs,
5031 NULL, vn_valueize);
5032 /* If that didn't simplify to a constant see if we have recorded
5033 temporary expressions from taken edges. */
5034 if (!val || TREE_CODE (val) != INTEGER_CST)
5036 tree ops[2];
5037 ops[0] = lhs;
5038 ops[1] = rhs;
5039 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
5040 boolean_type_node, ops, NULL);
5042 break;
5044 case GIMPLE_SWITCH:
5045 val = gimple_switch_index (as_a <gswitch *> (stmt));
5046 break;
5047 case GIMPLE_GOTO:
5048 val = gimple_goto_dest (stmt);
5049 break;
5050 default:
5051 gcc_unreachable ();
5053 if (!val)
5054 return NULL;
5056 edge taken = find_taken_edge (bb, vn_valueize (val));
5057 if (!taken)
5058 return NULL;
5060 if (dump_file && (dump_flags & TDF_DETAILS))
5061 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
5062 "not executable\n", bb->index, bb->index, taken->dest->index);
5064 return taken;
5067 /* Do SCCVN. DEFAULT_VN_WALK_KIND_ specifies how we use the alias
5068 oracle walking during the VN process. */
5071 void
5072 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
5074 size_t i;
5076 default_vn_walk_kind = default_vn_walk_kind_;
5078 init_scc_vn ();
5080 /* Collect pointers we know point to readonly memory. */
5081 const_parms = BITMAP_ALLOC (NULL);
5082 tree fnspec = lookup_attribute ("fn spec",
5083 TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
5084 if (fnspec)
5086 fnspec = TREE_VALUE (TREE_VALUE (fnspec));
5087 i = 1;
5088 for (tree arg = DECL_ARGUMENTS (cfun->decl);
5089 arg; arg = DECL_CHAIN (arg), ++i)
5091 if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
5092 break;
5093 if (TREE_STRING_POINTER (fnspec)[i] == 'R'
5094 || TREE_STRING_POINTER (fnspec)[i] == 'r')
5096 tree name = ssa_default_def (cfun, arg);
5097 if (name)
5098 bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
5103 /* Walk all blocks in dominator order, value-numbering stmts
5104 SSA defs and decide whether outgoing edges are not executable. */
5105 sccvn_dom_walker walker;
5106 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5108 /* Initialize the value ids and prune out remaining VN_TOPs
5109 from dead code. */
5110 tree name;
5111 FOR_EACH_SSA_NAME (i, name, cfun)
5113 vn_ssa_aux_t info = VN_INFO (name);
5114 if (!info->visited
5115 || info->valnum == VN_TOP)
5116 info->valnum = name;
5117 if (info->valnum == name)
5118 info->value_id = get_next_value_id ();
5119 else if (is_gimple_min_invariant (info->valnum))
5120 info->value_id = get_or_alloc_constant_value_id (info->valnum);
5123 /* Propagate. */
5124 FOR_EACH_SSA_NAME (i, name, cfun)
5126 vn_ssa_aux_t info = VN_INFO (name);
5127 if (TREE_CODE (info->valnum) == SSA_NAME
5128 && info->valnum != name
5129 && info->value_id != VN_INFO (info->valnum)->value_id)
5130 info->value_id = VN_INFO (info->valnum)->value_id;
5133 set_hashtable_value_ids ();
5135 if (dump_file && (dump_flags & TDF_DETAILS))
5137 fprintf (dump_file, "Value numbers:\n");
5138 FOR_EACH_SSA_NAME (i, name, cfun)
5140 if (VN_INFO (name)->visited
5141 && SSA_VAL (name) != name)
5143 print_generic_expr (dump_file, name);
5144 fprintf (dump_file, " = ");
5145 print_generic_expr (dump_file, SSA_VAL (name));
5146 fprintf (dump_file, "\n");
5152 /* Return the maximum value id we have ever seen. */
5154 unsigned int
5155 get_max_value_id (void)
5157 return next_value_id;
5160 /* Return the next unique value id. */
5162 unsigned int
5163 get_next_value_id (void)
5165 return next_value_id++;
5169 /* Compare two expressions E1 and E2 and return true if they are equal. */
5171 bool
5172 expressions_equal_p (tree e1, tree e2)
5174 /* The obvious case. */
5175 if (e1 == e2)
5176 return true;
5178 /* If either one is VN_TOP consider them equal. */
5179 if (e1 == VN_TOP || e2 == VN_TOP)
5180 return true;
5182 /* If only one of them is null, they cannot be equal. */
5183 if (!e1 || !e2)
5184 return false;
5186 /* Now perform the actual comparison. */
5187 if (TREE_CODE (e1) == TREE_CODE (e2)
5188 && operand_equal_p (e1, e2, OEP_PURE_SAME))
5189 return true;
5191 return false;
5195 /* Return true if the nary operation NARY may trap. This is a copy
5196 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
5198 bool
5199 vn_nary_may_trap (vn_nary_op_t nary)
5201 tree type;
5202 tree rhs2 = NULL_TREE;
5203 bool honor_nans = false;
5204 bool honor_snans = false;
5205 bool fp_operation = false;
5206 bool honor_trapv = false;
5207 bool handled, ret;
5208 unsigned i;
5210 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5211 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5212 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5214 type = nary->type;
5215 fp_operation = FLOAT_TYPE_P (type);
5216 if (fp_operation)
5218 honor_nans = flag_trapping_math && !flag_finite_math_only;
5219 honor_snans = flag_signaling_nans != 0;
5221 else if (INTEGRAL_TYPE_P (type)
5222 && TYPE_OVERFLOW_TRAPS (type))
5223 honor_trapv = true;
5225 if (nary->length >= 2)
5226 rhs2 = nary->op[1];
5227 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5228 honor_trapv,
5229 honor_nans, honor_snans, rhs2,
5230 &handled);
5231 if (handled
5232 && ret)
5233 return true;
5235 for (i = 0; i < nary->length; ++i)
5236 if (tree_could_trap_p (nary->op[i]))
5237 return true;
5239 return false;
5243 class eliminate_dom_walker : public dom_walker
5245 public:
5246 eliminate_dom_walker (cdi_direction, bitmap);
5247 ~eliminate_dom_walker ();
5249 virtual edge before_dom_children (basic_block);
5250 virtual void after_dom_children (basic_block);
5252 tree eliminate_avail (tree op);
5253 void eliminate_push_avail (tree op);
5254 tree eliminate_insert (gimple_stmt_iterator *gsi, tree val);
5256 bool do_pre;
5257 unsigned int el_todo;
5258 unsigned int eliminations;
5259 unsigned int insertions;
5261 /* SSA names that had their defs inserted by PRE if do_pre. */
5262 bitmap inserted_exprs;
5264 /* Blocks with statements that have had their EH properties changed. */
5265 bitmap need_eh_cleanup;
5267 /* Blocks with statements that have had their AB properties changed. */
5268 bitmap need_ab_cleanup;
5270 auto_vec<gimple *> to_remove;
5271 auto_vec<gimple *> to_fixup;
5272 auto_vec<tree> avail;
5273 auto_vec<tree> avail_stack;
5276 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
5277 bitmap inserted_exprs_)
5278 : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
5279 el_todo (0), eliminations (0), insertions (0),
5280 inserted_exprs (inserted_exprs_)
5282 need_eh_cleanup = BITMAP_ALLOC (NULL);
5283 need_ab_cleanup = BITMAP_ALLOC (NULL);
5286 eliminate_dom_walker::~eliminate_dom_walker ()
5288 BITMAP_FREE (need_eh_cleanup);
5289 BITMAP_FREE (need_ab_cleanup);
5292 /* Return a leader for OP that is available at the current point of the
5293 eliminate domwalk. */
5295 tree
5296 eliminate_dom_walker::eliminate_avail (tree op)
5298 tree valnum = VN_INFO (op)->valnum;
5299 if (TREE_CODE (valnum) == SSA_NAME)
5301 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5302 return valnum;
5303 if (avail.length () > SSA_NAME_VERSION (valnum))
5304 return avail[SSA_NAME_VERSION (valnum)];
5306 else if (is_gimple_min_invariant (valnum))
5307 return valnum;
5308 return NULL_TREE;
5311 /* At the current point of the eliminate domwalk make OP available. */
5313 void
5314 eliminate_dom_walker::eliminate_push_avail (tree op)
5316 tree valnum = VN_INFO (op)->valnum;
5317 if (TREE_CODE (valnum) == SSA_NAME)
5319 if (avail.length () <= SSA_NAME_VERSION (valnum))
5320 avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
5321 tree pushop = op;
5322 if (avail[SSA_NAME_VERSION (valnum)])
5323 pushop = avail[SSA_NAME_VERSION (valnum)];
5324 avail_stack.safe_push (pushop);
5325 avail[SSA_NAME_VERSION (valnum)] = op;
5329 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
5330 the leader for the expression if insertion was successful. */
5332 tree
5333 eliminate_dom_walker::eliminate_insert (gimple_stmt_iterator *gsi, tree val)
5335 /* We can insert a sequence with a single assignment only. */
5336 gimple_seq stmts = VN_INFO (val)->expr;
5337 if (!gimple_seq_singleton_p (stmts))
5338 return NULL_TREE;
5339 gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
5340 if (!stmt
5341 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
5342 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
5343 && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
5344 && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
5345 || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
5346 return NULL_TREE;
5348 tree op = gimple_assign_rhs1 (stmt);
5349 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
5350 || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5351 op = TREE_OPERAND (op, 0);
5352 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
5353 if (!leader)
5354 return NULL_TREE;
5356 tree res;
5357 stmts = NULL;
5358 if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5359 res = gimple_build (&stmts, BIT_FIELD_REF,
5360 TREE_TYPE (val), leader,
5361 TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
5362 TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
5363 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
5364 res = gimple_build (&stmts, BIT_AND_EXPR,
5365 TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
5366 else
5367 res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
5368 TREE_TYPE (val), leader);
5369 if (TREE_CODE (res) != SSA_NAME
5370 || SSA_NAME_IS_DEFAULT_DEF (res)
5371 || gimple_bb (SSA_NAME_DEF_STMT (res)))
5373 gimple_seq_discard (stmts);
5375 /* During propagation we have to treat SSA info conservatively
5376 and thus we can end up simplifying the inserted expression
5377 at elimination time to something not defined in stmts. */
5378 /* But then this is a redundancy we failed to detect. Which means
5379 res now has two values. That doesn't play well with how
5380 we track availability here, so give up. */
5381 if (dump_file && (dump_flags & TDF_DETAILS))
5383 if (TREE_CODE (res) == SSA_NAME)
5384 res = eliminate_avail (res);
5385 if (res)
5387 fprintf (dump_file, "Failed to insert expression for value ");
5388 print_generic_expr (dump_file, val);
5389 fprintf (dump_file, " which is really fully redundant to ");
5390 print_generic_expr (dump_file, res);
5391 fprintf (dump_file, "\n");
5395 return NULL_TREE;
5397 else
5399 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
5400 VN_INFO_GET (res)->valnum = val;
5403 insertions++;
5404 if (dump_file && (dump_flags & TDF_DETAILS))
5406 fprintf (dump_file, "Inserted ");
5407 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
5410 return res;
5415 /* Perform elimination for the basic-block B during the domwalk. */
5417 edge
5418 eliminate_dom_walker::before_dom_children (basic_block b)
5420 /* Mark new bb. */
5421 avail_stack.safe_push (NULL_TREE);
5423 /* Skip unreachable blocks marked unreachable during the SCCVN domwalk. */
5424 edge_iterator ei;
5425 edge e;
5426 FOR_EACH_EDGE (e, ei, b->preds)
5427 if (e->flags & EDGE_EXECUTABLE)
5428 break;
5429 if (! e)
5430 return NULL;
5432 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
5434 gphi *phi = gsi.phi ();
5435 tree res = PHI_RESULT (phi);
5437 if (virtual_operand_p (res))
5439 gsi_next (&gsi);
5440 continue;
5443 tree sprime = eliminate_avail (res);
5444 if (sprime
5445 && sprime != res)
5447 if (dump_file && (dump_flags & TDF_DETAILS))
5449 fprintf (dump_file, "Replaced redundant PHI node defining ");
5450 print_generic_expr (dump_file, res);
5451 fprintf (dump_file, " with ");
5452 print_generic_expr (dump_file, sprime);
5453 fprintf (dump_file, "\n");
5456 /* If we inserted this PHI node ourselves, it's not an elimination. */
5457 if (! inserted_exprs
5458 || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
5459 eliminations++;
5461 /* If we will propagate into all uses don't bother to do
5462 anything. */
5463 if (may_propagate_copy (res, sprime))
5465 /* Mark the PHI for removal. */
5466 to_remove.safe_push (phi);
5467 gsi_next (&gsi);
5468 continue;
5471 remove_phi_node (&gsi, false);
5473 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
5474 sprime = fold_convert (TREE_TYPE (res), sprime);
5475 gimple *stmt = gimple_build_assign (res, sprime);
5476 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
5477 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
5478 continue;
5481 eliminate_push_avail (res);
5482 gsi_next (&gsi);
5485 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
5486 !gsi_end_p (gsi);
5487 gsi_next (&gsi))
5489 tree sprime = NULL_TREE;
5490 gimple *stmt = gsi_stmt (gsi);
5491 tree lhs = gimple_get_lhs (stmt);
5492 if (lhs && TREE_CODE (lhs) == SSA_NAME
5493 && !gimple_has_volatile_ops (stmt)
5494 /* See PR43491. Do not replace a global register variable when
5495 it is the RHS of an assignment. Do replace local register
5496 variables since gcc does not guarantee a local variable will
5497 be allocated in a register.
5498 ??? The fix isn't effective here. This should instead
5499 be ensured by not value-numbering them the same but treating
5500 them like volatiles? */
5501 && !(gimple_assign_single_p (stmt)
5502 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
5503 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
5504 && is_global_var (gimple_assign_rhs1 (stmt)))))
5506 sprime = eliminate_avail (lhs);
5507 if (!sprime)
5509 /* If there is no existing usable leader but SCCVN thinks
5510 it has an expression it wants to use as replacement,
5511 insert that. */
5512 tree val = VN_INFO (lhs)->valnum;
5513 if (val != VN_TOP
5514 && TREE_CODE (val) == SSA_NAME
5515 && VN_INFO (val)->needs_insertion
5516 && VN_INFO (val)->expr != NULL
5517 && (sprime = eliminate_insert (&gsi, val)) != NULL_TREE)
5518 eliminate_push_avail (sprime);
5521 /* If this now constitutes a copy, duplicate points-to
5522 and range info appropriately. This is especially
5523 important for inserted code. See tree-ssa-copy.c
5524 for similar code. */
5525 if (sprime
5526 && TREE_CODE (sprime) == SSA_NAME)
5528 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
5529 if (POINTER_TYPE_P (TREE_TYPE (lhs))
5530 && VN_INFO_PTR_INFO (lhs)
5531 && ! VN_INFO_PTR_INFO (sprime))
5533 duplicate_ssa_name_ptr_info (sprime,
5534 VN_INFO_PTR_INFO (lhs));
5535 if (b != sprime_b)
5536 mark_ptr_info_alignment_unknown
5537 (SSA_NAME_PTR_INFO (sprime));
5539 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5540 && VN_INFO_RANGE_INFO (lhs)
5541 && ! VN_INFO_RANGE_INFO (sprime)
5542 && b == sprime_b)
5543 duplicate_ssa_name_range_info (sprime,
5544 VN_INFO_RANGE_TYPE (lhs),
5545 VN_INFO_RANGE_INFO (lhs));
5548 /* Inhibit the use of an inserted PHI on a loop header when
5549 the address of the memory reference is a simple induction
5550 variable. In other cases the vectorizer won't do anything
5551 anyway (either it's loop invariant or a complicated
5552 expression). */
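/* For example (illustrative), a load from a[i_1] in a loop where i_1
   is a simple induction variable: re-using a PRE-inserted PHI for it
   would introduce a loop-carried dependence, so the available leader
   is dropped below. */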
5553 if (sprime
5554 && TREE_CODE (sprime) == SSA_NAME
5555 && do_pre
5556 && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
5557 && loop_outer (b->loop_father)
5558 && has_zero_uses (sprime)
5559 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
5560 && gimple_assign_load_p (stmt))
5562 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
5563 basic_block def_bb = gimple_bb (def_stmt);
5564 if (gimple_code (def_stmt) == GIMPLE_PHI
5565 && def_bb->loop_father->header == def_bb)
5567 loop_p loop = def_bb->loop_father;
5568 ssa_op_iter iter;
5569 tree op;
5570 bool found = false;
5571 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5573 affine_iv iv;
5574 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
5575 if (def_bb
5576 && flow_bb_inside_loop_p (loop, def_bb)
5577 && simple_iv (loop, loop, op, &iv, true))
5579 found = true;
5580 break;
5583 if (found)
5585 if (dump_file && (dump_flags & TDF_DETAILS))
5587 fprintf (dump_file, "Not replacing ");
5588 print_gimple_expr (dump_file, stmt, 0);
5589 fprintf (dump_file, " with ");
5590 print_generic_expr (dump_file, sprime);
5591 fprintf (dump_file, " which would add a loop"
5592 " carried dependence to loop %d\n",
5593 loop->num);
5595 /* Don't keep sprime available. */
5596 sprime = NULL_TREE;
5601 if (sprime)
5603 /* If we can propagate the value computed for LHS into
5604 all uses don't bother doing anything with this stmt. */
5605 if (may_propagate_copy (lhs, sprime))
5607 /* Mark it for removal. */
5608 to_remove.safe_push (stmt);
5610 /* ??? Don't count copy/constant propagations. */
5611 if (gimple_assign_single_p (stmt)
5612 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5613 || gimple_assign_rhs1 (stmt) == sprime))
5614 continue;
5616 if (dump_file && (dump_flags & TDF_DETAILS))
5618 fprintf (dump_file, "Replaced ");
5619 print_gimple_expr (dump_file, stmt, 0);
5620 fprintf (dump_file, " with ");
5621 print_generic_expr (dump_file, sprime);
5622 fprintf (dump_file, " in all uses of ");
5623 print_gimple_stmt (dump_file, stmt, 0);
5626 eliminations++;
5627 continue;
5630 /* If this is an assignment from our leader (which
5631 happens in the case the value-number is a constant)
5632 then there is nothing to do. */
5633 if (gimple_assign_single_p (stmt)
5634 && sprime == gimple_assign_rhs1 (stmt))
5635 continue;
5637 /* Else replace its RHS. */
5638 bool can_make_abnormal_goto
5639 = is_gimple_call (stmt)
5640 && stmt_can_make_abnormal_goto (stmt);
5642 if (dump_file && (dump_flags & TDF_DETAILS))
5644 fprintf (dump_file, "Replaced ");
5645 print_gimple_expr (dump_file, stmt, 0);
5646 fprintf (dump_file, " with ");
5647 print_generic_expr (dump_file, sprime);
5648 fprintf (dump_file, " in ");
5649 print_gimple_stmt (dump_file, stmt, 0);
5652 eliminations++;
5653 gimple *orig_stmt = stmt;
5654 if (!useless_type_conversion_p (TREE_TYPE (lhs),
5655 TREE_TYPE (sprime)))
5656 sprime = fold_convert (TREE_TYPE (lhs), sprime);
5657 tree vdef = gimple_vdef (stmt);
5658 tree vuse = gimple_vuse (stmt);
5659 propagate_tree_value_into_stmt (&gsi, sprime);
5660 stmt = gsi_stmt (gsi);
5661 update_stmt (stmt);
5662 if (vdef != gimple_vdef (stmt))
5663 VN_INFO (vdef)->valnum = vuse;
5665 /* If we removed EH side-effects from the statement, clean
5666 its EH information. */
5667 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
5669 bitmap_set_bit (need_eh_cleanup,
5670 gimple_bb (stmt)->index);
5671 if (dump_file && (dump_flags & TDF_DETAILS))
5672 fprintf (dump_file, " Removed EH side-effects.\n");
5675 /* Likewise for AB side-effects. */
5676 if (can_make_abnormal_goto
5677 && !stmt_can_make_abnormal_goto (stmt))
5679 bitmap_set_bit (need_ab_cleanup,
5680 gimple_bb (stmt)->index);
5681 if (dump_file && (dump_flags & TDF_DETAILS))
5682 fprintf (dump_file, " Removed AB side-effects.\n");
5685 continue;
5689 /* If the statement is a scalar store, see if the expression
5690 has the same value number as its rhs. If so, the store is
5691 dead. */
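/* For example (illustrative), *p_1 = x_2 is redundant if looking up
   *p_1 at the store's VUSE yields a value equal to x_2's value number
   and the alias-set check below passes. */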
5692 if (gimple_assign_single_p (stmt)
5693 && !gimple_has_volatile_ops (stmt)
5694 && !is_gimple_reg (gimple_assign_lhs (stmt))
5695 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5696 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
5698 tree val;
5699 tree rhs = gimple_assign_rhs1 (stmt);
5700 vn_reference_t vnresult;
5701 val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
5702 &vnresult, false);
5703 if (TREE_CODE (rhs) == SSA_NAME)
5704 rhs = VN_INFO (rhs)->valnum;
5705 if (val
5706 && operand_equal_p (val, rhs, 0))
5708 /* We can only remove the later store if the former aliases
5709 at least all accesses the later one does or if the store
5710 was to readonly memory storing the same value. */
5711 alias_set_type set = get_alias_set (lhs);
5712 if (! vnresult
5713 || vnresult->set == set
5714 || alias_set_subset_of (set, vnresult->set))
5716 if (dump_file && (dump_flags & TDF_DETAILS))
5718 fprintf (dump_file, "Deleted redundant store ");
5719 print_gimple_stmt (dump_file, stmt, 0);
5722 /* Queue stmt for removal. */
5723 to_remove.safe_push (stmt);
5724 continue;
5729 /* If this is a control statement for which value numbering left
5730 some outgoing edges unexecutable, force the condition in a way
5731 consistent with that. */
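/* For example, if only the true edge was left executable the
   condition is forced to be always true so CFG cleanup can remove
   the unexecutable edge. */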
5732 if (gcond *cond = dyn_cast <gcond *> (stmt))
5734 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
5735 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
5737 if (dump_file && (dump_flags & TDF_DETAILS))
5739 fprintf (dump_file, "Removing unexecutable edge from ");
5740 print_gimple_stmt (dump_file, stmt, 0);
5742 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
5743 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
5744 gimple_cond_make_true (cond);
5745 else
5746 gimple_cond_make_false (cond);
5747 update_stmt (cond);
5748 el_todo |= TODO_cleanup_cfg;
5749 continue;
5753 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
5754 bool was_noreturn = (is_gimple_call (stmt)
5755 && gimple_call_noreturn_p (stmt));
5756 tree vdef = gimple_vdef (stmt);
5757 tree vuse = gimple_vuse (stmt);
5759 /* If we didn't replace the whole stmt (or propagate the result
5760 into all uses), replace all uses on this stmt with their
5761 leaders. */
5762 bool modified = false;
5763 use_operand_p use_p;
5764 ssa_op_iter iter;
5765 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5767 tree use = USE_FROM_PTR (use_p);
5768 /* ??? The call code above leaves stmt operands un-updated. */
5769 if (TREE_CODE (use) != SSA_NAME)
5770 continue;
5771 tree sprime = eliminate_avail (use);
5772 if (sprime && sprime != use
5773 && may_propagate_copy (use, sprime)
5774 /* We substitute into debug stmts to avoid excessive
5775 debug temporaries created by removed stmts, but we need
5776 to avoid doing so for inserted sprimes as we never want
5777 to create debug temporaries for them. */
5778 && (!inserted_exprs
5779 || TREE_CODE (sprime) != SSA_NAME
5780 || !is_gimple_debug (stmt)
5781 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
5783 propagate_value (use_p, sprime);
5784 modified = true;
5788 /* Fold the stmt if modified; this canonicalizes MEM_REFs we propagated
5789 into, which is a requirement for the IPA devirt machinery. */
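/* Hedged example: after a hypothetical &d_2 has been propagated into a
   dereference, folding turns the resulting MEM_REF on a constant address
   back into a reference based directly on the decl, which is the shape
   the polymorphic-call analysis further below relies on.  */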
5790 gimple *old_stmt = stmt;
5791 if (modified)
5793 /* If a formerly non-invariant ADDR_EXPR was turned into an
5794 invariant one, it was on a separate stmt; recompute its flags. */
5795 if (gimple_assign_single_p (stmt)
5796 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
5797 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
5798 gimple_stmt_iterator prev = gsi;
5799 gsi_prev (&prev);
5800 if (fold_stmt (&gsi))
5802 /* fold_stmt may have created new stmts in between
5803 the previous stmt and the folded stmt. Mark
5804 all defs created there as varying so as not to confuse
5805 the SCCVN machinery, which we use even during
5806 elimination. */
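/* Hedged restatement of the loop below: walk from PREV (the stmt before
   the fold point, or the start of the block) up to the folded stmt and
   give every SSA def created there a self value number, i.e. treat it
   as varying.  */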
5807 if (gsi_end_p (prev))
5808 prev = gsi_start_bb (b);
5809 else
5810 gsi_next (&prev);
5811 if (gsi_stmt (prev) != gsi_stmt (gsi))
5814 tree def;
5815 ssa_op_iter dit;
5816 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
5817 dit, SSA_OP_ALL_DEFS)
5818 /* As existing DEFs may move between stmts
5819 we have to guard VN_INFO_GET. */
5820 if (! has_VN_INFO (def))
5821 VN_INFO_GET (def)->valnum = def;
5822 if (gsi_stmt (prev) == gsi_stmt (gsi))
5823 break;
5824 gsi_next (&prev);
5826 while (1);
5828 stmt = gsi_stmt (gsi);
5829 /* In case we folded the stmt away, schedule the NOP for removal. */
5830 if (gimple_nop_p (stmt))
5831 to_remove.safe_push (stmt);
5834 /* Visit indirect calls and turn them into direct calls if
5835 possible using the devirtualization machinery. Do this before
5836 checking for required EH/abnormal/noreturn cleanup as devirt
5837 may expose more of those. */
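/* Hedged sketch of the transform done below: for a virtual call through
   an OBJ_TYPE_REF whose set of possible targets, as computed by
   possible_polymorphic_call_targets, contains exactly one method, the
   call is rewritten into a direct call to that method; if the target set
   is provably empty the call becomes __builtin_unreachable.  */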
5838 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5840 tree fn = gimple_call_fn (call_stmt);
5841 if (fn
5842 && flag_devirtualize
5843 && virtual_method_call_p (fn))
5845 tree otr_type = obj_type_ref_class (fn);
5846 unsigned HOST_WIDE_INT otr_tok
5847 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
5848 tree instance;
5849 ipa_polymorphic_call_context context (current_function_decl,
5850 fn, stmt, &instance);
5851 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
5852 otr_type, stmt);
5853 bool final;
5854 vec <cgraph_node *> targets
5855 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
5856 otr_tok, context, &final);
5857 if (dump_file)
5858 dump_possible_polymorphic_call_targets (dump_file,
5859 obj_type_ref_class (fn),
5860 otr_tok, context);
5861 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5863 tree fn;
5864 if (targets.length () == 1)
5865 fn = targets[0]->decl;
5866 else
5867 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5868 if (dump_enabled_p ())
5870 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
5871 "converting indirect call to "
5872 "function %s\n",
5873 lang_hooks.decl_printable_name (fn, 2));
5875 gimple_call_set_fndecl (call_stmt, fn);
5876 /* If changing the call to __builtin_unreachable
5877 or similar noreturn function, adjust gimple_call_fntype
5878 too. */
5879 if (gimple_call_noreturn_p (call_stmt)
5880 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
5881 && TYPE_ARG_TYPES (TREE_TYPE (fn))
5882 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
5883 == void_type_node))
5884 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
5885 maybe_remove_unused_call_args (cfun, call_stmt);
5886 modified = true;
5891 if (modified)
5893 /* When changing a call into a noreturn call, cfg cleanup
5894 is needed to fix up the noreturn call. */
5895 if (!was_noreturn
5896 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
5897 to_fixup.safe_push (stmt);
5898 /* When changing a condition or switch into one where we know which
5899 edge will be executed, schedule a cfg cleanup. */
5900 if ((gimple_code (stmt) == GIMPLE_COND
5901 && (gimple_cond_true_p (as_a <gcond *> (stmt))
5902 || gimple_cond_false_p (as_a <gcond *> (stmt))))
5903 || (gimple_code (stmt) == GIMPLE_SWITCH
5904 && TREE_CODE (gimple_switch_index
5905 (as_a <gswitch *> (stmt))) == INTEGER_CST))
5906 el_todo |= TODO_cleanup_cfg;
5907 /* If we removed EH side-effects from the statement, clean
5908 its EH information. */
5909 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
5911 bitmap_set_bit (need_eh_cleanup,
5912 gimple_bb (stmt)->index);
5913 if (dump_file && (dump_flags & TDF_DETAILS))
5914 fprintf (dump_file, " Removed EH side-effects.\n");
5916 /* Likewise for AB side-effects. */
5917 if (can_make_abnormal_goto
5918 && !stmt_can_make_abnormal_goto (stmt))
5920 bitmap_set_bit (need_ab_cleanup,
5921 gimple_bb (stmt)->index);
5922 if (dump_file && (dump_flags & TDF_DETAILS))
5923 fprintf (dump_file, " Removed AB side-effects.\n");
5925 update_stmt (stmt);
5926 if (vdef != gimple_vdef (stmt))
5927 VN_INFO (vdef)->valnum = vuse;
5930 /* Make new values available; for a fully redundant LHS we
5931 continue with the next stmt above and skip this. */
5932 def_operand_p defp;
5933 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
5934 eliminate_push_avail (DEF_FROM_PTR (defp));
5937 /* Replace destination PHI arguments. */
5938 FOR_EACH_EDGE (e, ei, b->succs)
5939 if (e->flags & EDGE_EXECUTABLE)
5940 for (gphi_iterator gsi = gsi_start_phis (e->dest);
5941 !gsi_end_p (gsi);
5942 gsi_next (&gsi))
5944 gphi *phi = gsi.phi ();
5945 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
5946 tree arg = USE_FROM_PTR (use_p);
5947 if (TREE_CODE (arg) != SSA_NAME
5948 || virtual_operand_p (arg))
5949 continue;
5950 tree sprime = eliminate_avail (arg);
5951 if (sprime && may_propagate_copy (arg, sprime))
5952 propagate_value (use_p, sprime);
5954 return NULL;
5957 /* Make leaders registered in the just-left dominator subtree no longer available. */
5959 void
5960 eliminate_dom_walker::after_dom_children (basic_block)
5962 tree entry;
5963 while ((entry = avail_stack.pop ()) != NULL_TREE)
5965 tree valnum = VN_INFO (entry)->valnum;
5966 tree old = avail[SSA_NAME_VERSION (valnum)];
5967 if (old == entry)
5968 avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
5969 else
5970 avail[SSA_NAME_VERSION (valnum)] = entry;
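/* Hedged note: avail and avail_stack together act like a scoped hash
   table over the dominator tree.  eliminate_push_avail records the
   previous leader for a value (or the new name itself when there was
   none) before installing the new one, and the loop above unwinds those
   records, up to the NULL_TREE marker that delimits this block's entries,
   when the walk leaves the block's subtree.  */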
5974 /* Eliminate fully redundant computations. */
5976 unsigned int
5977 vn_eliminate (bitmap inserted_exprs)
5979 eliminate_dom_walker el (CDI_DOMINATORS, inserted_exprs);
5980 el.avail.reserve (num_ssa_names);
5982 el.walk (cfun->cfg->x_entry_block_ptr);
5984 /* We cannot remove stmts during the BB walk, especially not release SSA
5985 names there, as this confuses the VN machinery. The stmts ending
5986 up in to_remove are either stores or simple copies.
5987 Remove stmts in reverse order to make debug stmt creation possible. */
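/* Hedged note: to_remove was filled during the dominator walk, so the
   LIFO pop below removes dominated stmts first.  That keeps the operands
   of each removed stmt defined while debug temporaries are created for
   its defs, and release_defs never frees an SSA name that a still-queued
   stmt uses.  */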
5988 while (!el.to_remove.is_empty ())
5990 gimple *stmt = el.to_remove.pop ();
5992 if (dump_file && (dump_flags & TDF_DETAILS))
5994 fprintf (dump_file, "Removing dead stmt ");
5995 print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
5998 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5999 if (gimple_code (stmt) == GIMPLE_PHI)
6000 remove_phi_node (&gsi, true);
6001 else
6003 basic_block bb = gimple_bb (stmt);
6004 unlink_stmt_vdef (stmt);
6005 if (gsi_remove (&gsi, true))
6006 bitmap_set_bit (el.need_eh_cleanup, bb->index);
6007 if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
6008 bitmap_set_bit (el.need_ab_cleanup, bb->index);
6009 release_defs (stmt);
6012 /* Removing a stmt may expose a forwarder block. */
6013 el.el_todo |= TODO_cleanup_cfg;
6016 /* Fix up stmts that became noreturn calls. This may require splitting
6017 blocks and thus isn't possible during the dominator walk. Do this
6018 in reverse order so we don't inadvertently remove a stmt we want to
6019 fix up by visiting a dominating now-noreturn call first. */
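/* Hedged note: to_fixup was likewise filled during the dominator walk, so
   the LIFO pop below visits dominated stmts before their dominators,
   which is exactly the ordering the comment above asks for.  */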
6020 while (!el.to_fixup.is_empty ())
6022 gimple *stmt = el.to_fixup.pop ();
6024 if (dump_file && (dump_flags & TDF_DETAILS))
6026 fprintf (dump_file, "Fixing up noreturn call ");
6027 print_gimple_stmt (dump_file, stmt, 0);
6030 if (fixup_noreturn_call (stmt))
6031 el.el_todo |= TODO_cleanup_cfg;
6034 bool do_eh_cleanup = !bitmap_empty_p (el.need_eh_cleanup);
6035 bool do_ab_cleanup = !bitmap_empty_p (el.need_ab_cleanup);
6037 if (do_eh_cleanup)
6038 gimple_purge_all_dead_eh_edges (el.need_eh_cleanup);
6040 if (do_ab_cleanup)
6041 gimple_purge_all_dead_abnormal_call_edges (el.need_ab_cleanup);
6043 if (do_eh_cleanup || do_ab_cleanup)
6044 el.el_todo |= TODO_cleanup_cfg;
6046 statistics_counter_event (cfun, "Eliminated", el.eliminations);
6047 statistics_counter_event (cfun, "Insertions", el.insertions);
6049 return el.el_todo;
6053 namespace {
6055 const pass_data pass_data_fre =
6057 GIMPLE_PASS, /* type */
6058 "fre", /* name */
6059 OPTGROUP_NONE, /* optinfo_flags */
6060 TV_TREE_FRE, /* tv_id */
6061 ( PROP_cfg | PROP_ssa ), /* properties_required */
6062 0, /* properties_provided */
6063 0, /* properties_destroyed */
6064 0, /* todo_flags_start */
6065 0, /* todo_flags_finish */
6068 class pass_fre : public gimple_opt_pass
6070 public:
6071 pass_fre (gcc::context *ctxt)
6072 : gimple_opt_pass (pass_data_fre, ctxt)
6075 /* opt_pass methods: */
6076 opt_pass * clone () { return new pass_fre (m_ctxt); }
6077 virtual bool gate (function *) { return flag_tree_fre != 0; }
6078 virtual unsigned int execute (function *);
6080 }; // class pass_fre
6082 unsigned int
6083 pass_fre::execute (function *)
6085 unsigned int todo = 0;
6087 run_scc_vn (VN_WALKREWRITE);
6089 /* Remove all the redundant expressions. */
6090 todo |= vn_eliminate (NULL);
6092 scc_vn_restore_ssa_info ();
6093 free_scc_vn ();
6095 return todo;
6098 } // anon namespace
6100 gimple_opt_pass *
6101 make_pass_fre (gcc::context *ctxt)
6103 return new pass_fre (ctxt);
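/* Hedged usage note: like other GIMPLE passes, pass_fre is instantiated by
   the pass manager through this make_pass_fre factory (its position in the
   pipeline is listed in passes.def), and per the gate above it only runs
   when flag_tree_fre (-ftree-fre) is enabled.  */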