gcc/tree-ssa-sccvn.c
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2018 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "ssa.h"
29 #include "expmed.h"
30 #include "insn-config.h"
31 #include "memmodel.h"
32 #include "emit-rtl.h"
33 #include "cgraph.h"
34 #include "gimple-pretty-print.h"
35 #include "alias.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "cfganal.h"
39 #include "tree-inline.h"
40 #include "internal-fn.h"
41 #include "gimple-fold.h"
42 #include "tree-eh.h"
43 #include "gimplify.h"
44 #include "flags.h"
45 #include "dojump.h"
46 #include "explow.h"
47 #include "calls.h"
48 #include "varasm.h"
49 #include "stmt.h"
50 #include "expr.h"
51 #include "tree-dfa.h"
52 #include "tree-ssa.h"
53 #include "dumpfile.h"
54 #include "cfgloop.h"
55 #include "params.h"
56 #include "tree-ssa-propagate.h"
57 #include "tree-cfg.h"
58 #include "domwalk.h"
59 #include "gimple-iterator.h"
60 #include "gimple-match.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63 #include "tree-pass.h"
64 #include "statistics.h"
65 #include "langhooks.h"
66 #include "ipa-utils.h"
67 #include "dbgcnt.h"
68 #include "tree-cfgcleanup.h"
69 #include "tree-ssa-loop.h"
70 #include "tree-scalar-evolution.h"
71 #include "tree-ssa-sccvn.h"
73 /* This algorithm is based on the SCC algorithm presented by Keith
74 Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
75 (http://citeseer.ist.psu.edu/41805.html). In
76 straight-line code, it is equivalent to a regular hash-based value
77 numbering that is performed in reverse postorder.
79 For code with cycles, there are two alternatives, both of which
80 require keeping the hashtables separate from the actual list of
81 value numbers for SSA names.
83 1. Iterate value numbering in an RPO walk of the blocks, removing
84 all the entries from the hashtable after each iteration (but
85 keeping the SSA name->value number mapping between iterations).
86 Iterate until it does not change.
88 2. Perform value numbering as part of an SCC walk on the SSA graph,
89 iterating only the cycles in the SSA graph until they do not change
90 (using a separate, optimistic hashtable for value numbering the SCC
91 operands).
93 The second is not just faster in practice (because most SSA graph
94 cycles do not involve all the variables in the graph), it also has
95 some nice properties.
97 One of these nice properties is that when we pop an SCC off the
98 stack, we are guaranteed to have processed all the operands coming from
99 *outside of that SCC*, so we do not need to do anything special to
100 ensure they have value numbers.
102 Another nice property is that the SCC walk is done as part of a DFS
103 of the SSA graph, which makes it easy to perform combining and
104 simplifying operations at the same time.
106 The code below is deliberately written in a way that makes it easy
107 to separate the SCC walk from the other work it does.
109 In order to propagate constants through the code, we track which
110 expressions contain constants, and use those while folding. In
111 theory, we could also track expressions whose value numbers are
112 replaced, in case we end up folding based on expression
113 identities.
115 In order to value number memory, we assign value numbers to vuses.
116 This enables us to note that, for example, stores of the same
117 value to the same address from the same starting memory state are
118 equivalent.
119 TODO:
121 1. We can iterate only the changing portions of the SCCs, but
122 I have not seen an SCC big enough for this to be a win.
123 2. If you differentiate between phi nodes for loops and phi nodes
124 for if-then-else, you can properly consider phi nodes in different
125 blocks for equivalence.
126 3. We could value number vuses in more cases, particularly, whole
127 structure copies. */
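/* Illustrative example (hypothetical, not part of the original file): in a
   loop such as

     # i_1 = PHI <0(preheader), i_3(latch)>
     i_3 = i_1 + 1;

   the names i_1 and i_3 reference each other and thus form an SCC in the
   SSA graph.  The SCC walk value-numbers the whole cycle together, using
   the optimistic table and iterating until the value numbers stabilize,
   while operands coming from outside the SCC (here the constants 0 and 1)
   already have their final value numbers when the SCC is popped off the
   stack.  */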
131 static tree *last_vuse_ptr;
132 static vn_lookup_kind vn_walk_kind;
133 static vn_lookup_kind default_vn_walk_kind;
134 bitmap const_parms;
136 /* vn_nary_op hashtable helpers. */
138 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
140 typedef vn_nary_op_s *compare_type;
141 static inline hashval_t hash (const vn_nary_op_s *);
142 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
145 /* Return the computed hashcode for nary operation P1. */
147 inline hashval_t
148 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
150 return vno1->hashcode;
153 /* Compare nary operations P1 and P2 and return true if they are
154 equivalent. */
156 inline bool
157 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
159 return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
162 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
163 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
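/* Illustrative sketch (hypothetical, not part of the original file): the
   hasher traits above plug into GCC's hash_table template roughly like
   this, given a vn_nary_op_s object vno whose hashcode field has already
   been computed:

     vn_nary_op_table_type *tbl = new vn_nary_op_table_type (23);
     vn_nary_op_s **slot
       = tbl->find_slot_with_hash (&vno, vno.hashcode, INSERT);
     if (!*slot)
       *slot = &vno;

   hash () simply returns the precomputed hashcode and equal () defers to
   vn_nary_op_eq.  */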
166 /* vn_phi hashtable helpers. */
168 static int
169 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
171 struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
173 static inline hashval_t hash (const vn_phi_s *);
174 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
177 /* Return the computed hashcode for phi operation P1. */
179 inline hashval_t
180 vn_phi_hasher::hash (const vn_phi_s *vp1)
182 return vp1->hashcode;
185 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
187 inline bool
188 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
190 return vp1 == vp2 || vn_phi_eq (vp1, vp2);
193 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
194 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
197 /* Compare two reference operands P1 and P2 for equality. Return true if
198 they are equal, and false otherwise. */
200 static int
201 vn_reference_op_eq (const void *p1, const void *p2)
203 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
204 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
206 return (vro1->opcode == vro2->opcode
207 /* We do not care for differences in type qualification. */
208 && (vro1->type == vro2->type
209 || (vro1->type && vro2->type
210 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
211 TYPE_MAIN_VARIANT (vro2->type))))
212 && expressions_equal_p (vro1->op0, vro2->op0)
213 && expressions_equal_p (vro1->op1, vro2->op1)
214 && expressions_equal_p (vro1->op2, vro2->op2));
217 /* Free a reference operation structure VP. */
219 static inline void
220 free_reference (vn_reference_s *vr)
222 vr->operands.release ();
226 /* vn_reference hashtable helpers. */
228 struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
230 static inline hashval_t hash (const vn_reference_s *);
231 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
234 /* Return the hashcode for a given reference operation P1. */
236 inline hashval_t
237 vn_reference_hasher::hash (const vn_reference_s *vr1)
239 return vr1->hashcode;
242 inline bool
243 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
245 return v == c || vn_reference_eq (v, c);
248 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
249 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
252 /* The set of VN hashtables. */
254 typedef struct vn_tables_s
256 vn_nary_op_table_type *nary;
257 vn_phi_table_type *phis;
258 vn_reference_table_type *references;
259 } *vn_tables_t;
262 /* vn_constant hashtable helpers. */
264 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
266 static inline hashval_t hash (const vn_constant_s *);
267 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
270 /* Hash table hash function for vn_constant_t. */
272 inline hashval_t
273 vn_constant_hasher::hash (const vn_constant_s *vc1)
275 return vc1->hashcode;
278 /* Hash table equality function for vn_constant_t. */
280 inline bool
281 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
283 if (vc1->hashcode != vc2->hashcode)
284 return false;
286 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
289 static hash_table<vn_constant_hasher> *constant_to_value_id;
290 static bitmap constant_value_ids;
293 /* Obstack we allocate the vn-tables elements from. */
294 static obstack vn_tables_obstack;
295 /* Special obstack we never unwind. */
296 static obstack vn_tables_insert_obstack;
298 static vn_reference_t last_inserted_ref;
299 static vn_phi_t last_inserted_phi;
300 static vn_nary_op_t last_inserted_nary;
302 /* Valid hashtables storing information we have proven to be
303 correct. */
304 static vn_tables_t valid_info;
307 /* Reverse post order index for each basic block. */
308 static int *rpo_numbers;
310 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
312 /* Return the SSA value of the VUSE x, supporting released VDEFs
313 during elimination which will value-number the VDEF to the
314 associated VUSE (but not substitute in the whole lattice). */
316 static inline tree
317 vuse_ssa_val (tree x)
319 if (!x)
320 return NULL_TREE;
324 tree tem = SSA_VAL (x);
325 /* stmt walking can walk over a backedge and reach code we didn't
326 value-number yet. */
327 if (tem == VN_TOP)
328 return x;
329 x = tem;
331 while (SSA_NAME_IN_FREE_LIST (x));
333 return x;
336 /* This represents the top of the VN lattice, which is the universal
337 value. */
339 tree VN_TOP;
341 /* Unique counter for our value ids. */
343 static unsigned int next_value_id;
345 /* Next DFS number and the stack for strongly connected component
346 detection. */
348 static unsigned int next_dfs_num;
349 static vec<tree> sccstack;
353 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
354 are allocated on an obstack for locality reasons, and to free them
355 without looping over the vec. */
357 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
358 static struct obstack vn_ssa_aux_obstack;
360 /* Return whether there is value numbering information for a given SSA name. */
362 bool
363 has_VN_INFO (tree name)
365 if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
366 return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
367 return false;
370 /* Return the value numbering information for a given SSA name. */
372 vn_ssa_aux_t
373 VN_INFO (tree name)
375 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
376 gcc_checking_assert (res);
377 return res;
380 /* Set the value numbering info for a given SSA name to a given
381 value. */
383 static inline void
384 VN_INFO_SET (tree name, vn_ssa_aux_t value)
386 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
389 /* Initialize the value numbering info for a given SSA name.
390 This should be called just once for every SSA name. */
392 vn_ssa_aux_t
393 VN_INFO_GET (tree name)
395 vn_ssa_aux_t newinfo;
397 gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
398 || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
399 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
400 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
401 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
402 vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
403 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
404 return newinfo;
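/* Illustrative usage (hypothetical, not part of the original file): for a
   freshly created SSA name the pass calls VN_INFO_GET exactly once to
   allocate the aux info and afterwards accesses it through VN_INFO:

     VN_INFO_GET (name)->valnum = VN_TOP;
     ...
     if (has_VN_INFO (name))
       VN_INFO (name)->value_id = get_next_value_id ();
*/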
408 /* Return the vn_kind the expression computed by the stmt should be
409 associated with. */
411 enum vn_kind
412 vn_get_stmt_kind (gimple *stmt)
414 switch (gimple_code (stmt))
416 case GIMPLE_CALL:
417 return VN_REFERENCE;
418 case GIMPLE_PHI:
419 return VN_PHI;
420 case GIMPLE_ASSIGN:
422 enum tree_code code = gimple_assign_rhs_code (stmt);
423 tree rhs1 = gimple_assign_rhs1 (stmt);
424 switch (get_gimple_rhs_class (code))
426 case GIMPLE_UNARY_RHS:
427 case GIMPLE_BINARY_RHS:
428 case GIMPLE_TERNARY_RHS:
429 return VN_NARY;
430 case GIMPLE_SINGLE_RHS:
431 switch (TREE_CODE_CLASS (code))
433 case tcc_reference:
434 /* VOP-less references can go through unary case. */
435 if ((code == REALPART_EXPR
436 || code == IMAGPART_EXPR
437 || code == VIEW_CONVERT_EXPR
438 || code == BIT_FIELD_REF)
439 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
440 return VN_NARY;
442 /* Fallthrough. */
443 case tcc_declaration:
444 return VN_REFERENCE;
446 case tcc_constant:
447 return VN_CONSTANT;
449 default:
450 if (code == ADDR_EXPR)
451 return (is_gimple_min_invariant (rhs1)
452 ? VN_CONSTANT : VN_REFERENCE);
453 else if (code == CONSTRUCTOR)
454 return VN_NARY;
455 return VN_NONE;
457 default:
458 return VN_NONE;
461 default:
462 return VN_NONE;
466 /* Lookup a value id for CONSTANT and return it. If it does not
467 exist, return 0. */
469 unsigned int
470 get_constant_value_id (tree constant)
472 vn_constant_s **slot;
473 struct vn_constant_s vc;
475 vc.hashcode = vn_hash_constant_with_type (constant);
476 vc.constant = constant;
477 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
478 if (slot)
479 return (*slot)->value_id;
480 return 0;
483 /* Lookup a value id for CONSTANT, and if it does not exist, create a
484 new one and return it. If it does exist, return it. */
486 unsigned int
487 get_or_alloc_constant_value_id (tree constant)
489 vn_constant_s **slot;
490 struct vn_constant_s vc;
491 vn_constant_t vcp;
493 vc.hashcode = vn_hash_constant_with_type (constant);
494 vc.constant = constant;
495 slot = constant_to_value_id->find_slot (&vc, INSERT);
496 if (*slot)
497 return (*slot)->value_id;
499 vcp = XNEW (struct vn_constant_s);
500 vcp->hashcode = vc.hashcode;
501 vcp->constant = constant;
502 vcp->value_id = get_next_value_id ();
503 *slot = vcp;
504 bitmap_set_bit (constant_value_ids, vcp->value_id);
505 return vcp->value_id;
508 /* Return true if V is a value id for a constant. */
510 bool
511 value_id_constant_p (unsigned int v)
513 return bitmap_bit_p (constant_value_ids, v);
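/* Illustrative example (hypothetical, not part of the original file): for
   a constant such as build_int_cst (integer_type_node, 42), the first call
   to get_or_alloc_constant_value_id allocates a fresh value id and sets
   the corresponding bit in constant_value_ids; later calls with an equal
   constant return the same id:

     unsigned int id = get_or_alloc_constant_value_id (cst);
     gcc_assert (value_id_constant_p (id));
     gcc_assert (get_constant_value_id (cst) == id);
*/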
516 /* Compute the hash for a reference operand VRO1. */
518 static void
519 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
521 hstate.add_int (vro1->opcode);
522 if (vro1->op0)
523 inchash::add_expr (vro1->op0, hstate);
524 if (vro1->op1)
525 inchash::add_expr (vro1->op1, hstate);
526 if (vro1->op2)
527 inchash::add_expr (vro1->op2, hstate);
530 /* Compute a hash for the reference operation VR1 and return it. */
532 static hashval_t
533 vn_reference_compute_hash (const vn_reference_t vr1)
535 inchash::hash hstate;
536 hashval_t result;
537 int i;
538 vn_reference_op_t vro;
539 poly_int64 off = -1;
540 bool deref = false;
542 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
544 if (vro->opcode == MEM_REF)
545 deref = true;
546 else if (vro->opcode != ADDR_EXPR)
547 deref = false;
548 if (maybe_ne (vro->off, -1))
550 if (known_eq (off, -1))
551 off = 0;
552 off += vro->off;
554 else
556 if (maybe_ne (off, -1)
557 && maybe_ne (off, 0))
558 hstate.add_poly_int (off);
559 off = -1;
560 if (deref
561 && vro->opcode == ADDR_EXPR)
563 if (vro->op0)
565 tree op = TREE_OPERAND (vro->op0, 0);
566 hstate.add_int (TREE_CODE (op));
567 inchash::add_expr (op, hstate);
570 else
571 vn_reference_op_compute_hash (vro, hstate);
574 result = hstate.end ();
575 /* ??? We would ICE later if we hash instead of adding that in. */
576 if (vr1->vuse)
577 result += SSA_NAME_VERSION (vr1->vuse);
579 return result;
582 /* Return true if reference operations VR1 and VR2 are equivalent. This
583 means they have the same set of operands and vuses. */
585 bool
586 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
588 unsigned i, j;
590 /* Early out if this is not a hash collision. */
591 if (vr1->hashcode != vr2->hashcode)
592 return false;
594 /* The VOP needs to be the same. */
595 if (vr1->vuse != vr2->vuse)
596 return false;
598 /* If the operands are the same we are done. */
599 if (vr1->operands == vr2->operands)
600 return true;
602 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
603 return false;
605 if (INTEGRAL_TYPE_P (vr1->type)
606 && INTEGRAL_TYPE_P (vr2->type))
608 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
609 return false;
611 else if (INTEGRAL_TYPE_P (vr1->type)
612 && (TYPE_PRECISION (vr1->type)
613 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
614 return false;
615 else if (INTEGRAL_TYPE_P (vr2->type)
616 && (TYPE_PRECISION (vr2->type)
617 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
618 return false;
620 i = 0;
621 j = 0;
624 poly_int64 off1 = 0, off2 = 0;
625 vn_reference_op_t vro1, vro2;
626 vn_reference_op_s tem1, tem2;
627 bool deref1 = false, deref2 = false;
628 for (; vr1->operands.iterate (i, &vro1); i++)
630 if (vro1->opcode == MEM_REF)
631 deref1 = true;
632 /* Do not look through a storage order barrier. */
633 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
634 return false;
635 if (known_eq (vro1->off, -1))
636 break;
637 off1 += vro1->off;
639 for (; vr2->operands.iterate (j, &vro2); j++)
641 if (vro2->opcode == MEM_REF)
642 deref2 = true;
643 /* Do not look through a storage order barrier. */
644 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
645 return false;
646 if (known_eq (vro2->off, -1))
647 break;
648 off2 += vro2->off;
650 if (maybe_ne (off1, off2))
651 return false;
652 if (deref1 && vro1->opcode == ADDR_EXPR)
654 memset (&tem1, 0, sizeof (tem1));
655 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
656 tem1.type = TREE_TYPE (tem1.op0);
657 tem1.opcode = TREE_CODE (tem1.op0);
658 vro1 = &tem1;
659 deref1 = false;
661 if (deref2 && vro2->opcode == ADDR_EXPR)
663 memset (&tem2, 0, sizeof (tem2));
664 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
665 tem2.type = TREE_TYPE (tem2.op0);
666 tem2.opcode = TREE_CODE (tem2.op0);
667 vro2 = &tem2;
668 deref2 = false;
670 if (deref1 != deref2)
671 return false;
672 if (!vn_reference_op_eq (vro1, vro2))
673 return false;
674 ++j;
675 ++i;
677 while (vr1->operands.length () != i
678 || vr2->operands.length () != j);
680 return true;
683 /* Copy the operations present in load/store REF into RESULT, a vector of
684 vn_reference_op_s's. */
686 static void
687 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
689 if (TREE_CODE (ref) == TARGET_MEM_REF)
691 vn_reference_op_s temp;
693 result->reserve (3);
695 memset (&temp, 0, sizeof (temp));
696 temp.type = TREE_TYPE (ref);
697 temp.opcode = TREE_CODE (ref);
698 temp.op0 = TMR_INDEX (ref);
699 temp.op1 = TMR_STEP (ref);
700 temp.op2 = TMR_OFFSET (ref);
701 temp.off = -1;
702 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
703 temp.base = MR_DEPENDENCE_BASE (ref);
704 result->quick_push (temp);
706 memset (&temp, 0, sizeof (temp));
707 temp.type = NULL_TREE;
708 temp.opcode = ERROR_MARK;
709 temp.op0 = TMR_INDEX2 (ref);
710 temp.off = -1;
711 result->quick_push (temp);
713 memset (&temp, 0, sizeof (temp));
714 temp.type = NULL_TREE;
715 temp.opcode = TREE_CODE (TMR_BASE (ref));
716 temp.op0 = TMR_BASE (ref);
717 temp.off = -1;
718 result->quick_push (temp);
719 return;
722 /* For non-calls, store the information that makes up the address. */
723 tree orig = ref;
724 while (ref)
726 vn_reference_op_s temp;
728 memset (&temp, 0, sizeof (temp));
729 temp.type = TREE_TYPE (ref);
730 temp.opcode = TREE_CODE (ref);
731 temp.off = -1;
733 switch (temp.opcode)
735 case MODIFY_EXPR:
736 temp.op0 = TREE_OPERAND (ref, 1);
737 break;
738 case WITH_SIZE_EXPR:
739 temp.op0 = TREE_OPERAND (ref, 1);
740 temp.off = 0;
741 break;
742 case MEM_REF:
743 /* The base address gets its own vn_reference_op_s structure. */
744 temp.op0 = TREE_OPERAND (ref, 1);
745 if (!mem_ref_offset (ref).to_shwi (&temp.off))
746 temp.off = -1;
747 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
748 temp.base = MR_DEPENDENCE_BASE (ref);
749 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
750 break;
751 case BIT_FIELD_REF:
752 /* Record bits, position and storage order. */
753 temp.op0 = TREE_OPERAND (ref, 1);
754 temp.op1 = TREE_OPERAND (ref, 2);
755 if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
756 temp.off = -1;
757 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
758 break;
759 case COMPONENT_REF:
760 /* The field decl is enough to unambiguously specify the field;
761 a matching type is not necessary and a mismatching type
762 is always a spurious difference. */
763 temp.type = NULL_TREE;
764 temp.op0 = TREE_OPERAND (ref, 1);
765 temp.op1 = TREE_OPERAND (ref, 2);
767 tree this_offset = component_ref_field_offset (ref);
768 if (this_offset
769 && poly_int_tree_p (this_offset))
771 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
772 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
774 poly_offset_int off
775 = (wi::to_poly_offset (this_offset)
776 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
777 /* Prohibit value-numbering zero offset components
778 of addresses the same before the pass folding
779 __builtin_object_size had a chance to run
780 (checking cfun->after_inlining does the
781 trick here). */
782 if (TREE_CODE (orig) != ADDR_EXPR
783 || maybe_ne (off, 0)
784 || cfun->after_inlining)
785 off.to_shwi (&temp.off);
789 break;
790 case ARRAY_RANGE_REF:
791 case ARRAY_REF:
793 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
794 /* Record index as operand. */
795 temp.op0 = TREE_OPERAND (ref, 1);
796 /* Always record lower bounds and element size. */
797 temp.op1 = array_ref_low_bound (ref);
798 /* But record element size in units of the type alignment. */
799 temp.op2 = TREE_OPERAND (ref, 3);
800 temp.align = eltype->type_common.align;
801 if (! temp.op2)
802 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
803 size_int (TYPE_ALIGN_UNIT (eltype)));
804 if (poly_int_tree_p (temp.op0)
805 && poly_int_tree_p (temp.op1)
806 && TREE_CODE (temp.op2) == INTEGER_CST)
808 poly_offset_int off = ((wi::to_poly_offset (temp.op0)
809 - wi::to_poly_offset (temp.op1))
810 * wi::to_offset (temp.op2)
811 * vn_ref_op_align_unit (&temp));
812 off.to_shwi (&temp.off);
815 break;
816 case VAR_DECL:
817 if (DECL_HARD_REGISTER (ref))
819 temp.op0 = ref;
820 break;
822 /* Fallthru. */
823 case PARM_DECL:
824 case CONST_DECL:
825 case RESULT_DECL:
826 /* Canonicalize decls to MEM[&decl] which is what we end up with
827 when valueizing MEM[ptr] with ptr = &decl. */
828 temp.opcode = MEM_REF;
829 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
830 temp.off = 0;
831 result->safe_push (temp);
832 temp.opcode = ADDR_EXPR;
833 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
834 temp.type = TREE_TYPE (temp.op0);
835 temp.off = -1;
836 break;
837 case STRING_CST:
838 case INTEGER_CST:
839 case COMPLEX_CST:
840 case VECTOR_CST:
841 case REAL_CST:
842 case FIXED_CST:
843 case CONSTRUCTOR:
844 case SSA_NAME:
845 temp.op0 = ref;
846 break;
847 case ADDR_EXPR:
848 if (is_gimple_min_invariant (ref))
850 temp.op0 = ref;
851 break;
853 break;
854 /* These are only interesting for their operands, their
855 existence, and their type. They will never be the last
856 ref in the chain of references (i.e. they require an
857 operand), so we don't have to put anything
858 for op* as it will be handled by the iteration. */
859 case REALPART_EXPR:
860 temp.off = 0;
861 break;
862 case VIEW_CONVERT_EXPR:
863 temp.off = 0;
864 temp.reverse = storage_order_barrier_p (ref);
865 break;
866 case IMAGPART_EXPR:
867 /* This is only interesting for its constant offset. */
868 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
869 break;
870 default:
871 gcc_unreachable ();
873 result->safe_push (temp);
875 if (REFERENCE_CLASS_P (ref)
876 || TREE_CODE (ref) == MODIFY_EXPR
877 || TREE_CODE (ref) == WITH_SIZE_EXPR
878 || (TREE_CODE (ref) == ADDR_EXPR
879 && !is_gimple_min_invariant (ref)))
880 ref = TREE_OPERAND (ref, 0);
881 else
882 ref = NULL_TREE;
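/* Illustrative example (hypothetical, not part of the original file): for
   a load from a.f[i], with a a local VAR_DECL, the walk above proceeds
   from the outermost reference inwards and, after canonicalizing the decl
   to MEM[&a], roughly produces the operand vector

     { ARRAY_REF (op0 = i, op1 = lower bound, op2 = scaled element size),
       COMPONENT_REF (op0 = FIELD_DECL f),
       MEM_REF (op0 = 0),
       ADDR_EXPR (op0 = &a) }

   where each off field holds the constant byte offset of that component
   when it is known and -1 otherwise.  */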
886 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
887 operands in *OPS, the reference alias set SET and the reference type TYPE.
888 Return true if something useful was produced. */
890 bool
891 ao_ref_init_from_vn_reference (ao_ref *ref,
892 alias_set_type set, tree type,
893 vec<vn_reference_op_s> ops)
895 vn_reference_op_t op;
896 unsigned i;
897 tree base = NULL_TREE;
898 tree *op0_p = &base;
899 poly_offset_int offset = 0;
900 poly_offset_int max_size;
901 poly_offset_int size = -1;
902 tree size_tree = NULL_TREE;
903 alias_set_type base_alias_set = -1;
905 /* First get the final access size from just the outermost expression. */
906 op = &ops[0];
907 if (op->opcode == COMPONENT_REF)
908 size_tree = DECL_SIZE (op->op0);
909 else if (op->opcode == BIT_FIELD_REF)
910 size_tree = op->op0;
911 else
913 machine_mode mode = TYPE_MODE (type);
914 if (mode == BLKmode)
915 size_tree = TYPE_SIZE (type);
916 else
917 size = GET_MODE_BITSIZE (mode);
919 if (size_tree != NULL_TREE
920 && poly_int_tree_p (size_tree))
921 size = wi::to_poly_offset (size_tree);
923 /* Initially, maxsize is the same as the accessed element size.
924 In the following it will only grow (or become -1). */
925 max_size = size;
927 /* Compute cumulative bit-offset for nested component-refs and array-refs,
928 and find the ultimate containing object. */
929 FOR_EACH_VEC_ELT (ops, i, op)
931 switch (op->opcode)
933 /* These may be in the reference ops, but we cannot do anything
934 sensible with them here. */
935 case ADDR_EXPR:
936 /* Apart from ADDR_EXPR arguments to MEM_REF. */
937 if (base != NULL_TREE
938 && TREE_CODE (base) == MEM_REF
939 && op->op0
940 && DECL_P (TREE_OPERAND (op->op0, 0)))
942 vn_reference_op_t pop = &ops[i-1];
943 base = TREE_OPERAND (op->op0, 0);
944 if (known_eq (pop->off, -1))
946 max_size = -1;
947 offset = 0;
949 else
950 offset += pop->off * BITS_PER_UNIT;
951 op0_p = NULL;
952 break;
954 /* Fallthru. */
955 case CALL_EXPR:
956 return false;
958 /* Record the base objects. */
959 case MEM_REF:
960 base_alias_set = get_deref_alias_set (op->op0);
961 *op0_p = build2 (MEM_REF, op->type,
962 NULL_TREE, op->op0);
963 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
964 MR_DEPENDENCE_BASE (*op0_p) = op->base;
965 op0_p = &TREE_OPERAND (*op0_p, 0);
966 break;
968 case VAR_DECL:
969 case PARM_DECL:
970 case RESULT_DECL:
971 case SSA_NAME:
972 *op0_p = op->op0;
973 op0_p = NULL;
974 break;
976 /* And now the usual component-reference style ops. */
977 case BIT_FIELD_REF:
978 offset += wi::to_poly_offset (op->op1);
979 break;
981 case COMPONENT_REF:
983 tree field = op->op0;
984 /* We do not have a complete COMPONENT_REF tree here so we
985 cannot use component_ref_field_offset. Do the interesting
986 parts manually. */
987 tree this_offset = DECL_FIELD_OFFSET (field);
989 if (op->op1 || !poly_int_tree_p (this_offset))
990 max_size = -1;
991 else
993 poly_offset_int woffset = (wi::to_poly_offset (this_offset)
994 << LOG2_BITS_PER_UNIT);
995 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
996 offset += woffset;
998 break;
1001 case ARRAY_RANGE_REF:
1002 case ARRAY_REF:
1003 /* We recorded the lower bound and the element size. */
1004 if (!poly_int_tree_p (op->op0)
1005 || !poly_int_tree_p (op->op1)
1006 || TREE_CODE (op->op2) != INTEGER_CST)
1007 max_size = -1;
1008 else
1010 poly_offset_int woffset
1011 = wi::sext (wi::to_poly_offset (op->op0)
1012 - wi::to_poly_offset (op->op1),
1013 TYPE_PRECISION (TREE_TYPE (op->op0)));
1014 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1015 woffset <<= LOG2_BITS_PER_UNIT;
1016 offset += woffset;
1018 break;
1020 case REALPART_EXPR:
1021 break;
1023 case IMAGPART_EXPR:
1024 offset += size;
1025 break;
1027 case VIEW_CONVERT_EXPR:
1028 break;
1030 case STRING_CST:
1031 case INTEGER_CST:
1032 case COMPLEX_CST:
1033 case VECTOR_CST:
1034 case REAL_CST:
1035 case CONSTRUCTOR:
1036 case CONST_DECL:
1037 return false;
1039 default:
1040 return false;
1044 if (base == NULL_TREE)
1045 return false;
1047 ref->ref = NULL_TREE;
1048 ref->base = base;
1049 ref->ref_alias_set = set;
1050 if (base_alias_set != -1)
1051 ref->base_alias_set = base_alias_set;
1052 else
1053 ref->base_alias_set = get_alias_set (base);
1054 /* We discount volatiles from value-numbering elsewhere. */
1055 ref->volatile_p = false;
1057 if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1059 ref->offset = 0;
1060 ref->size = -1;
1061 ref->max_size = -1;
1062 return true;
1065 if (!offset.to_shwi (&ref->offset))
1067 ref->offset = 0;
1068 ref->max_size = -1;
1069 return true;
1072 if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1073 ref->max_size = -1;
1075 return true;
1078 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1079 vn_reference_op_s's. */
1081 static void
1082 copy_reference_ops_from_call (gcall *call,
1083 vec<vn_reference_op_s> *result)
1085 vn_reference_op_s temp;
1086 unsigned i;
1087 tree lhs = gimple_call_lhs (call);
1088 int lr;
1090 /* If two calls have a different non-SSA lhs, vdef value numbers should be
1091 different. By adding the lhs to the vector here, we ensure that the
1092 hashcode is different, guaranteeing a different value number. */
1093 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1095 memset (&temp, 0, sizeof (temp));
1096 temp.opcode = MODIFY_EXPR;
1097 temp.type = TREE_TYPE (lhs);
1098 temp.op0 = lhs;
1099 temp.off = -1;
1100 result->safe_push (temp);
1103 /* Copy the type, opcode, function, static chain and EH region, if any. */
1104 memset (&temp, 0, sizeof (temp));
1105 temp.type = gimple_call_return_type (call);
1106 temp.opcode = CALL_EXPR;
1107 temp.op0 = gimple_call_fn (call);
1108 temp.op1 = gimple_call_chain (call);
1109 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1110 temp.op2 = size_int (lr);
1111 temp.off = -1;
1112 result->safe_push (temp);
1114 /* Copy the call arguments. As they can be references as well,
1115 just chain them together. */
1116 for (i = 0; i < gimple_call_num_args (call); ++i)
1118 tree callarg = gimple_call_arg (call, i);
1119 copy_reference_ops_from_ref (callarg, result);
1123 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1124 *I_P to point to the last element of the replacement. */
1125 static bool
1126 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1127 unsigned int *i_p)
1129 unsigned int i = *i_p;
1130 vn_reference_op_t op = &(*ops)[i];
1131 vn_reference_op_t mem_op = &(*ops)[i - 1];
1132 tree addr_base;
1133 poly_int64 addr_offset = 0;
1135 /* The only thing we have to do is, for &OBJ.foo.bar, add the offset
1136 of .foo.bar to the preceding MEM_REF offset and replace the
1137 address with &OBJ. */
1138 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1139 &addr_offset);
1140 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1141 if (addr_base != TREE_OPERAND (op->op0, 0))
1143 poly_offset_int off
1144 = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1145 SIGNED)
1146 + addr_offset);
1147 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1148 op->op0 = build_fold_addr_expr (addr_base);
1149 if (tree_fits_shwi_p (mem_op->op0))
1150 mem_op->off = tree_to_shwi (mem_op->op0);
1151 else
1152 mem_op->off = -1;
1153 return true;
1155 return false;
1158 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1159 *I_P to point to the last element of the replacement. */
1160 static bool
1161 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1162 unsigned int *i_p)
1164 unsigned int i = *i_p;
1165 vn_reference_op_t op = &(*ops)[i];
1166 vn_reference_op_t mem_op = &(*ops)[i - 1];
1167 gimple *def_stmt;
1168 enum tree_code code;
1169 poly_offset_int off;
1171 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1172 if (!is_gimple_assign (def_stmt))
1173 return false;
1175 code = gimple_assign_rhs_code (def_stmt);
1176 if (code != ADDR_EXPR
1177 && code != POINTER_PLUS_EXPR)
1178 return false;
1180 off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
1182 /* The only thing we have to do is, for &OBJ.foo.bar, add the offset
1183 of .foo.bar to the preceding MEM_REF offset and replace the
1184 address with &OBJ. */
1185 if (code == ADDR_EXPR)
1187 tree addr, addr_base;
1188 poly_int64 addr_offset;
1190 addr = gimple_assign_rhs1 (def_stmt);
1191 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1192 &addr_offset);
1193 /* If that didn't work because the address isn't invariant, propagate
1194 the reference tree from the address operation in case the current
1195 dereference isn't offsetted. */
1196 if (!addr_base
1197 && *i_p == ops->length () - 1
1198 && known_eq (off, 0)
1199 /* This makes us disable this transform for PRE where the
1200 reference ops might also be used for code insertion, which
1201 is invalid. */
1202 && default_vn_walk_kind == VN_WALKREWRITE)
1204 auto_vec<vn_reference_op_s, 32> tem;
1205 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1206 /* Make sure to preserve TBAA info. The only objects not
1207 wrapped in MEM_REFs that can have their address taken are
1208 STRING_CSTs. */
1209 if (tem.length () >= 2
1210 && tem[tem.length () - 2].opcode == MEM_REF)
1212 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1213 new_mem_op->op0
1214 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1215 wi::to_poly_wide (new_mem_op->op0));
1217 else
1218 gcc_assert (tem.last ().opcode == STRING_CST);
1219 ops->pop ();
1220 ops->pop ();
1221 ops->safe_splice (tem);
1222 --*i_p;
1223 return true;
1225 if (!addr_base
1226 || TREE_CODE (addr_base) != MEM_REF
1227 || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1228 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base, 0))))
1229 return false;
1231 off += addr_offset;
1232 off += mem_ref_offset (addr_base);
1233 op->op0 = TREE_OPERAND (addr_base, 0);
1235 else
1237 tree ptr, ptroff;
1238 ptr = gimple_assign_rhs1 (def_stmt);
1239 ptroff = gimple_assign_rhs2 (def_stmt);
1240 if (TREE_CODE (ptr) != SSA_NAME
1241 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1242 || !poly_int_tree_p (ptroff))
1243 return false;
1245 off += wi::to_poly_offset (ptroff);
1246 op->op0 = ptr;
1249 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1250 if (tree_fits_shwi_p (mem_op->op0))
1251 mem_op->off = tree_to_shwi (mem_op->op0);
1252 else
1253 mem_op->off = -1;
1254 if (TREE_CODE (op->op0) == SSA_NAME)
1255 op->op0 = SSA_VAL (op->op0);
1256 if (TREE_CODE (op->op0) != SSA_NAME)
1257 op->opcode = TREE_CODE (op->op0);
1259 /* And recurse. */
1260 if (TREE_CODE (op->op0) == SSA_NAME)
1261 vn_reference_maybe_forwprop_address (ops, i_p);
1262 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1263 vn_reference_fold_indirect (ops, i_p);
1264 return true;
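/* Illustrative example (hypothetical, not part of the original file): for

     q_1 = p_2 + 8;
     ... = MEM[q_1 + 4];

   the POINTER_PLUS_EXPR is folded into the MEM_REF operand so the lookup
   effectively sees MEM[p_2 + 12]; likewise a definition q_1 = &MEM[p_2].f
   has the offset of .f added to the MEM_REF offset and the address
   operand replaced by p_2.  The function then recurses in case the new
   address operand can be propagated further.  */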
1267 /* Optimize the reference REF to a constant if possible or return
1268 NULL_TREE if not. */
1270 tree
1271 fully_constant_vn_reference_p (vn_reference_t ref)
1273 vec<vn_reference_op_s> operands = ref->operands;
1274 vn_reference_op_t op;
1276 /* Try to simplify the translated expression if it is
1277 a call to a builtin function with at most two arguments. */
1278 op = &operands[0];
1279 if (op->opcode == CALL_EXPR
1280 && TREE_CODE (op->op0) == ADDR_EXPR
1281 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1282 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1283 && operands.length () >= 2
1284 && operands.length () <= 3)
1286 vn_reference_op_t arg0, arg1 = NULL;
1287 bool anyconst = false;
1288 arg0 = &operands[1];
1289 if (operands.length () > 2)
1290 arg1 = &operands[2];
1291 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1292 || (arg0->opcode == ADDR_EXPR
1293 && is_gimple_min_invariant (arg0->op0)))
1294 anyconst = true;
1295 if (arg1
1296 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1297 || (arg1->opcode == ADDR_EXPR
1298 && is_gimple_min_invariant (arg1->op0))))
1299 anyconst = true;
1300 if (anyconst)
1302 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1303 arg1 ? 2 : 1,
1304 arg0->op0,
1305 arg1 ? arg1->op0 : NULL);
1306 if (folded
1307 && TREE_CODE (folded) == NOP_EXPR)
1308 folded = TREE_OPERAND (folded, 0);
1309 if (folded
1310 && is_gimple_min_invariant (folded))
1311 return folded;
1315 /* Simplify reads from constants or constant initializers. */
1316 else if (BITS_PER_UNIT == 8
1317 && is_gimple_reg_type (ref->type)
1318 && (!INTEGRAL_TYPE_P (ref->type)
1319 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1321 poly_int64 off = 0;
1322 HOST_WIDE_INT size;
1323 if (INTEGRAL_TYPE_P (ref->type))
1324 size = TYPE_PRECISION (ref->type);
1325 else
1326 size = tree_to_shwi (TYPE_SIZE (ref->type));
1327 if (size % BITS_PER_UNIT != 0
1328 || size > MAX_BITSIZE_MODE_ANY_MODE)
1329 return NULL_TREE;
1330 size /= BITS_PER_UNIT;
1331 unsigned i;
1332 for (i = 0; i < operands.length (); ++i)
1334 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1336 ++i;
1337 break;
1339 if (known_eq (operands[i].off, -1))
1340 return NULL_TREE;
1341 off += operands[i].off;
1342 if (operands[i].opcode == MEM_REF)
1344 ++i;
1345 break;
1348 vn_reference_op_t base = &operands[--i];
1349 tree ctor = error_mark_node;
1350 tree decl = NULL_TREE;
1351 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1352 ctor = base->op0;
1353 else if (base->opcode == MEM_REF
1354 && base[1].opcode == ADDR_EXPR
1355 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1356 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1357 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1359 decl = TREE_OPERAND (base[1].op0, 0);
1360 if (TREE_CODE (decl) == STRING_CST)
1361 ctor = decl;
1362 else
1363 ctor = ctor_for_folding (decl);
1365 if (ctor == NULL_TREE)
1366 return build_zero_cst (ref->type);
1367 else if (ctor != error_mark_node)
1369 HOST_WIDE_INT const_off;
1370 if (decl)
1372 tree res = fold_ctor_reference (ref->type, ctor,
1373 off * BITS_PER_UNIT,
1374 size * BITS_PER_UNIT, decl);
1375 if (res)
1377 STRIP_USELESS_TYPE_CONVERSION (res);
1378 if (is_gimple_min_invariant (res))
1379 return res;
1382 else if (off.is_constant (&const_off))
1384 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1385 int len = native_encode_expr (ctor, buf, size, const_off);
1386 if (len > 0)
1387 return native_interpret_expr (ref->type, buf, len);
1392 return NULL_TREE;
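/* Illustrative example (hypothetical, not part of the original file):
   given

     static const int tab[2] = { 1, 2 };
     ... = tab[1];

   the reference operands resolve to the constant initializer of tab at a
   byte offset of 4 (assuming 32-bit int), and fold_ctor_reference or the
   native_encode_expr/native_interpret_expr pair above returns the
   constant 2.  A call to a builtin with constant arguments may likewise
   fold to a constant via build_call_expr.  */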
1395 /* Return true if OPS contain a storage order barrier. */
1397 static bool
1398 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1400 vn_reference_op_t op;
1401 unsigned i;
1403 FOR_EACH_VEC_ELT (ops, i, op)
1404 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1405 return true;
1407 return false;
1410 /* Transform any SSA_NAMEs in a vector of vn_reference_op_s
1411 structures into their value numbers. This is done in-place, and
1412 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1413 whether any operands were valueized. */
1415 static vec<vn_reference_op_s>
1416 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1418 vn_reference_op_t vro;
1419 unsigned int i;
1421 *valueized_anything = false;
1423 FOR_EACH_VEC_ELT (orig, i, vro)
1425 if (vro->opcode == SSA_NAME
1426 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1428 tree tem = SSA_VAL (vro->op0);
1429 if (tem != vro->op0)
1431 *valueized_anything = true;
1432 vro->op0 = tem;
1434 /* If it transforms from an SSA_NAME to a constant, update
1435 the opcode. */
1436 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1437 vro->opcode = TREE_CODE (vro->op0);
1439 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1441 tree tem = SSA_VAL (vro->op1);
1442 if (tem != vro->op1)
1444 *valueized_anything = true;
1445 vro->op1 = tem;
1448 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1450 tree tem = SSA_VAL (vro->op2);
1451 if (tem != vro->op2)
1453 *valueized_anything = true;
1454 vro->op2 = tem;
1457 /* If it transforms from an SSA_NAME to an address, fold with
1458 a preceding indirect reference. */
1459 if (i > 0
1460 && vro->op0
1461 && TREE_CODE (vro->op0) == ADDR_EXPR
1462 && orig[i - 1].opcode == MEM_REF)
1464 if (vn_reference_fold_indirect (&orig, &i))
1465 *valueized_anything = true;
1467 else if (i > 0
1468 && vro->opcode == SSA_NAME
1469 && orig[i - 1].opcode == MEM_REF)
1471 if (vn_reference_maybe_forwprop_address (&orig, &i))
1472 *valueized_anything = true;
1474 /* If it transforms a non-constant ARRAY_REF into a constant
1475 one, adjust the constant offset. */
1476 else if (vro->opcode == ARRAY_REF
1477 && known_eq (vro->off, -1)
1478 && poly_int_tree_p (vro->op0)
1479 && poly_int_tree_p (vro->op1)
1480 && TREE_CODE (vro->op2) == INTEGER_CST)
1482 poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1483 - wi::to_poly_offset (vro->op1))
1484 * wi::to_offset (vro->op2)
1485 * vn_ref_op_align_unit (vro));
1486 off.to_shwi (&vro->off);
1490 return orig;
1493 static vec<vn_reference_op_s>
1494 valueize_refs (vec<vn_reference_op_s> orig)
1496 bool tem;
1497 return valueize_refs_1 (orig, &tem);
1500 static vec<vn_reference_op_s> shared_lookup_references;
1502 /* Create a vector of vn_reference_op_s structures from REF, a
1503 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1504 this function. *VALUEIZED_ANYTHING will specify whether any
1505 operands were valueized. */
1507 static vec<vn_reference_op_s>
1508 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1510 if (!ref)
1511 return vNULL;
1512 shared_lookup_references.truncate (0);
1513 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1514 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1515 valueized_anything);
1516 return shared_lookup_references;
1519 /* Create a vector of vn_reference_op_s structures from CALL, a
1520 call statement. The vector is shared among all callers of
1521 this function. */
1523 static vec<vn_reference_op_s>
1524 valueize_shared_reference_ops_from_call (gcall *call)
1526 if (!call)
1527 return vNULL;
1528 shared_lookup_references.truncate (0);
1529 copy_reference_ops_from_call (call, &shared_lookup_references);
1530 shared_lookup_references = valueize_refs (shared_lookup_references);
1531 return shared_lookup_references;
1534 /* Lookup a SCCVN reference operation VR in the current hash table.
1535 Returns the resulting value number if it exists in the hash table,
1536 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1537 vn_reference_t stored in the hashtable if something is found. */
1539 static tree
1540 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1542 vn_reference_s **slot;
1543 hashval_t hash;
1545 hash = vr->hashcode;
1546 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1547 if (slot)
1549 if (vnresult)
1550 *vnresult = (vn_reference_t)*slot;
1551 return ((vn_reference_t)*slot)->result;
1554 return NULL_TREE;
1557 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1558 with the current VUSE and performs the expression lookup. */
1560 static void *
1561 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1562 unsigned int cnt, void *vr_)
1564 vn_reference_t vr = (vn_reference_t)vr_;
1565 vn_reference_s **slot;
1566 hashval_t hash;
1568 /* This bounds the stmt walks we perform on reference lookups
1569 to O(1) instead of O(N) where N is the number of dominating
1570 stores. */
1571 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1572 return (void *)-1;
1574 if (last_vuse_ptr)
1575 *last_vuse_ptr = vuse;
1577 /* Fixup vuse and hash. */
1578 if (vr->vuse)
1579 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1580 vr->vuse = vuse_ssa_val (vuse);
1581 if (vr->vuse)
1582 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1584 hash = vr->hashcode;
1585 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1586 if (slot)
1587 return *slot;
1589 return NULL;
1592 /* Lookup an existing or insert a new vn_reference entry into the
1593 value table for the VUSE, SET, TYPE, OPERANDS reference with
1594 value VALUE, which is either a constant or an SSA name. */
1596 static vn_reference_t
1597 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1598 alias_set_type set,
1599 tree type,
1600 vec<vn_reference_op_s,
1601 va_heap> operands,
1602 tree value)
1604 vn_reference_s vr1;
1605 vn_reference_t result;
1606 unsigned value_id;
1607 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1608 vr1.operands = operands;
1609 vr1.type = type;
1610 vr1.set = set;
1611 vr1.hashcode = vn_reference_compute_hash (&vr1);
1612 if (vn_reference_lookup_1 (&vr1, &result))
1613 return result;
1614 if (TREE_CODE (value) == SSA_NAME)
1615 value_id = VN_INFO (value)->value_id;
1616 else
1617 value_id = get_or_alloc_constant_value_id (value);
1618 return vn_reference_insert_pieces (vuse, set, type,
1619 operands.copy (), value, value_id);
1622 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
1623 static unsigned int vn_nary_length_from_stmt (gimple *);
1624 static vn_nary_op_t alloc_vn_nary_op_noinit (unsigned int, obstack *);
1625 static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
1626 vn_nary_op_table_type *, bool);
1627 static void init_vn_nary_op_from_stmt (vn_nary_op_t, gimple *);
1629 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
1631 static tree
1632 vn_lookup_simplify_result (gimple_match_op *res_op)
1634 if (!res_op->code.is_tree_code ())
1635 return NULL_TREE;
1636 tree *ops = res_op->ops;
1637 unsigned int length = res_op->num_ops;
1638 if (res_op->code == CONSTRUCTOR
1639 /* ??? We're arriving here with SCCVNs view, decomposed CONSTRUCTOR
1640 and GIMPLEs / match-and-simplifies, CONSTRUCTOR as GENERIC tree. */
1641 && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
1643 length = CONSTRUCTOR_NELTS (res_op->ops[0]);
1644 ops = XALLOCAVEC (tree, length);
1645 for (unsigned i = 0; i < length; ++i)
1646 ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
1648 vn_nary_op_t vnresult = NULL;
1649 return vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
1650 res_op->type, ops, &vnresult);
1653 /* Return a value-number for RCODE OPS... either by looking up an existing
1654 value-number for the simplified result or by inserting the operation if
1655 INSERT is true. */
1657 static tree
1658 vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert)
1660 tree result = NULL_TREE;
1661 /* We will be creating a value number for
1662 RCODE (OPS...).
1663 So first simplify and lookup this expression to see if it
1664 is already available. */
1665 mprts_hook = vn_lookup_simplify_result;
1666 bool res = false;
1667 switch (TREE_CODE_LENGTH ((tree_code) res_op->code))
1669 case 1:
1670 res = gimple_resimplify1 (NULL, res_op, vn_valueize);
1671 break;
1672 case 2:
1673 res = gimple_resimplify2 (NULL, res_op, vn_valueize);
1674 break;
1675 case 3:
1676 res = gimple_resimplify3 (NULL, res_op, vn_valueize);
1677 break;
1679 mprts_hook = NULL;
1680 gimple *new_stmt = NULL;
1681 if (res
1682 && gimple_simplified_result_is_gimple_val (res_op))
1683 /* The expression is already available. */
1684 result = res_op->ops[0];
1685 else
1687 tree val = vn_lookup_simplify_result (res_op);
1688 if (!val && insert)
1690 gimple_seq stmts = NULL;
1691 result = maybe_push_res_to_seq (res_op, &stmts);
1692 if (result)
1694 gcc_assert (gimple_seq_singleton_p (stmts));
1695 new_stmt = gimple_seq_first_stmt (stmts);
1698 else
1699 /* The expression is already available. */
1700 result = val;
1702 if (new_stmt)
1704 /* The expression is not yet available, value-number lhs to
1705 the new SSA_NAME we created. */
1706 /* Initialize value-number information properly. */
1707 VN_INFO_GET (result)->valnum = result;
1708 VN_INFO (result)->value_id = get_next_value_id ();
1709 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1710 new_stmt);
1711 VN_INFO (result)->needs_insertion = true;
1712 /* ??? PRE phi-translation inserts NARYs without corresponding
1713 SSA name result. Re-use those but set their result according
1714 to the stmt we just built. */
1715 vn_nary_op_t nary = NULL;
1716 vn_nary_op_lookup_stmt (new_stmt, &nary);
1717 if (nary)
1719 gcc_assert (nary->result == NULL_TREE);
1720 nary->result = gimple_assign_lhs (new_stmt);
1722 /* As all "inserted" statements are singleton SCCs, insert
1723 to the valid table. This is strictly needed to
1724 avoid re-generating new value SSA_NAMEs for the same
1725 expression during SCC iteration over and over (the
1726 optimistic table gets cleared after each iteration).
1727 We do not need to insert into the optimistic table, as
1728 lookups there will fall back to the valid table. */
1729 else
1731 unsigned int length = vn_nary_length_from_stmt (new_stmt);
1732 vn_nary_op_t vno1
1733 = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
1734 vno1->value_id = VN_INFO (result)->value_id;
1735 vno1->length = length;
1736 vno1->result = result;
1737 init_vn_nary_op_from_stmt (vno1, new_stmt);
1738 vn_nary_op_insert_into (vno1, valid_info->nary, true);
1739 /* Also do not link it into the undo chain. */
1740 last_inserted_nary = vno1->next;
1741 vno1->next = (vn_nary_op_t)(void *)-1;
1743 if (dump_file && (dump_flags & TDF_DETAILS))
1745 fprintf (dump_file, "Inserting name ");
1746 print_generic_expr (dump_file, result);
1747 fprintf (dump_file, " for expression ");
1748 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1749 fprintf (dump_file, "\n");
1752 return result;
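/* Illustrative example (hypothetical, not part of the original file):
   asked for a value number for PLUS_EXPR <x_1, 0>, gimple_resimplify2
   reduces the operation to the gimple value x_1, which is returned
   directly.  An expression that does not simplify to an existing value
   gets a new SSA name, is marked as needing insertion, and is recorded
   in the valid table so repeated SCC iterations reuse the same name.  */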
1755 /* Return a value-number for RCODE OPS... either by looking up an existing
1756 value-number for the simplified result or by inserting the operation. */
1758 static tree
1759 vn_nary_build_or_lookup (gimple_match_op *res_op)
1761 return vn_nary_build_or_lookup_1 (res_op, true);
1764 /* Try to simplify the expression RCODE OPS... of type TYPE and return
1765 its value if present. */
1767 tree
1768 vn_nary_simplify (vn_nary_op_t nary)
1770 if (nary->length > gimple_match_op::MAX_NUM_OPS)
1771 return NULL_TREE;
1772 gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
1773 nary->type, nary->length);
1774 memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
1775 return vn_nary_build_or_lookup_1 (&op, false);
1779 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1780 from the statement defining VUSE and if not successful tries to
1781 translate *REFP and VR_ through an aggregate copy at the definition
1782 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1783 of *REF and *VR. If only disambiguation was performed then
1784 *DISAMBIGUATE_ONLY is set to true. */
1786 static void *
1787 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1788 bool *disambiguate_only)
1790 vn_reference_t vr = (vn_reference_t)vr_;
1791 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1792 tree base = ao_ref_base (ref);
1793 HOST_WIDE_INT offseti, maxsizei;
1794 static vec<vn_reference_op_s> lhs_ops;
1795 ao_ref lhs_ref;
1796 bool lhs_ref_ok = false;
1797 poly_int64 copy_size;
1799 /* If the reference is based on a parameter that was determined as
1800 pointing to readonly memory, it doesn't change. */
1801 if (TREE_CODE (base) == MEM_REF
1802 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1803 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1804 && bitmap_bit_p (const_parms,
1805 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1807 *disambiguate_only = true;
1808 return NULL;
1811 /* First try to disambiguate after value-replacing in the definition's LHS. */
1812 if (is_gimple_assign (def_stmt))
1814 tree lhs = gimple_assign_lhs (def_stmt);
1815 bool valueized_anything = false;
1816 /* Avoid re-allocation overhead. */
1817 lhs_ops.truncate (0);
1818 copy_reference_ops_from_ref (lhs, &lhs_ops);
1819 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1820 if (valueized_anything)
1822 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1823 get_alias_set (lhs),
1824 TREE_TYPE (lhs), lhs_ops);
1825 if (lhs_ref_ok
1826 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1828 *disambiguate_only = true;
1829 return NULL;
1832 else
1834 ao_ref_init (&lhs_ref, lhs);
1835 lhs_ref_ok = true;
1838 /* If we reach a clobbering statement try to skip it and see if
1839 we find a VN result with exactly the same value as the
1840 possible clobber. In this case we can ignore the clobber
1841 and return the found value.
1842 Note that we don't need to worry about partial overlapping
1843 accesses as we then can use TBAA to disambiguate against the
1844 clobbering statement when looking up a load (thus the
1845 VN_WALKREWRITE guard). */
1846 if (vn_walk_kind == VN_WALKREWRITE
1847 && is_gimple_reg_type (TREE_TYPE (lhs))
1848 && types_compatible_p (TREE_TYPE (lhs), vr->type))
1850 tree *saved_last_vuse_ptr = last_vuse_ptr;
1851 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
1852 last_vuse_ptr = NULL;
1853 tree saved_vuse = vr->vuse;
1854 hashval_t saved_hashcode = vr->hashcode;
1855 void *res = vn_reference_lookup_2 (ref,
1856 gimple_vuse (def_stmt), 0, vr);
1857 /* Need to restore vr->vuse and vr->hashcode. */
1858 vr->vuse = saved_vuse;
1859 vr->hashcode = saved_hashcode;
1860 last_vuse_ptr = saved_last_vuse_ptr;
1861 if (res && res != (void *)-1)
1863 vn_reference_t vnresult = (vn_reference_t) res;
1864 if (vnresult->result
1865 && operand_equal_p (vnresult->result,
1866 gimple_assign_rhs1 (def_stmt), 0))
1867 return res;
1871 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1872 && gimple_call_num_args (def_stmt) <= 4)
1874 /* For builtin calls, valueize the arguments and call the
1875 alias oracle again. Valueization may improve points-to
1876 info of pointers and constify size and position arguments.
1877 Originally this was motivated by PR61034 which has
1878 conditional calls to free falsely clobbering ref because
1879 of imprecise points-to info of the argument. */
1880 tree oldargs[4];
1881 bool valueized_anything = false;
1882 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1884 oldargs[i] = gimple_call_arg (def_stmt, i);
1885 tree val = vn_valueize (oldargs[i]);
1886 if (val != oldargs[i])
1888 gimple_call_set_arg (def_stmt, i, val);
1889 valueized_anything = true;
1892 if (valueized_anything)
1894 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1895 ref);
1896 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1897 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1898 if (!res)
1900 *disambiguate_only = true;
1901 return NULL;
1906 if (*disambiguate_only)
1907 return (void *)-1;
1909 /* If we cannot constrain the size of the reference we cannot
1910 test if anything kills it. */
1911 if (!ref->max_size_known_p ())
1912 return (void *)-1;
1914 poly_int64 offset = ref->offset;
1915 poly_int64 maxsize = ref->max_size;
1917 /* We can't deduce anything useful from clobbers. */
1918 if (gimple_clobber_p (def_stmt))
1919 return (void *)-1;
1921 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1922 from that definition.
1923 1) Memset. */
1924 if (is_gimple_reg_type (vr->type)
1925 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1926 && (integer_zerop (gimple_call_arg (def_stmt, 1))
1927 || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
1928 || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
1929 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1930 && offset.is_constant (&offseti)
1931 && offseti % BITS_PER_UNIT == 0))
1932 && poly_int_tree_p (gimple_call_arg (def_stmt, 2))
1933 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
1934 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
1936 tree base2;
1937 poly_int64 offset2, size2, maxsize2;
1938 bool reverse;
1939 tree ref2 = gimple_call_arg (def_stmt, 0);
1940 if (TREE_CODE (ref2) == SSA_NAME)
1942 ref2 = SSA_VAL (ref2);
1943 if (TREE_CODE (ref2) == SSA_NAME
1944 && (TREE_CODE (base) != MEM_REF
1945 || TREE_OPERAND (base, 0) != ref2))
1947 gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
1948 if (gimple_assign_single_p (def_stmt)
1949 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
1950 ref2 = gimple_assign_rhs1 (def_stmt);
1953 if (TREE_CODE (ref2) == ADDR_EXPR)
1955 ref2 = TREE_OPERAND (ref2, 0);
1956 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1957 &reverse);
1958 if (!known_size_p (maxsize2)
1959 || !known_eq (maxsize2, size2)
1960 || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
1961 return (void *)-1;
1963 else if (TREE_CODE (ref2) == SSA_NAME)
1965 poly_int64 soff;
1966 if (TREE_CODE (base) != MEM_REF
1967 || !(mem_ref_offset (base) << LOG2_BITS_PER_UNIT).to_shwi (&soff))
1968 return (void *)-1;
1969 offset += soff;
1970 offset2 = 0;
1971 if (TREE_OPERAND (base, 0) != ref2)
1973 gimple *def = SSA_NAME_DEF_STMT (ref2);
1974 if (is_gimple_assign (def)
1975 && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
1976 && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
1977 && poly_int_tree_p (gimple_assign_rhs2 (def))
1978 && (wi::to_poly_offset (gimple_assign_rhs2 (def))
1979 << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
1981 ref2 = gimple_assign_rhs1 (def);
1982 if (TREE_CODE (ref2) == SSA_NAME)
1983 ref2 = SSA_VAL (ref2);
1985 else
1986 return (void *)-1;
1989 else
1990 return (void *)-1;
1991 tree len = gimple_call_arg (def_stmt, 2);
1992 if (known_subrange_p (offset, maxsize, offset2,
1993 wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
1995 tree val;
1996 if (integer_zerop (gimple_call_arg (def_stmt, 1)))
1997 val = build_zero_cst (vr->type);
1998 else if (INTEGRAL_TYPE_P (vr->type)
1999 && known_eq (ref->size, 8))
2001 gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
2002 vr->type, gimple_call_arg (def_stmt, 1));
2003 val = vn_nary_build_or_lookup (&res_op);
2004 if (!val
2005 || (TREE_CODE (val) == SSA_NAME
2006 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2007 return (void *)-1;
2009 else
2011 unsigned len = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type));
2012 unsigned char *buf = XALLOCAVEC (unsigned char, len);
2013 memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
2014 len);
2015 val = native_interpret_expr (vr->type, buf, len);
2016 if (!val)
2017 return (void *)-1;
2019 return vn_reference_lookup_or_insert_for_pieces
2020 (vuse, vr->set, vr->type, vr->operands, val);
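/* As a rough illustration of case 1) (a hypothetical sketch, not taken
   from a testcase): a load fully covered by a preceding memset is
   value-numbered to the fill value, e.g.

     struct S { int a; int b; } s;
     memset (&s, 0, sizeof s);
     int x = s.b;

   gives x the value number 0.  A non-zero fill byte is handled either
   through the NOP_EXPR path for an 8-bit integral read or by decoding
   the repeated byte pattern with native_interpret_expr.  */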
2024 /* 2) Assignment from an empty CONSTRUCTOR. */
2025 else if (is_gimple_reg_type (vr->type)
2026 && gimple_assign_single_p (def_stmt)
2027 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
2028 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
2030 tree base2;
2031 poly_int64 offset2, size2, maxsize2;
2032 bool reverse;
2033 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2034 &offset2, &size2, &maxsize2, &reverse);
2035 if (known_size_p (maxsize2)
2036 && operand_equal_p (base, base2, 0)
2037 && known_subrange_p (offset, maxsize, offset2, size2))
2039 tree val = build_zero_cst (vr->type);
2040 return vn_reference_lookup_or_insert_for_pieces
2041 (vuse, vr->set, vr->type, vr->operands, val);
2045 /* 3) Assignment from a constant. We can use fold's native encode/interpret
2046 routines to extract the assigned bits. */
2047 else if (known_eq (ref->size, maxsize)
2048 && is_gimple_reg_type (vr->type)
2049 && !contains_storage_order_barrier_p (vr->operands)
2050 && gimple_assign_single_p (def_stmt)
2051 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2052 /* native_encode and native_interpret operate on arrays of bytes
2053 and so fundamentally need a compile-time size and offset. */
2054 && maxsize.is_constant (&maxsizei)
2055 && maxsizei % BITS_PER_UNIT == 0
2056 && offset.is_constant (&offseti)
2057 && offseti % BITS_PER_UNIT == 0
2058 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2059 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2060 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2062 tree base2;
2063 HOST_WIDE_INT offset2, size2;
2064 bool reverse;
2065 base2 = get_ref_base_and_extent_hwi (gimple_assign_lhs (def_stmt),
2066 &offset2, &size2, &reverse);
2067 if (base2
2068 && !reverse
2069 && size2 % BITS_PER_UNIT == 0
2070 && offset2 % BITS_PER_UNIT == 0
2071 && operand_equal_p (base, base2, 0)
2072 && known_subrange_p (offseti, maxsizei, offset2, size2))
2074 /* We support up to 512-bit values (for V8DFmode). */
2075 unsigned char buffer[64];
2076 int len;
2078 tree rhs = gimple_assign_rhs1 (def_stmt);
2079 if (TREE_CODE (rhs) == SSA_NAME)
2080 rhs = SSA_VAL (rhs);
2081 len = native_encode_expr (rhs,
2082 buffer, sizeof (buffer),
2083 (offseti - offset2) / BITS_PER_UNIT);
2084 if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
2086 tree type = vr->type;
2087 /* Make sure to interpret in a type that has a range
2088 covering the whole access size. */
2089 if (INTEGRAL_TYPE_P (vr->type)
2090 && maxsizei != TYPE_PRECISION (vr->type))
2091 type = build_nonstandard_integer_type (maxsizei,
2092 TYPE_UNSIGNED (type));
2093 tree val = native_interpret_expr (type, buffer,
2094 maxsizei / BITS_PER_UNIT);
2095 /* If we chop off bits because the type's precision doesn't
2096 match the memory access size, this is ok when optimizing
2097 reads but not when called from the DSE code during
2098 elimination. */
2099 if (val
2100 && type != vr->type)
2102 if (! int_fits_type_p (val, vr->type))
2103 val = NULL_TREE;
2104 else
2105 val = fold_convert (vr->type, val);
2108 if (val)
2109 return vn_reference_lookup_or_insert_for_pieces
2110 (vuse, vr->set, vr->type, vr->operands, val);
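/* A sketch of the redundancy case 3) removes (hypothetical source, not
   from a testcase): a constant stored with one type and re-read with
   another is folded through its byte image, e.g.

     union U { float f; unsigned int i; } u;
     u.f = 1.0f;
     unsigned int x = u.i;

   value-numbers x to 0x3f800000 via native_encode_expr followed by
   native_interpret_expr, provided the read is byte-aligned and fully
   covered by the store.  */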
2115 /* 4) Assignment from an SSA name whose definition we may be able
2116 to access pieces from. */
2117 else if (known_eq (ref->size, maxsize)
2118 && is_gimple_reg_type (vr->type)
2119 && !contains_storage_order_barrier_p (vr->operands)
2120 && gimple_assign_single_p (def_stmt)
2121 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2123 tree base2;
2124 poly_int64 offset2, size2, maxsize2;
2125 bool reverse;
2126 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2127 &offset2, &size2, &maxsize2,
2128 &reverse);
2129 if (!reverse
2130 && known_size_p (maxsize2)
2131 && known_eq (maxsize2, size2)
2132 && operand_equal_p (base, base2, 0)
2133 && known_subrange_p (offset, maxsize, offset2, size2)
2134 /* ??? We can't handle bitfield precision extracts without
2135 either using an alternate type for the BIT_FIELD_REF and
2136 then doing a conversion or possibly adjusting the offset
2137 according to endianness. */
2138 && (! INTEGRAL_TYPE_P (vr->type)
2139 || known_eq (ref->size, TYPE_PRECISION (vr->type)))
2140 && multiple_p (ref->size, BITS_PER_UNIT))
2142 gimple_match_op op (gimple_match_cond::UNCOND,
2143 BIT_FIELD_REF, vr->type,
2144 SSA_VAL (gimple_assign_rhs1 (def_stmt)),
2145 bitsize_int (ref->size),
2146 bitsize_int (offset - offset2));
2147 tree val = vn_nary_build_or_lookup (&op);
2148 if (val
2149 && (TREE_CODE (val) != SSA_NAME
2150 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2152 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2153 (vuse, vr->set, vr->type, vr->operands, val);
2154 return res;
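/* Case 4) in a nutshell (a hypothetical sketch): when the covering
   store was done from an SSA name, a contained read is rewritten as a
   BIT_FIELD_REF of that name, e.g.

     union U { double d; unsigned long long i; } u;
     u.d = x;
     unsigned long long y = u.i;

   looks up y as BIT_FIELD_REF <x_1, 64, 0> (with x_1 the SSA value of
   x, assuming 64-bit double and long long), subject to the precision
   and BITS_PER_UNIT checks above.  */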
2159 /* 5) For aggregate copies translate the reference through them if
2160 the copy kills ref. */
2161 else if (vn_walk_kind == VN_WALKREWRITE
2162 && gimple_assign_single_p (def_stmt)
2163 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2164 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2165 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2167 tree base2;
2168 int i, j, k;
2169 auto_vec<vn_reference_op_s> rhs;
2170 vn_reference_op_t vro;
2171 ao_ref r;
2173 if (!lhs_ref_ok)
2174 return (void *)-1;
2176 /* See if the assignment kills REF. */
2177 base2 = ao_ref_base (&lhs_ref);
2178 if (!lhs_ref.max_size_known_p ()
2179 || (base != base2
2180 && (TREE_CODE (base) != MEM_REF
2181 || TREE_CODE (base2) != MEM_REF
2182 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2183 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2184 TREE_OPERAND (base2, 1))))
2185 || !stmt_kills_ref_p (def_stmt, ref))
2186 return (void *)-1;
2188 /* Find the common base of ref and the lhs. lhs_ops already
2189 contains valueized operands for the lhs. */
2190 i = vr->operands.length () - 1;
2191 j = lhs_ops.length () - 1;
2192 while (j >= 0 && i >= 0
2193 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2195 i--;
2196 j--;
2199 /* ??? The innermost op should always be a MEM_REF and we already
2200 checked that the assignment to the lhs kills vr. Thus for
2201 aggregate copies using char[] types the vn_reference_op_eq
2202 may fail when comparing types for compatibility. But we really
2203 don't care here - further lookups with the rewritten operands
2204 will simply fail if we messed up types too badly. */
2205 poly_int64 extra_off = 0;
2206 if (j == 0 && i >= 0
2207 && lhs_ops[0].opcode == MEM_REF
2208 && maybe_ne (lhs_ops[0].off, -1))
2210 if (known_eq (lhs_ops[0].off, vr->operands[i].off))
2211 i--, j--;
2212 else if (vr->operands[i].opcode == MEM_REF
2213 && maybe_ne (vr->operands[i].off, -1))
2215 extra_off = vr->operands[i].off - lhs_ops[0].off;
2216 i--, j--;
2220 /* i now points to the first additional op.
2221 ??? LHS may not be completely contained in VR, one or more
2222 VIEW_CONVERT_EXPRs could be in its way. We could at least
2223 try handling outermost VIEW_CONVERT_EXPRs. */
2224 if (j != -1)
2225 return (void *)-1;
2227 /* Punt if the additional ops contain a storage order barrier. */
2228 for (k = i; k >= 0; k--)
2230 vro = &vr->operands[k];
2231 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2232 return (void *)-1;
2235 /* Now re-write REF to be based on the rhs of the assignment. */
2236 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2238 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2239 if (maybe_ne (extra_off, 0))
2241 if (rhs.length () < 2)
2242 return (void *)-1;
2243 int ix = rhs.length () - 2;
2244 if (rhs[ix].opcode != MEM_REF
2245 || known_eq (rhs[ix].off, -1))
2246 return (void *)-1;
2247 rhs[ix].off += extra_off;
2248 rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
2249 build_int_cst (TREE_TYPE (rhs[ix].op0),
2250 extra_off));
2253 /* We need to pre-pend vr->operands[0..i] to rhs. */
2254 vec<vn_reference_op_s> old = vr->operands;
2255 if (i + 1 + rhs.length () > vr->operands.length ())
2256 vr->operands.safe_grow (i + 1 + rhs.length ());
2257 else
2258 vr->operands.truncate (i + 1 + rhs.length ());
2259 FOR_EACH_VEC_ELT (rhs, j, vro)
2260 vr->operands[i + 1 + j] = *vro;
2261 vr->operands = valueize_refs (vr->operands);
2262 if (old == shared_lookup_references)
2263 shared_lookup_references = vr->operands;
2264 vr->hashcode = vn_reference_compute_hash (vr);
2266 /* Try folding the new reference to a constant. */
2267 tree val = fully_constant_vn_reference_p (vr);
2268 if (val)
2269 return vn_reference_lookup_or_insert_for_pieces
2270 (vuse, vr->set, vr->type, vr->operands, val);
2272 /* Adjust *ref from the new operands. */
2273 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2274 return (void *)-1;
2275 /* This can happen with bitfields. */
2276 if (maybe_ne (ref->size, r.size))
2277 return (void *)-1;
2278 *ref = r;
2280 /* Do not update last seen VUSE after translating. */
2281 last_vuse_ptr = NULL;
2283 /* Keep looking for the adjusted *REF / VR pair. */
2284 return NULL;
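/* Case 5) translates a load through a covering aggregate copy instead
   of giving up (a hypothetical sketch):

     struct S { int a; int b; } x, y;
     x = y;
     int t = x.b;

   The lookup of x.b is rewritten into a lookup of y.b (applying any
   extra MEM_REF offset) and the walk continues from the copy's VUSE,
   so an earlier store to y.b can still be found.  */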
2287 /* 6) For memcpy copies translate the reference through them if
2288 the copy kills ref. */
2289 else if (vn_walk_kind == VN_WALKREWRITE
2290 && is_gimple_reg_type (vr->type)
2291 /* ??? Handle BCOPY as well. */
2292 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2293 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2294 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2295 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2296 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2297 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2298 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2299 && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size))
2301 tree lhs, rhs;
2302 ao_ref r;
2303 poly_int64 rhs_offset, lhs_offset;
2304 vn_reference_op_s op;
2305 poly_uint64 mem_offset;
2306 poly_int64 at, byte_maxsize;
2308 /* Only handle non-variable, addressable refs. */
2309 if (maybe_ne (ref->size, maxsize)
2310 || !multiple_p (offset, BITS_PER_UNIT, &at)
2311 || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
2312 return (void *)-1;
2314 /* Extract a pointer base and an offset for the destination. */
2315 lhs = gimple_call_arg (def_stmt, 0);
2316 lhs_offset = 0;
2317 if (TREE_CODE (lhs) == SSA_NAME)
2319 lhs = SSA_VAL (lhs);
2320 if (TREE_CODE (lhs) == SSA_NAME)
2322 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2323 if (gimple_assign_single_p (def_stmt)
2324 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2325 lhs = gimple_assign_rhs1 (def_stmt);
2328 if (TREE_CODE (lhs) == ADDR_EXPR)
2330 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2331 &lhs_offset);
2332 if (!tem)
2333 return (void *)-1;
2334 if (TREE_CODE (tem) == MEM_REF
2335 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2337 lhs = TREE_OPERAND (tem, 0);
2338 if (TREE_CODE (lhs) == SSA_NAME)
2339 lhs = SSA_VAL (lhs);
2340 lhs_offset += mem_offset;
2342 else if (DECL_P (tem))
2343 lhs = build_fold_addr_expr (tem);
2344 else
2345 return (void *)-1;
2347 if (TREE_CODE (lhs) != SSA_NAME
2348 && TREE_CODE (lhs) != ADDR_EXPR)
2349 return (void *)-1;
2351 /* Extract a pointer base and an offset for the source. */
2352 rhs = gimple_call_arg (def_stmt, 1);
2353 rhs_offset = 0;
2354 if (TREE_CODE (rhs) == SSA_NAME)
2355 rhs = SSA_VAL (rhs);
2356 if (TREE_CODE (rhs) == ADDR_EXPR)
2358 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2359 &rhs_offset);
2360 if (!tem)
2361 return (void *)-1;
2362 if (TREE_CODE (tem) == MEM_REF
2363 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2365 rhs = TREE_OPERAND (tem, 0);
2366 rhs_offset += mem_offset;
2368 else if (DECL_P (tem)
2369 || TREE_CODE (tem) == STRING_CST)
2370 rhs = build_fold_addr_expr (tem);
2371 else
2372 return (void *)-1;
2374 if (TREE_CODE (rhs) != SSA_NAME
2375 && TREE_CODE (rhs) != ADDR_EXPR)
2376 return (void *)-1;
2378 /* The bases of the destination and the references have to agree. */
2379 if (TREE_CODE (base) == MEM_REF)
2381 if (TREE_OPERAND (base, 0) != lhs
2382 || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
2383 return (void *) -1;
2384 at += mem_offset;
2386 else if (!DECL_P (base)
2387 || TREE_CODE (lhs) != ADDR_EXPR
2388 || TREE_OPERAND (lhs, 0) != base)
2389 return (void *)-1;
2391 /* If the access is completely outside of the memcpy destination
2392 area there is no aliasing. */
2393 if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
2394 return NULL;
2395 /* And the access has to be contained within the memcpy destination. */
2396 if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
2397 return (void *)-1;
2399 /* Make room for 2 operands in the new reference. */
2400 if (vr->operands.length () < 2)
2402 vec<vn_reference_op_s> old = vr->operands;
2403 vr->operands.safe_grow_cleared (2);
2404 if (old == shared_lookup_references)
2405 shared_lookup_references = vr->operands;
2407 else
2408 vr->operands.truncate (2);
2410 /* The looked-through reference is a simple MEM_REF. */
2411 memset (&op, 0, sizeof (op));
2412 op.type = vr->type;
2413 op.opcode = MEM_REF;
2414 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2415 op.off = at - lhs_offset + rhs_offset;
2416 vr->operands[0] = op;
2417 op.type = TREE_TYPE (rhs);
2418 op.opcode = TREE_CODE (rhs);
2419 op.op0 = rhs;
2420 op.off = -1;
2421 vr->operands[1] = op;
2422 vr->hashcode = vn_reference_compute_hash (vr);
2424 /* Try folding the new reference to a constant. */
2425 tree val = fully_constant_vn_reference_p (vr);
2426 if (val)
2427 return vn_reference_lookup_or_insert_for_pieces
2428 (vuse, vr->set, vr->type, vr->operands, val);
2430 /* Adjust *ref from the new operands. */
2431 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2432 return (void *)-1;
2433 /* This can happen with bitfields. */
2434 if (maybe_ne (ref->size, r.size))
2435 return (void *)-1;
2436 *ref = r;
2438 /* Do not update last seen VUSE after translating. */
2439 last_vuse_ptr = NULL;
2441 /* Keep looking for the adjusted *REF / VR pair. */
2442 return NULL;
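/* Likewise for case 6) (a hypothetical sketch): when the copy fully
   covers the read,

     struct S { int i; } d, s;
     memcpy (&d, &s, sizeof d);
     int t = d.i;

   the reference d.i is rewritten as a MEM_REF based on &s at the
   corresponding offset and the walk continues, so a value stored to
   s.i earlier can be reused.  */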
2445 /* Bail out and stop walking. */
2446 return (void *)-1;
2449 /* Return a reference op vector from OP that can be used for
2450 vn_reference_lookup_pieces. The caller is responsible for releasing
2451 the vector. */
2453 vec<vn_reference_op_s>
2454 vn_reference_operands_for_lookup (tree op)
2456 bool valueized;
2457 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2460 /* Lookup a reference operation by its parts, in the current hash table.
2461 Returns the resulting value number if it exists in the hash table,
2462 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2463 vn_reference_t stored in the hashtable if something is found. */
2465 tree
2466 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2467 vec<vn_reference_op_s> operands,
2468 vn_reference_t *vnresult, vn_lookup_kind kind)
2470 struct vn_reference_s vr1;
2471 vn_reference_t tmp;
2472 tree cst;
2474 if (!vnresult)
2475 vnresult = &tmp;
2476 *vnresult = NULL;
2478 vr1.vuse = vuse_ssa_val (vuse);
2479 shared_lookup_references.truncate (0);
2480 shared_lookup_references.safe_grow (operands.length ());
2481 memcpy (shared_lookup_references.address (),
2482 operands.address (),
2483 sizeof (vn_reference_op_s)
2484 * operands.length ());
2485 vr1.operands = operands = shared_lookup_references
2486 = valueize_refs (shared_lookup_references);
2487 vr1.type = type;
2488 vr1.set = set;
2489 vr1.hashcode = vn_reference_compute_hash (&vr1);
2490 if ((cst = fully_constant_vn_reference_p (&vr1)))
2491 return cst;
2493 vn_reference_lookup_1 (&vr1, vnresult);
2494 if (!*vnresult
2495 && kind != VN_NOWALK
2496 && vr1.vuse)
2498 ao_ref r;
2499 vn_walk_kind = kind;
2500 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2501 *vnresult =
2502 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2503 vn_reference_lookup_2,
2504 vn_reference_lookup_3,
2505 vuse_ssa_val, &vr1);
2506 gcc_checking_assert (vr1.operands == shared_lookup_references);
2509 if (*vnresult)
2510 return (*vnresult)->result;
2512 return NULL_TREE;
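/* A minimal usage sketch for the two routines above (hypothetical
   caller, mirroring how PRE-style clients drive this API; EXPR and
   VUSE are assumed to be provided by the caller):

     vec<vn_reference_op_s> ops = vn_reference_operands_for_lookup (expr);
     vn_reference_t res;
     tree val = vn_reference_lookup_pieces (vuse, get_alias_set (expr),
                                            TREE_TYPE (expr), ops,
                                            &res, VN_WALK);
     ops.release ();

   VAL is NULL_TREE when no equivalent reference is recorded.  */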
2515 /* Lookup OP in the current hash table, and return the resulting value
2516 number if it exists in the hash table. Return NULL_TREE if it does
2517 not exist in the hash table or if the result field of the structure
2518 was NULL. VNRESULT will be filled in with the vn_reference_t
2519 stored in the hashtable if one exists. When TBAA_P is false assume
2520 we are looking up a store and treat it as having alias-set zero. */
2522 tree
2523 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2524 vn_reference_t *vnresult, bool tbaa_p)
2526 vec<vn_reference_op_s> operands;
2527 struct vn_reference_s vr1;
2528 tree cst;
2529 bool valuezied_anything;
2531 if (vnresult)
2532 *vnresult = NULL;
2534 vr1.vuse = vuse_ssa_val (vuse);
2535 vr1.operands = operands
2536 = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
2537 vr1.type = TREE_TYPE (op);
2538 vr1.set = tbaa_p ? get_alias_set (op) : 0;
2539 vr1.hashcode = vn_reference_compute_hash (&vr1);
2540 if ((cst = fully_constant_vn_reference_p (&vr1)))
2541 return cst;
2543 if (kind != VN_NOWALK
2544 && vr1.vuse)
2546 vn_reference_t wvnresult;
2547 ao_ref r;
2548 /* Make sure to use a valueized reference if we valueized anything.
2549 Otherwise preserve the full reference for advanced TBAA. */
2550 if (!valuezied_anything
2551 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2552 vr1.operands))
2553 ao_ref_init (&r, op);
2554 if (! tbaa_p)
2555 r.ref_alias_set = r.base_alias_set = 0;
2556 vn_walk_kind = kind;
2557 wvnresult =
2558 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2559 vn_reference_lookup_2,
2560 vn_reference_lookup_3,
2561 vuse_ssa_val, &vr1);
2562 gcc_checking_assert (vr1.operands == shared_lookup_references);
2563 if (wvnresult)
2565 if (vnresult)
2566 *vnresult = wvnresult;
2567 return wvnresult->result;
2570 return NULL_TREE;
2573 return vn_reference_lookup_1 (&vr1, vnresult);
2576 /* Lookup CALL in the current hash table and return the entry in
2577 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2579 void
2580 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2581 vn_reference_t vr)
2583 if (vnresult)
2584 *vnresult = NULL;
2586 tree vuse = gimple_vuse (call);
2588 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2589 vr->operands = valueize_shared_reference_ops_from_call (call);
2590 vr->type = gimple_expr_type (call);
2591 vr->set = 0;
2592 vr->hashcode = vn_reference_compute_hash (vr);
2593 vn_reference_lookup_1 (vr, vnresult);
2596 /* Insert OP into the current hash table with a value number of
2597 RESULT, and return the resulting reference structure we created. */
2599 static vn_reference_t
2600 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2602 vn_reference_s **slot;
2603 vn_reference_t vr1;
2604 bool tem;
2606 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
2607 if (TREE_CODE (result) == SSA_NAME)
2608 vr1->value_id = VN_INFO (result)->value_id;
2609 else
2610 vr1->value_id = get_or_alloc_constant_value_id (result);
2611 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2612 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2613 vr1->type = TREE_TYPE (op);
2614 vr1->set = get_alias_set (op);
2615 vr1->hashcode = vn_reference_compute_hash (vr1);
2616 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2617 vr1->result_vdef = vdef;
2619 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2620 INSERT);
2622 /* Because we lookup stores using vuses, and value number failures
2623 using the vdefs (see visit_reference_op_store for how and why),
2624 it's possible that on failure we may try to insert an already
2625 inserted store. This is not wrong, there is no ssa name for a
2626 store that we could use as a differentiator anyway. Thus, unlike
2627 the other lookup functions, you cannot gcc_assert (!*slot)
2628 here. */
2630 /* But free the old slot in case of a collision. */
2631 if (*slot)
2632 free_reference (*slot);
2634 *slot = vr1;
2635 vr1->next = last_inserted_ref;
2636 last_inserted_ref = vr1;
2637 return vr1;
2640 /* Insert a reference by its pieces into the current hash table with
2641 a value number of RESULT. Return the resulting reference
2642 structure we created. */
2644 vn_reference_t
2645 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2646 vec<vn_reference_op_s> operands,
2647 tree result, unsigned int value_id)
2650 vn_reference_s **slot;
2651 vn_reference_t vr1;
2653 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
2654 vr1->value_id = value_id;
2655 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2656 vr1->operands = valueize_refs (operands);
2657 vr1->type = type;
2658 vr1->set = set;
2659 vr1->hashcode = vn_reference_compute_hash (vr1);
2660 if (result && TREE_CODE (result) == SSA_NAME)
2661 result = SSA_VAL (result);
2662 vr1->result = result;
2664 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2665 INSERT);
2667 /* At this point we should have all the things inserted that we have
2668 seen before, and we should never try inserting something that
2669 already exists. */
2670 gcc_assert (!*slot);
2671 if (*slot)
2672 free_reference (*slot);
2674 *slot = vr1;
2675 vr1->next = last_inserted_ref;
2676 last_inserted_ref = vr1;
2677 return vr1;
2680 /* Compute and return the hash value for nary operation VBO1. */
2682 static hashval_t
2683 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2685 inchash::hash hstate;
2686 unsigned i;
2688 for (i = 0; i < vno1->length; ++i)
2689 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2690 vno1->op[i] = SSA_VAL (vno1->op[i]);
2692 if (((vno1->length == 2
2693 && commutative_tree_code (vno1->opcode))
2694 || (vno1->length == 3
2695 && commutative_ternary_tree_code (vno1->opcode)))
2696 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2697 std::swap (vno1->op[0], vno1->op[1]);
2698 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2699 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2701 std::swap (vno1->op[0], vno1->op[1]);
2702 vno1->opcode = swap_tree_comparison (vno1->opcode);
2705 hstate.add_int (vno1->opcode);
2706 for (i = 0; i < vno1->length; ++i)
2707 inchash::add_expr (vno1->op[i], hstate);
2709 return hstate.end ();
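/* The canonicalization above makes commutative expressions independent
   of operand order; as a sketch (hypothetical SSA names):

     x_3 = a_1 + b_2;
     y_4 = b_2 + a_1;

   both right-hand sides end up with the same op[] order and thus the
   same hash, and comparisons are likewise canonicalized with
   swap_tree_comparison so a_1 < b_2 and b_2 > a_1 share one entry.  */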
2712 /* Compare nary operations VNO1 and VNO2 and return true if they are
2713 equivalent. */
2715 bool
2716 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2718 unsigned i;
2720 if (vno1->hashcode != vno2->hashcode)
2721 return false;
2723 if (vno1->length != vno2->length)
2724 return false;
2726 if (vno1->opcode != vno2->opcode
2727 || !types_compatible_p (vno1->type, vno2->type))
2728 return false;
2730 for (i = 0; i < vno1->length; ++i)
2731 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2732 return false;
2734 /* BIT_INSERT_EXPR has an implicit operand as the type precision
2735 of op1. Need to check to make sure they are the same. */
2736 if (vno1->opcode == BIT_INSERT_EXPR
2737 && TREE_CODE (vno1->op[1]) == INTEGER_CST
2738 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
2739 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
2740 return false;
2742 return true;
2745 /* Initialize VNO from the pieces provided. */
2747 static void
2748 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2749 enum tree_code code, tree type, tree *ops)
2751 vno->opcode = code;
2752 vno->length = length;
2753 vno->type = type;
2754 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2757 /* Initialize VNO from OP. */
2759 static void
2760 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2762 unsigned i;
2764 vno->opcode = TREE_CODE (op);
2765 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2766 vno->type = TREE_TYPE (op);
2767 for (i = 0; i < vno->length; ++i)
2768 vno->op[i] = TREE_OPERAND (op, i);
2771 /* Return the number of operands for a vn_nary ops structure from STMT. */
2773 static unsigned int
2774 vn_nary_length_from_stmt (gimple *stmt)
2776 switch (gimple_assign_rhs_code (stmt))
2778 case REALPART_EXPR:
2779 case IMAGPART_EXPR:
2780 case VIEW_CONVERT_EXPR:
2781 return 1;
2783 case BIT_FIELD_REF:
2784 return 3;
2786 case CONSTRUCTOR:
2787 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2789 default:
2790 return gimple_num_ops (stmt) - 1;
2794 /* Initialize VNO from STMT. */
2796 static void
2797 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2799 unsigned i;
2801 vno->opcode = gimple_assign_rhs_code (stmt);
2802 vno->type = gimple_expr_type (stmt);
2803 switch (vno->opcode)
2805 case REALPART_EXPR:
2806 case IMAGPART_EXPR:
2807 case VIEW_CONVERT_EXPR:
2808 vno->length = 1;
2809 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2810 break;
2812 case BIT_FIELD_REF:
2813 vno->length = 3;
2814 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2815 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2816 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2817 break;
2819 case CONSTRUCTOR:
2820 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2821 for (i = 0; i < vno->length; ++i)
2822 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2823 break;
2825 default:
2826 gcc_checking_assert (!gimple_assign_single_p (stmt));
2827 vno->length = gimple_num_ops (stmt) - 1;
2828 for (i = 0; i < vno->length; ++i)
2829 vno->op[i] = gimple_op (stmt, i + 1);
2833 /* Compute the hashcode for VNO and look for it in the hash table;
2834 return the resulting value number if it exists in the hash table.
2835 Return NULL_TREE if it does not exist in the hash table or if the
2836 result field of the operation is NULL. VNRESULT will contain the
2837 vn_nary_op_t from the hashtable if it exists. */
2839 static tree
2840 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2842 vn_nary_op_s **slot;
2844 if (vnresult)
2845 *vnresult = NULL;
2847 vno->hashcode = vn_nary_op_compute_hash (vno);
2848 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2849 NO_INSERT);
2850 if (!slot)
2851 return NULL_TREE;
2852 if (vnresult)
2853 *vnresult = *slot;
2854 return (*slot)->result;
2857 /* Lookup an n-ary operation by its pieces and return the resulting value
2858 number if it exists in the hash table. Return NULL_TREE if it does
2859 not exist in the hash table or if the result field of the operation
2860 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2861 if it exists. */
2863 tree
2864 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2865 tree type, tree *ops, vn_nary_op_t *vnresult)
2867 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2868 sizeof_vn_nary_op (length));
2869 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2870 return vn_nary_op_lookup_1 (vno1, vnresult);
2873 /* Lookup OP in the current hash table, and return the resulting value
2874 number if it exists in the hash table. Return NULL_TREE if it does
2875 not exist in the hash table or if the result field of the operation
2876 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2877 if it exists. */
2879 tree
2880 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2882 vn_nary_op_t vno1
2883 = XALLOCAVAR (struct vn_nary_op_s,
2884 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2885 init_vn_nary_op_from_op (vno1, op);
2886 return vn_nary_op_lookup_1 (vno1, vnresult);
2889 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2890 value number if it exists in the hash table. Return NULL_TREE if
2891 it does not exist in the hash table. VNRESULT will contain the
2892 vn_nary_op_t from the hashtable if it exists. */
2894 tree
2895 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2897 vn_nary_op_t vno1
2898 = XALLOCAVAR (struct vn_nary_op_s,
2899 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2900 init_vn_nary_op_from_stmt (vno1, stmt);
2901 return vn_nary_op_lookup_1 (vno1, vnresult);
2904 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2906 static vn_nary_op_t
2907 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2909 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2912 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2913 obstack. */
2915 static vn_nary_op_t
2916 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2918 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);
2920 vno1->value_id = value_id;
2921 vno1->length = length;
2922 vno1->result = result;
2924 return vno1;
2927 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2928 VNO->HASHCODE first. */
2930 static vn_nary_op_t
2931 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2932 bool compute_hash)
2934 vn_nary_op_s **slot;
2936 if (compute_hash)
2937 vno->hashcode = vn_nary_op_compute_hash (vno);
2939 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2940 /* While we do not want to insert things twice it's awkward to
2941 avoid it in the case where visit_nary_op pattern-matches stuff
2942 and ends up simplifying the replacement to itself. We then
2943 get two inserts, one from visit_nary_op and one from
2944 vn_nary_build_or_lookup.
2945 So allow inserts with the same value number. */
2946 if (*slot && (*slot)->result == vno->result)
2947 return *slot;
2949 gcc_assert (!*slot);
2951 *slot = vno;
2952 vno->next = last_inserted_nary;
2953 last_inserted_nary = vno;
2954 return vno;
2957 /* Insert an n-ary operation into the current hash table using its
2958 pieces. Return the vn_nary_op_t structure we created and put in
2959 the hashtable. */
2961 vn_nary_op_t
2962 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2963 tree type, tree *ops,
2964 tree result, unsigned int value_id)
2966 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2967 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2968 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
2971 /* Insert OP into the current hash table with a value number of
2972 RESULT. Return the vn_nary_op_t structure we created and put in
2973 the hashtable. */
2975 vn_nary_op_t
2976 vn_nary_op_insert (tree op, tree result)
2978 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2979 vn_nary_op_t vno1;
2981 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2982 init_vn_nary_op_from_op (vno1, op);
2983 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
2986 /* Insert the rhs of STMT into the current hash table with a value number of
2987 RESULT. */
2989 static vn_nary_op_t
2990 vn_nary_op_insert_stmt (gimple *stmt, tree result)
2992 vn_nary_op_t vno1
2993 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2994 result, VN_INFO (result)->value_id);
2995 init_vn_nary_op_from_stmt (vno1, stmt);
2996 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
2999 /* Compute a hashcode for PHI operation VP1 and return it. */
3001 static inline hashval_t
3002 vn_phi_compute_hash (vn_phi_t vp1)
3004 inchash::hash hstate (EDGE_COUNT (vp1->block->preds) > 2
3005 ? vp1->block->index : EDGE_COUNT (vp1->block->preds));
3006 tree phi1op;
3007 tree type;
3008 edge e;
3009 edge_iterator ei;
3011 /* If all PHI arguments are constants we need to distinguish
3012 the PHI node via its type. */
3013 type = vp1->type;
3014 hstate.merge_hash (vn_hash_type (type));
3016 FOR_EACH_EDGE (e, ei, vp1->block->preds)
3018 /* Don't hash backedge values; they need to be handled as VN_TOP
3019 for optimistic value-numbering. */
3020 if (e->flags & EDGE_DFS_BACK)
3021 continue;
3023 phi1op = vp1->phiargs[e->dest_idx];
3024 if (phi1op == VN_TOP)
3025 continue;
3026 inchash::add_expr (phi1op, hstate);
3029 return hstate.end ();
3033 /* Return true if COND1 and COND2 represent the same condition, set
3034 *INVERTED_P if one needs to be inverted to make it the same as
3035 the other. */
3037 static bool
3038 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
3039 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
3041 enum tree_code code1 = gimple_cond_code (cond1);
3042 enum tree_code code2 = gimple_cond_code (cond2);
3044 *inverted_p = false;
3045 if (code1 == code2)
3047 else if (code1 == swap_tree_comparison (code2))
3048 std::swap (lhs2, rhs2);
3049 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
3050 *inverted_p = true;
3051 else if (code1 == invert_tree_comparison
3052 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
3054 std::swap (lhs2, rhs2);
3055 *inverted_p = true;
3057 else
3058 return false;
3060 return ((expressions_equal_p (lhs1, lhs2)
3061 && expressions_equal_p (rhs1, rhs2))
3062 || (commutative_tree_code (code1)
3063 && expressions_equal_p (lhs1, rhs2)
3064 && expressions_equal_p (rhs1, lhs2)));
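/* A short sketch of the equivalences this accepts (hypothetical
   operands a_1, b_2):

     if (a_1 < b_2)   vs.  if (b_2 > a_1)   -- same, *INVERTED_P false
     if (a_1 < b_2)   vs.  if (a_1 >= b_2)  -- same, *INVERTED_P true

   For floating-point operands the inverted cases additionally depend
   on HONOR_NANS as checked above.  */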
3067 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
3069 static int
3070 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
3072 if (vp1->hashcode != vp2->hashcode)
3073 return false;
3075 if (vp1->block != vp2->block)
3077 if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
3078 return false;
3080 switch (EDGE_COUNT (vp1->block->preds))
3082 case 1:
3083 /* Single-arg PHIs are just copies. */
3084 break;
3086 case 2:
3088 /* Rule out backedges into the PHI. */
3089 if (vp1->block->loop_father->header == vp1->block
3090 || vp2->block->loop_father->header == vp2->block)
3091 return false;
3093 /* If the PHI nodes do not have compatible types
3094 they are not the same. */
3095 if (!types_compatible_p (vp1->type, vp2->type))
3096 return false;
3098 basic_block idom1
3099 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3100 basic_block idom2
3101 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
3102 /* If the immediate dominators end in switch stmts, multiple
3103 values may end up in the same PHI arg via intermediate
3104 CFG merges. */
3105 if (EDGE_COUNT (idom1->succs) != 2
3106 || EDGE_COUNT (idom2->succs) != 2)
3107 return false;
3109 /* Verify the controlling stmt is the same. */
3110 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3111 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3112 if (! last1 || ! last2)
3113 return false;
3114 bool inverted_p;
3115 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3116 last2, vp2->cclhs, vp2->ccrhs,
3117 &inverted_p))
3118 return false;
3120 /* Get at true/false controlled edges into the PHI. */
3121 edge te1, te2, fe1, fe2;
3122 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3123 &te1, &fe1)
3124 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3125 &te2, &fe2))
3126 return false;
3128 /* Swap edges if the second condition is the inverse of the
3129 first. */
3130 if (inverted_p)
3131 std::swap (te2, fe2);
3133 /* ??? Handle VN_TOP specially. */
3134 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3135 vp2->phiargs[te2->dest_idx])
3136 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3137 vp2->phiargs[fe2->dest_idx]))
3138 return false;
3140 return true;
3143 default:
3144 return false;
3148 /* If the PHI nodes do not have compatible types
3149 they are not the same. */
3150 if (!types_compatible_p (vp1->type, vp2->type))
3151 return false;
3153 /* Any phi in the same block will have its arguments in the
3154 same edge order, because of how we store phi nodes. */
3155 for (unsigned i = 0; i < EDGE_COUNT (vp1->block->preds); ++i)
3157 tree phi1op = vp1->phiargs[i];
3158 tree phi2op = vp2->phiargs[i];
3159 if (phi1op == VN_TOP || phi2op == VN_TOP)
3160 continue;
3161 if (!expressions_equal_p (phi1op, phi2op))
3162 return false;
3165 return true;
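/* A sketch of the two-predecessor case (hypothetical CFG): PHIs of two
   separate diamonds controlled by equivalent conditions compare equal,
   roughly

     x_3 = a_1 < b_2 ? c_4 : d_5;
     y_6 = a_1 >= b_2 ? d_5 : c_4;

   because the true/false controlled edges are matched via
   extract_true_false_controlled_edges and swapped when the second
   condition is the inverse of the first.  */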
3168 /* Lookup PHI in the current hash table, and return the resulting
3169 value number if it exists in the hash table. Return NULL_TREE if
3170 it does not exist in the hash table. */
3172 static tree
3173 vn_phi_lookup (gimple *phi)
3175 vn_phi_s **slot;
3176 struct vn_phi_s *vp1;
3177 edge e;
3178 edge_iterator ei;
3180 vp1 = XALLOCAVAR (struct vn_phi_s,
3181 sizeof (struct vn_phi_s)
3182 + (gimple_phi_num_args (phi) - 1) * sizeof (tree));
3184 /* Canonicalize the SSA_NAMEs to their value number. */
3185 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3187 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3188 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3189 vp1->phiargs[e->dest_idx] = def;
3191 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3192 vp1->block = gimple_bb (phi);
3193 /* Extract values of the controlling condition. */
3194 vp1->cclhs = NULL_TREE;
3195 vp1->ccrhs = NULL_TREE;
3196 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3197 if (EDGE_COUNT (idom1->succs) == 2)
3198 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3200 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3201 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3203 vp1->hashcode = vn_phi_compute_hash (vp1);
3204 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode,
3205 NO_INSERT);
3206 if (!slot)
3207 return NULL_TREE;
3208 return (*slot)->result;
3211 /* Insert PHI into the current hash table with a value number of
3212 RESULT. */
3214 static vn_phi_t
3215 vn_phi_insert (gimple *phi, tree result)
3217 vn_phi_s **slot;
3218 vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
3219 sizeof (vn_phi_s)
3220 + ((gimple_phi_num_args (phi) - 1)
3221 * sizeof (tree)));
3222 edge e;
3223 edge_iterator ei;
3225 /* Canonicalize the SSA_NAMEs to their value number. */
3226 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3228 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3229 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3230 vp1->phiargs[e->dest_idx] = def;
3232 vp1->value_id = VN_INFO (result)->value_id;
3233 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3234 vp1->block = gimple_bb (phi);
3235 /* Extract values of the controlling condition. */
3236 vp1->cclhs = NULL_TREE;
3237 vp1->ccrhs = NULL_TREE;
3238 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3239 if (EDGE_COUNT (idom1->succs) == 2)
3240 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3242 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3243 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3245 vp1->result = result;
3246 vp1->hashcode = vn_phi_compute_hash (vp1);
3248 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3250 /* Because we iterate over phi operations more than once, it's
3251 possible the slot might already exist here, hence no assert. */
3252 *slot = vp1;
3253 vp1->next = last_inserted_phi;
3254 last_inserted_phi = vp1;
3255 return vp1;
3259 /* Print set of components in strongly connected component SCC to OUT. */
3261 static void
3262 print_scc (FILE *out, vec<tree> scc)
3264 tree var;
3265 unsigned int i;
3267 fprintf (out, "SCC consists of %u:", scc.length ());
3268 FOR_EACH_VEC_ELT (scc, i, var)
3270 fprintf (out, " ");
3271 print_generic_expr (out, var);
3273 fprintf (out, "\n");
3276 /* Return true if BB1 is dominated by BB2 taking into account edges
3277 that are not executable. */
3279 static bool
3280 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3282 edge_iterator ei;
3283 edge e;
3285 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3286 return true;
3288 /* Before iterating we'd like to know if there exists an
3289 (executable) path from bb2 to bb1 at all; if not we can
3290 directly return false. For now simply iterate once. */
3292 /* Iterate to the single executable bb1 predecessor. */
3293 if (EDGE_COUNT (bb1->preds) > 1)
3295 edge prede = NULL;
3296 FOR_EACH_EDGE (e, ei, bb1->preds)
3297 if (e->flags & EDGE_EXECUTABLE)
3299 if (prede)
3301 prede = NULL;
3302 break;
3304 prede = e;
3306 if (prede)
3308 bb1 = prede->src;
3310 /* Re-do the dominance check with changed bb1. */
3311 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3312 return true;
3316 /* Iterate to the single executable bb2 successor. */
3317 edge succe = NULL;
3318 FOR_EACH_EDGE (e, ei, bb2->succs)
3319 if (e->flags & EDGE_EXECUTABLE)
3321 if (succe)
3323 succe = NULL;
3324 break;
3326 succe = e;
3328 if (succe)
3330 /* Verify the reached block is only reached through succe.
3331 If there is only one edge we can spare us the dominator
3332 check and iterate directly. */
3333 if (EDGE_COUNT (succe->dest->preds) > 1)
3335 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3336 if (e != succe
3337 && (e->flags & EDGE_EXECUTABLE))
3339 succe = NULL;
3340 break;
3343 if (succe)
3345 bb2 = succe->dest;
3347 /* Re-do the dominance check with changed bb2. */
3348 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3349 return true;
3353 /* We could now iterate updating bb1 / bb2. */
3354 return false;
3357 /* Set the value number of FROM to TO, return true if it has changed
3358 as a result. */
3360 static inline bool
3361 set_ssa_val_to (tree from, tree to)
3363 tree currval = SSA_VAL (from);
3364 poly_int64 toff, coff;
3366 /* The only things we allow as value numbers are ssa_names
3367 and invariants. So assert that here. We don't allow VN_TOP
3368 as visiting a stmt should produce a value-number other than
3369 that.
3370 ??? Still VN_TOP can happen for unreachable code, so force
3371 it to varying in that case. Not all code is prepared to
3372 get VN_TOP on valueization. */
3373 if (to == VN_TOP)
3375 if (dump_file && (dump_flags & TDF_DETAILS))
3376 fprintf (dump_file, "Forcing value number to varying on "
3377 "receiving VN_TOP\n");
3378 to = from;
3381 gcc_assert (to != NULL_TREE
3382 && ((TREE_CODE (to) == SSA_NAME
3383 && (to == from || SSA_VAL (to) == to))
3384 || is_gimple_min_invariant (to)));
3386 if (from != to)
3388 if (currval == from)
3390 if (dump_file && (dump_flags & TDF_DETAILS))
3392 fprintf (dump_file, "Not changing value number of ");
3393 print_generic_expr (dump_file, from);
3394 fprintf (dump_file, " from VARYING to ");
3395 print_generic_expr (dump_file, to);
3396 fprintf (dump_file, "\n");
3398 return false;
3400 else if (currval != VN_TOP
3401 && ! is_gimple_min_invariant (currval)
3402 && is_gimple_min_invariant (to))
3404 if (dump_file && (dump_flags & TDF_DETAILS))
3406 fprintf (dump_file, "Forcing VARYING instead of changing "
3407 "value number of ");
3408 print_generic_expr (dump_file, from);
3409 fprintf (dump_file, " from ");
3410 print_generic_expr (dump_file, currval);
3411 fprintf (dump_file, " (non-constant) to ");
3412 print_generic_expr (dump_file, to);
3413 fprintf (dump_file, " (constant)\n");
3415 to = from;
3417 else if (TREE_CODE (to) == SSA_NAME
3418 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3419 to = from;
3422 if (dump_file && (dump_flags & TDF_DETAILS))
3424 fprintf (dump_file, "Setting value number of ");
3425 print_generic_expr (dump_file, from);
3426 fprintf (dump_file, " to ");
3427 print_generic_expr (dump_file, to);
3430 if (currval != to
3431 && !operand_equal_p (currval, to, 0)
3432 /* Different undefined SSA names are not actually different. See
3433 PR82320 for a testcase where we'd otherwise not terminate iteration. */
3434 && !(TREE_CODE (currval) == SSA_NAME
3435 && TREE_CODE (to) == SSA_NAME
3436 && ssa_undefined_value_p (currval, false)
3437 && ssa_undefined_value_p (to, false))
3438 /* ??? For addresses involving volatile objects or types operand_equal_p
3439 does not reliably detect ADDR_EXPRs as equal. We know we are only
3440 getting invariant gimple addresses here, so can use
3441 get_addr_base_and_unit_offset to do this comparison. */
3442 && !(TREE_CODE (currval) == ADDR_EXPR
3443 && TREE_CODE (to) == ADDR_EXPR
3444 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3445 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3446 && known_eq (coff, toff)))
3448 if (dump_file && (dump_flags & TDF_DETAILS))
3449 fprintf (dump_file, " (changed)\n");
3451 /* If we equate two SSA names we have to make the side-band info
3452 of the leader conservative (and remember whatever original value
3453 was present). */
3454 if (TREE_CODE (to) == SSA_NAME)
3456 if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3457 && SSA_NAME_RANGE_INFO (to))
3459 if (SSA_NAME_IS_DEFAULT_DEF (to)
3460 || dominated_by_p_w_unex
3461 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3462 gimple_bb (SSA_NAME_DEF_STMT (to))))
3463 /* Keep the info from the dominator. */
3465 else
3467 /* Save old info. */
3468 if (! VN_INFO (to)->info.range_info)
3470 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3471 VN_INFO (to)->range_info_anti_range_p
3472 = SSA_NAME_ANTI_RANGE_P (to);
3474 /* Rather than allocating memory and unioning the info
3475 just clear it. */
3476 if (dump_file && (dump_flags & TDF_DETAILS))
3478 fprintf (dump_file, "clearing range info of ");
3479 print_generic_expr (dump_file, to);
3480 fprintf (dump_file, "\n");
3482 SSA_NAME_RANGE_INFO (to) = NULL;
3485 else if (POINTER_TYPE_P (TREE_TYPE (to))
3486 && SSA_NAME_PTR_INFO (to))
3488 if (SSA_NAME_IS_DEFAULT_DEF (to)
3489 || dominated_by_p_w_unex
3490 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3491 gimple_bb (SSA_NAME_DEF_STMT (to))))
3492 /* Keep the info from the dominator. */
3494 else if (! SSA_NAME_PTR_INFO (from)
3495 /* Handle the case of trivially equivalent info. */
3496 || memcmp (SSA_NAME_PTR_INFO (to),
3497 SSA_NAME_PTR_INFO (from),
3498 sizeof (ptr_info_def)) != 0)
3500 /* Save old info. */
3501 if (! VN_INFO (to)->info.ptr_info)
3502 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3503 /* Rather than allocating memory and unioning the info
3504 just clear it. */
3505 if (dump_file && (dump_flags & TDF_DETAILS))
3507 fprintf (dump_file, "clearing points-to info of ");
3508 print_generic_expr (dump_file, to);
3509 fprintf (dump_file, "\n");
3511 SSA_NAME_PTR_INFO (to) = NULL;
3516 VN_INFO (from)->valnum = to;
3517 return true;
3519 if (dump_file && (dump_flags & TDF_DETAILS))
3520 fprintf (dump_file, "\n");
3521 return false;
3524 /* Mark as processed all the definitions in the defining stmt of USE, or
3525 the USE itself. */
3527 static void
3528 mark_use_processed (tree use)
3530 ssa_op_iter iter;
3531 def_operand_p defp;
3532 gimple *stmt = SSA_NAME_DEF_STMT (use);
3534 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3536 VN_INFO (use)->use_processed = true;
3537 return;
3540 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3542 tree def = DEF_FROM_PTR (defp);
3544 VN_INFO (def)->use_processed = true;
3548 /* Value number all definitions in STMT to themselves.
3549 Return true if a value number changed. */
3551 static bool
3552 defs_to_varying (gimple *stmt)
3554 bool changed = false;
3555 ssa_op_iter iter;
3556 def_operand_p defp;
3558 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3560 tree def = DEF_FROM_PTR (defp);
3561 changed |= set_ssa_val_to (def, def);
3563 return changed;
3566 /* Visit a copy between LHS and RHS, return true if the value number
3567 changed. */
3569 static bool
3570 visit_copy (tree lhs, tree rhs)
3572 /* Valueize. */
3573 rhs = SSA_VAL (rhs);
3575 return set_ssa_val_to (lhs, rhs);
3578 /* Lookup a value for OP in type WIDE_TYPE where the value in type of OP
3579 is the same. */
3581 static tree
3582 valueized_wider_op (tree wide_type, tree op)
3584 if (TREE_CODE (op) == SSA_NAME)
3585 op = SSA_VAL (op);
3587 /* Either we have the op widened available. */
3588 tree ops[3] = {};
3589 ops[0] = op;
3590 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
3591 wide_type, ops, NULL);
3592 if (tem)
3593 return tem;
3595 /* Or the op is truncated from some existing value. */
3596 if (TREE_CODE (op) == SSA_NAME)
3598 gimple *def = SSA_NAME_DEF_STMT (op);
3599 if (is_gimple_assign (def)
3600 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3602 tem = gimple_assign_rhs1 (def);
3603 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
3605 if (TREE_CODE (tem) == SSA_NAME)
3606 tem = SSA_VAL (tem);
3607 return tem;
3612 /* For constants simply extend it. */
3613 if (TREE_CODE (op) == INTEGER_CST)
3614 return wide_int_to_tree (wide_type, wi::to_wide (op));
3616 return NULL_TREE;
3619 /* Visit a nary operator RHS, value number it, and return true if the
3620 value number of LHS has changed as a result. */
3622 static bool
3623 visit_nary_op (tree lhs, gassign *stmt)
3625 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3626 if (result)
3627 return set_ssa_val_to (lhs, result);
3629 /* Do some special pattern matching for redundancies of operations
3630 in different types. */
3631 enum tree_code code = gimple_assign_rhs_code (stmt);
3632 tree type = TREE_TYPE (lhs);
3633 tree rhs1 = gimple_assign_rhs1 (stmt);
3634 switch (code)
3636 CASE_CONVERT:
3637 /* Match arithmetic done in a different type where we can easily
3638 substitute the result from some earlier sign-changed or widened
3639 operation. */
3640 if (INTEGRAL_TYPE_P (type)
3641 && TREE_CODE (rhs1) == SSA_NAME
3642 /* We only handle sign-changes or zero-extension -> & mask. */
3643 && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
3644 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
3645 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
3647 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
3648 if (def
3649 && (gimple_assign_rhs_code (def) == PLUS_EXPR
3650 || gimple_assign_rhs_code (def) == MINUS_EXPR
3651 || gimple_assign_rhs_code (def) == MULT_EXPR))
3653 tree ops[3] = {};
3654 /* Either we have the op widened available. */
3655 ops[0] = valueized_wider_op (type,
3656 gimple_assign_rhs1 (def));
3657 if (ops[0])
3658 ops[1] = valueized_wider_op (type,
3659 gimple_assign_rhs2 (def));
3660 if (ops[0] && ops[1])
3662 ops[0] = vn_nary_op_lookup_pieces
3663 (2, gimple_assign_rhs_code (def), type, ops, NULL);
3664 /* We have wider operation available. */
3665 if (ops[0])
3667 unsigned lhs_prec = TYPE_PRECISION (type);
3668 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
3669 if (lhs_prec == rhs_prec)
3671 gimple_match_op match_op (gimple_match_cond::UNCOND,
3672 NOP_EXPR, type, ops[0]);
3673 result = vn_nary_build_or_lookup (&match_op);
3674 if (result)
3676 bool changed = set_ssa_val_to (lhs, result);
3677 vn_nary_op_insert_stmt (stmt, result);
3678 return changed;
3681 else
3683 tree mask = wide_int_to_tree
3684 (type, wi::mask (rhs_prec, false, lhs_prec));
3685 gimple_match_op match_op (gimple_match_cond::UNCOND,
3686 BIT_AND_EXPR,
3687 TREE_TYPE (lhs),
3688 ops[0], mask);
3689 result = vn_nary_build_or_lookup (&match_op);
3690 if (result)
3692 bool changed = set_ssa_val_to (lhs, result);
3693 vn_nary_op_insert_stmt (stmt, result);
3694 return changed;
3701 default:;
3704 bool changed = set_ssa_val_to (lhs, lhs);
3705 vn_nary_op_insert_stmt (stmt, lhs);
3706 return changed;
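/* A sketch of the redundancy the CASE_CONVERT code above removes
   (hypothetical, assuming 16-bit unsigned short a_5/b_6 and 32-bit int):

     t1_1 = (int) a_5;
     t2_2 = (int) b_6;
     i_3 = t1_1 + t2_2;
     s_4 = a_5 + b_6;
     j_7 = (int) s_4;

   When j_7 is visited, the narrow addition defining s_4 is re-looked-up
   in the wider type; i_3 is found and j_7 is value-numbered to
   i_3 & 0xffff via a BIT_AND_EXPR, or to the wider result directly
   when only the sign differs.  */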
3709 /* Visit a call STMT storing into LHS. Return true if the value number
3710 of the LHS has changed as a result. */
3712 static bool
3713 visit_reference_op_call (tree lhs, gcall *stmt)
3715 bool changed = false;
3716 struct vn_reference_s vr1;
3717 vn_reference_t vnresult = NULL;
3718 tree vdef = gimple_vdef (stmt);
3720 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3721 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3722 lhs = NULL_TREE;
3724 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3725 if (vnresult)
3727 if (vnresult->result_vdef && vdef)
3728 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3729 else if (vdef)
3730 /* If the call was discovered to be pure or const reflect
3731 that as far as possible. */
3732 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
3734 if (!vnresult->result && lhs)
3735 vnresult->result = lhs;
3737 if (vnresult->result && lhs)
3738 changed |= set_ssa_val_to (lhs, vnresult->result);
3740 else
3742 vn_reference_t vr2;
3743 vn_reference_s **slot;
3744 tree vdef_val = vdef;
3745 if (vdef)
3747 /* If we value numbered an indirect call's function to
3748 one not clobbering memory, value number its VDEF to its
3749 VUSE. */
3750 tree fn = gimple_call_fn (stmt);
3751 if (fn && TREE_CODE (fn) == SSA_NAME)
3753 fn = SSA_VAL (fn);
3754 if (TREE_CODE (fn) == ADDR_EXPR
3755 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3756 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
3757 & (ECF_CONST | ECF_PURE)))
3758 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
3760 changed |= set_ssa_val_to (vdef, vdef_val);
3762 if (lhs)
3763 changed |= set_ssa_val_to (lhs, lhs);
3764 vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3765 vr2->vuse = vr1.vuse;
3766 /* As we are not walking the virtual operand chain we know the
3767 shared_lookup_references are still original so we can re-use
3768 them here. */
3769 vr2->operands = vr1.operands.copy ();
3770 vr2->type = vr1.type;
3771 vr2->set = vr1.set;
3772 vr2->hashcode = vr1.hashcode;
3773 vr2->result = lhs;
3774 vr2->result_vdef = vdef_val;
3775 slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3776 INSERT);
3777 gcc_assert (!*slot);
3778 *slot = vr2;
3779 vr2->next = last_inserted_ref;
3780 last_inserted_ref = vr2;
3783 return changed;
3786 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3787 and return true if the value number of the LHS has changed as a result. */
3789 static bool
3790 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3792 bool changed = false;
3793 tree last_vuse;
3794 tree result;
3796 last_vuse = gimple_vuse (stmt);
3797 last_vuse_ptr = &last_vuse;
3798 result = vn_reference_lookup (op, gimple_vuse (stmt),
3799 default_vn_walk_kind, NULL, true);
3800 last_vuse_ptr = NULL;
3802 /* We handle type-punning through unions by value-numbering based
3803 on offset and size of the access. Be prepared to handle a
3804 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
3805 if (result
3806 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3808 /* We will be setting the value number of lhs to the value number
3809 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3810 So first simplify and lookup this expression to see if it
3811 is already available. */
3812 gimple_match_op res_op (gimple_match_cond::UNCOND,
3813 VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
3814 result = vn_nary_build_or_lookup (&res_op);
3817 if (result)
3818 changed = set_ssa_val_to (lhs, result);
3819 else
3821 changed = set_ssa_val_to (lhs, lhs);
3822 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3825 return changed;
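/* A sketch of the type-punning situation handled above (hypothetical):
   after

     union { float f; int i; } u;
     u.f = x;
     i_2 = u.i;

   the lookup for u.i can return the float value stored to u.f; the
   mismatch is then expressed as VIEW_CONVERT_EXPR <int> of that value
   so i_2 still receives a usable value number.  */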
3829 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3830 and return true if the value number of the LHS has changed as a result. */
3832 static bool
3833 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3835 bool changed = false;
3836 vn_reference_t vnresult = NULL;
3837 tree assign;
3838 bool resultsame = false;
3839 tree vuse = gimple_vuse (stmt);
3840 tree vdef = gimple_vdef (stmt);
3842 if (TREE_CODE (op) == SSA_NAME)
3843 op = SSA_VAL (op);
3845 /* First we want to look up using the *vuses* from the store and see
3846 whether the last store to this location with the same address
3847 had the same value.
3849 The vuses represent the memory state before the store. If the
3850 memory state, address, and value of the store are the same as the
3851 last store to this location, then this store will produce the
3852 same memory state as that store.
3854 In this case the vdef versions for this store are value numbered to those
3855 vuse versions, since they represent the same memory state after
3856 this store.
3858 Otherwise, the vdefs for the store are used when inserting into
3859 the table, since the store generates a new memory state. */
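/* An illustrative sketch (editorial example, made-up SSA names):
     # .MEM_2 = VDEF <.MEM_1>
     *p_3 = x_4;
     # .MEM_5 = VDEF <.MEM_2>
     *p_3 = x_4;
   If the lookup with the second store's vuse .MEM_2 finds the first
   store writing the same value to the same address, the second store
   does not change memory and .MEM_5 is value numbered to .MEM_2 below. */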
3861 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
3862 if (vnresult
3863 && vnresult->result)
3865 tree result = vnresult->result;
3866 if (TREE_CODE (result) == SSA_NAME)
3867 result = SSA_VAL (result);
3868 resultsame = expressions_equal_p (result, op);
3869 if (resultsame)
3871 /* If the TBAA state isn't compatible for downstream reads
3872 we cannot value-number the VDEFs the same. */
3873 alias_set_type set = get_alias_set (lhs);
3874 if (vnresult->set != set
3875 && ! alias_set_subset_of (set, vnresult->set))
3876 resultsame = false;
3880 if (!resultsame)
3882 /* Only perform the following when being called from PRE
3883 which embeds tail merging. */
3884 if (default_vn_walk_kind == VN_WALK)
3886 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3887 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3888 if (vnresult)
3890 VN_INFO (vdef)->use_processed = true;
3891 return set_ssa_val_to (vdef, vnresult->result_vdef);
3895 if (dump_file && (dump_flags & TDF_DETAILS))
3897 fprintf (dump_file, "No store match\n");
3898 fprintf (dump_file, "Value numbering store ");
3899 print_generic_expr (dump_file, lhs);
3900 fprintf (dump_file, " to ");
3901 print_generic_expr (dump_file, op);
3902 fprintf (dump_file, "\n");
3904 /* Have to set value numbers before insert, since insert is
3905 going to valueize the references in-place. */
3906 if (vdef)
3907 changed |= set_ssa_val_to (vdef, vdef);
3909 /* Do not insert structure copies into the tables. */
3910 if (is_gimple_min_invariant (op)
3911 || is_gimple_reg (op))
3912 vn_reference_insert (lhs, op, vdef, NULL);
3914 /* Only perform the following when being called from PRE
3915 which embeds tail merging. */
3916 if (default_vn_walk_kind == VN_WALK)
3918 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3919 vn_reference_insert (assign, lhs, vuse, vdef);
3922 else
3924 /* We had a match, so value number the vdef to have the value
3925 number of the vuse it came from. */
3927 if (dump_file && (dump_flags & TDF_DETAILS))
3928 fprintf (dump_file, "Store matched earlier value, "
3929 "value numbering store vdefs to matching vuses.\n");
3931 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3934 return changed;
3937 /* Visit and value number PHI, return true if the value number
3938 changed. */
3940 static bool
3941 visit_phi (gimple *phi)
3943 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
3944 tree sameval_base = NULL_TREE;
3945 poly_int64 soff, doff;
3946 unsigned n_executable = 0;
3947 bool allsame = true;
3948 edge_iterator ei;
3949 edge e;
3951 /* TODO: We could check for this in init_sccvn, and replace this
3952 with a gcc_assert. */
3953 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3954 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3956 /* See if all non-TOP arguments have the same value. TOP is
3957 equivalent to everything, so we can ignore it. */
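/* An illustrative sketch (editorial example, made-up SSA names): for
     x_3 = PHI <a_1(3), a_1(4), b_2(5)>
   where the edge from block 5 was found not executable, both remaining
   arguments have value a_1, so x_3 can be value numbered to a_1. */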
3958 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3959 if (e->flags & EDGE_EXECUTABLE)
3961 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3963 ++n_executable;
3964 if (TREE_CODE (def) == SSA_NAME)
3965 def = SSA_VAL (def);
3966 if (def == VN_TOP)
3968 /* Ignore undefined defs for sameval but record one. */
3969 else if (TREE_CODE (def) == SSA_NAME
3970 && ssa_undefined_value_p (def, false))
3971 seen_undef = def;
3972 else if (sameval == VN_TOP)
3973 sameval = def;
3974 else if (!expressions_equal_p (def, sameval))
3976 /* We know we're arriving only with invariant addresses here,
3977 try harder comparing them. We can do some caching here
3978 which we cannot do in expressions_equal_p. */
3979 if (TREE_CODE (def) == ADDR_EXPR
3980 && TREE_CODE (sameval) == ADDR_EXPR
3981 && sameval_base != (void *)-1)
3983 if (!sameval_base)
3984 sameval_base = get_addr_base_and_unit_offset
3985 (TREE_OPERAND (sameval, 0), &soff);
3986 if (!sameval_base)
3987 sameval_base = (tree)(void *)-1;
3988 else if ((get_addr_base_and_unit_offset
3989 (TREE_OPERAND (def, 0), &doff) == sameval_base)
3990 && known_eq (soff, doff))
3991 continue;
3993 allsame = false;
3994 break;
3999 /* If none of the edges was executable keep the value-number at VN_TOP,
4000 if only a single edge is executable use its value. */
4001 if (n_executable <= 1)
4002 result = seen_undef ? seen_undef : sameval;
4003 /* If we saw only undefined values and VN_TOP use one of the
4004 undefined values. */
4005 else if (sameval == VN_TOP)
4006 result = seen_undef ? seen_undef : sameval;
4007 /* First see if it is equivalent to a phi node in this block. We prefer
4008 this as it allows IV elimination - see PRs 66502 and 67167. */
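/* E.g. (an illustrative editorial sketch) two induction variables
     i_1 = PHI <0(2), i_3(4)>
     j_2 = PHI <0(2), j_4(4)>
   where i_3 and j_4 were already value numbered the same become
   congruent here, so j_2 gets the value number of i_1 and the
   second IV can be eliminated. */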
4009 else if ((result = vn_phi_lookup (phi)))
4011 /* If all values are the same use that, unless we've seen undefined
4012 values as well and the value isn't constant.
4013 CCP/copyprop have the same restriction to not remove uninit warnings. */
4014 else if (allsame
4015 && (! seen_undef || is_gimple_min_invariant (sameval)))
4016 result = sameval;
4017 else
4019 result = PHI_RESULT (phi);
4020 /* Only insert PHIs that are varying; for constant value numbers
4021 we would mess up equivalences otherwise, as we only compare
4022 the immediate controlling predicates. */
4023 vn_phi_insert (phi, result);
4026 return set_ssa_val_to (PHI_RESULT (phi), result);
4029 /* Try to simplify RHS using equivalences and constant folding. */
4031 static tree
4032 try_to_simplify (gassign *stmt)
4034 enum tree_code code = gimple_assign_rhs_code (stmt);
4035 tree tem;
4037 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
4038 in this case; there is no point in doing extra work. */
4039 if (code == SSA_NAME)
4040 return NULL_TREE;
4042 /* First try constant folding based on our current lattice. */
4043 mprts_hook = vn_lookup_simplify_result;
4044 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
4045 mprts_hook = NULL;
4046 if (tem
4047 && (TREE_CODE (tem) == SSA_NAME
4048 || is_gimple_min_invariant (tem)))
4049 return tem;
4051 return NULL_TREE;
4054 /* Visit and value number USE, return true if the value number
4055 changed. */
4057 static bool
4058 visit_use (tree use)
4060 bool changed = false;
4061 gimple *stmt = SSA_NAME_DEF_STMT (use);
4063 mark_use_processed (use);
4065 gcc_assert (!SSA_NAME_IN_FREE_LIST (use)
4066 && !SSA_NAME_IS_DEFAULT_DEF (use));
4068 if (dump_file && (dump_flags & TDF_DETAILS))
4070 fprintf (dump_file, "Value numbering ");
4071 print_generic_expr (dump_file, use);
4072 fprintf (dump_file, " stmt = ");
4073 print_gimple_stmt (dump_file, stmt, 0);
4076 if (gimple_code (stmt) == GIMPLE_PHI)
4077 changed = visit_phi (stmt);
4078 else if (gimple_has_volatile_ops (stmt))
4079 changed = defs_to_varying (stmt);
4080 else if (gassign *ass = dyn_cast <gassign *> (stmt))
4082 enum tree_code code = gimple_assign_rhs_code (ass);
4083 tree lhs = gimple_assign_lhs (ass);
4084 tree rhs1 = gimple_assign_rhs1 (ass);
4085 tree simplified;
4087 /* Shortcut for copies. Simplifying copies is pointless,
4088 since we copy the expression and value they represent. */
4089 if (code == SSA_NAME
4090 && TREE_CODE (lhs) == SSA_NAME)
4092 changed = visit_copy (lhs, rhs1);
4093 goto done;
4095 simplified = try_to_simplify (ass);
4096 if (simplified)
4098 if (dump_file && (dump_flags & TDF_DETAILS))
4100 fprintf (dump_file, "RHS ");
4101 print_gimple_expr (dump_file, ass, 0);
4102 fprintf (dump_file, " simplified to ");
4103 print_generic_expr (dump_file, simplified);
4104 fprintf (dump_file, "\n");
4107 /* Setting value numbers to constants will occasionally
4108 screw up phi congruence because constants are not
4109 uniquely associated with a single ssa name that can be
4110 looked up. */
4111 if (simplified
4112 && is_gimple_min_invariant (simplified)
4113 && TREE_CODE (lhs) == SSA_NAME)
4115 changed = set_ssa_val_to (lhs, simplified);
4116 goto done;
4118 else if (simplified
4119 && TREE_CODE (simplified) == SSA_NAME
4120 && TREE_CODE (lhs) == SSA_NAME)
4122 changed = visit_copy (lhs, simplified);
4123 goto done;
4126 if ((TREE_CODE (lhs) == SSA_NAME
4127 /* We can substitute SSA_NAMEs that are live over
4128 abnormal edges with their constant value. */
4129 && !(gimple_assign_copy_p (ass)
4130 && is_gimple_min_invariant (rhs1))
4131 && !(simplified
4132 && is_gimple_min_invariant (simplified))
4133 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4134 /* Stores or copies from SSA_NAMEs that are live over
4135 abnormal edges are a problem. */
4136 || (code == SSA_NAME
4137 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4138 changed = defs_to_varying (ass);
4139 else if (REFERENCE_CLASS_P (lhs)
4140 || DECL_P (lhs))
4141 changed = visit_reference_op_store (lhs, rhs1, ass);
4142 else if (TREE_CODE (lhs) == SSA_NAME)
4144 if ((gimple_assign_copy_p (ass)
4145 && is_gimple_min_invariant (rhs1))
4146 || (simplified
4147 && is_gimple_min_invariant (simplified)))
4149 if (simplified)
4150 changed = set_ssa_val_to (lhs, simplified);
4151 else
4152 changed = set_ssa_val_to (lhs, rhs1);
4154 else
4156 /* Visit the original statement. */
4157 switch (vn_get_stmt_kind (ass))
4159 case VN_NARY:
4160 changed = visit_nary_op (lhs, ass);
4161 break;
4162 case VN_REFERENCE:
4163 changed = visit_reference_op_load (lhs, rhs1, ass);
4164 break;
4165 default:
4166 changed = defs_to_varying (ass);
4167 break;
4171 else
4172 changed = defs_to_varying (ass);
4174 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4176 tree lhs = gimple_call_lhs (call_stmt);
4177 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4179 /* Try constant folding based on our current lattice. */
4180 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4181 vn_valueize);
4182 if (simplified)
4184 if (dump_file && (dump_flags & TDF_DETAILS))
4186 fprintf (dump_file, "call ");
4187 print_gimple_expr (dump_file, call_stmt, 0);
4188 fprintf (dump_file, " simplified to ");
4189 print_generic_expr (dump_file, simplified);
4190 fprintf (dump_file, "\n");
4193 /* Setting value numbers to constants will occasionally
4194 screw up phi congruence because constants are not
4195 uniquely associated with a single ssa name that can be
4196 looked up. */
4197 if (simplified
4198 && is_gimple_min_invariant (simplified))
4200 changed = set_ssa_val_to (lhs, simplified);
4201 if (gimple_vdef (call_stmt))
4202 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4203 SSA_VAL (gimple_vuse (call_stmt)));
4204 goto done;
4206 else if (simplified
4207 && TREE_CODE (simplified) == SSA_NAME)
4209 changed = visit_copy (lhs, simplified);
4210 if (gimple_vdef (call_stmt))
4211 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4212 SSA_VAL (gimple_vuse (call_stmt)));
4213 goto done;
4215 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4217 changed = defs_to_varying (call_stmt);
4218 goto done;
4222 /* Pick up flags from a devirtualization target. */
4223 tree fn = gimple_call_fn (stmt);
4224 int extra_fnflags = 0;
4225 if (fn && TREE_CODE (fn) == SSA_NAME)
4227 fn = SSA_VAL (fn);
4228 if (TREE_CODE (fn) == ADDR_EXPR
4229 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4230 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4232 if (!gimple_call_internal_p (call_stmt)
4233 && (/* Calls to the same function with the same vuse
4234 and the same operands do not necessarily return the same
4235 value, unless they're pure or const. */
4236 ((gimple_call_flags (call_stmt) | extra_fnflags)
4237 & (ECF_PURE | ECF_CONST))
4238 /* If calls have a vdef, subsequent calls won't have
4239 the same incoming vuse. So, if 2 calls with vdef have the
4240 same vuse, we know they're not subsequent.
4241 We can value number 2 non-subsequent calls to the same
4242 function with the same vuse and the same operands the same,
4243 because there is no code in the program that can
4244 compare the 2 values... */
4245 || (gimple_vdef (call_stmt)
4246 /* ... unless the call returns a pointer which does
4247 not alias with anything else. In which case the
4248 information that the values are distinct is encoded
4249 in the IL. */
4250 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4251 /* Only perform the following when being called from PRE
4252 which embeds tail merging. */
4253 && default_vn_walk_kind == VN_WALK)))
4254 changed = visit_reference_op_call (lhs, call_stmt);
4255 else
4256 changed = defs_to_varying (call_stmt);
4258 else
4259 changed = defs_to_varying (stmt);
4260 done:
4261 return changed;
4264 /* Compare two operands by reverse postorder index */
4266 static int
4267 compare_ops (const void *pa, const void *pb)
4269 const tree opa = *((const tree *)pa);
4270 const tree opb = *((const tree *)pb);
4271 gimple *opstmta = SSA_NAME_DEF_STMT (opa);
4272 gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
4273 basic_block bba;
4274 basic_block bbb;
4276 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
4277 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4278 else if (gimple_nop_p (opstmta))
4279 return -1;
4280 else if (gimple_nop_p (opstmtb))
4281 return 1;
4283 bba = gimple_bb (opstmta);
4284 bbb = gimple_bb (opstmtb);
4286 if (!bba && !bbb)
4287 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4288 else if (!bba)
4289 return -1;
4290 else if (!bbb)
4291 return 1;
4293 if (bba == bbb)
4295 if (gimple_code (opstmta) == GIMPLE_PHI
4296 && gimple_code (opstmtb) == GIMPLE_PHI)
4297 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4298 else if (gimple_code (opstmta) == GIMPLE_PHI)
4299 return -1;
4300 else if (gimple_code (opstmtb) == GIMPLE_PHI)
4301 return 1;
4302 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
4303 return gimple_uid (opstmta) - gimple_uid (opstmtb);
4304 else
4305 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4307 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
4310 /* Sort an array containing members of a strongly connected component
4311 SCC so that the members are ordered by RPO number.
4312 This means that when the sort is complete, iterating through the
4313 array will give you the members in RPO order. */
4315 static void
4316 sort_scc (vec<tree> scc)
4318 scc.qsort (compare_ops);
4321 /* Process a strongly connected component in the SSA graph. */
4323 static void
4324 process_scc (vec<tree> scc)
4326 tree var;
4327 unsigned int i;
4328 unsigned int iterations = 0;
4329 bool changed = true;
4331 /* If the SCC has a single member, just visit it. */
4332 if (scc.length () == 1)
4334 tree use = scc[0];
4335 if (VN_INFO (use)->use_processed)
4336 return;
4337 /* We need to make sure it doesn't form a cycle itself, which can
4338 happen for self-referential PHI nodes. In that case we would
4339 end up inserting an expression with VN_TOP operands into the
4340 valid table which makes us derive bogus equivalences later.
4341 The cheapest way to check this is to assume it for all PHI nodes. */
4342 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
4343 /* Fallthru to iteration. */ ;
4344 else
4346 visit_use (use);
4347 return;
4351 if (dump_file && (dump_flags & TDF_DETAILS))
4352 print_scc (dump_file, scc);
4354 /* Iterate over the SCC with the optimistic table until it stops
4355 changing. */
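/* An illustrative sketch (editorial example, made-up SSA names): for the SCC
     x_1 = PHI <0(2), x_2(3)>
     x_2 = x_1 + 0;
   the first iteration optimistically values x_1 to 0 (its only non-TOP
   argument) and then x_2 to 0 as well; the second iteration changes
   nothing, so we converge with the whole cycle value numbered to 0. */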
4356 while (changed)
4358 changed = false;
4359 iterations++;
4360 if (dump_file && (dump_flags & TDF_DETAILS))
4361 fprintf (dump_file, "Starting iteration %d\n", iterations);
4362 /* As we are value-numbering optimistically we have to
4363 clear the expression tables and the simplified expressions
4364 in each iteration until we converge. */
4365 void *ob_top = obstack_alloc (&vn_tables_obstack, 0);
4366 vn_reference_t ref_top = last_inserted_ref;
4367 vn_phi_t phi_top = last_inserted_phi;
4368 vn_nary_op_t nary_top = last_inserted_nary;
4369 FOR_EACH_VEC_ELT (scc, i, var)
4370 gcc_assert (!VN_INFO (var)->needs_insertion
4371 && VN_INFO (var)->expr == NULL);
4372 FOR_EACH_VEC_ELT (scc, i, var)
4373 changed |= visit_use (var);
4374 if (changed)
4376 for (; last_inserted_nary != nary_top;
4377 last_inserted_nary = last_inserted_nary->next)
4379 vn_nary_op_t *slot;
4380 slot = valid_info->nary->find_slot_with_hash (last_inserted_nary,
4381 last_inserted_nary->hashcode,
4382 NO_INSERT);
4383 gcc_assert (slot);
4384 valid_info->nary->clear_slot (slot);
4386 for (; last_inserted_phi != phi_top;
4387 last_inserted_phi = last_inserted_phi->next)
4389 vn_phi_t *slot;
4390 slot = valid_info->phis->find_slot_with_hash (last_inserted_phi,
4391 last_inserted_phi->hashcode,
4392 NO_INSERT);
4393 gcc_assert (slot);
4394 valid_info->phis->clear_slot (slot);
4396 for (; last_inserted_ref != ref_top;
4397 last_inserted_ref = last_inserted_ref->next)
4399 vn_reference_t *slot;
4400 slot = valid_info->references->find_slot_with_hash (last_inserted_ref,
4401 last_inserted_ref->hashcode,
4402 NO_INSERT);
4403 gcc_assert (slot);
4404 (*slot)->operands.release ();
4405 valid_info->references->clear_slot (slot);
4407 obstack_free (&vn_tables_obstack, ob_top);
4411 if (dump_file && (dump_flags & TDF_DETAILS))
4412 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4413 statistics_histogram_event (cfun, "SCC iterations", iterations);
4417 /* Pop the components of the found SCC for NAME off the SCC stack
4418 and process them. */
4421 static void
4422 extract_and_process_scc_for_name (tree name)
4424 auto_vec<tree> scc;
4425 tree x;
4427 /* Found an SCC, pop the components off the SCC stack and
4428 process them. */
4431 x = sccstack.pop ();
4433 VN_INFO (x)->on_sccstack = false;
4434 scc.safe_push (x);
4435 } while (x != name);
4437 /* Drop all defs in the SCC to varying in case an SCC turns out to be
4438 incredibly large.
4439 ??? Just switch to a non-optimistic mode that avoids any iteration. */
4440 if (scc.length () > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4442 if (dump_file)
4444 print_scc (dump_file, scc);
4445 fprintf (dump_file, "WARNING: Giving up value-numbering SCC due to "
4446 "size %u exceeding %u\n", scc.length (),
4447 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4449 tree var;
4450 unsigned i;
4451 FOR_EACH_VEC_ELT (scc, i, var)
4453 gimple *def = SSA_NAME_DEF_STMT (var);
4454 mark_use_processed (var);
4455 if (SSA_NAME_IS_DEFAULT_DEF (var)
4456 || gimple_code (def) == GIMPLE_PHI)
4457 set_ssa_val_to (var, var);
4458 else
4459 defs_to_varying (def);
4461 return;
4464 if (scc.length () > 1)
4465 sort_scc (scc);
4467 process_scc (scc);
4470 /* Depth first search on NAME to discover and process SCC's in the SSA
4471 graph.
4472 Execution of this algorithm relies on the fact that the SCC's are
4473 popped off the stack in topological order. */
4477 static void
4478 DFS (tree name)
4480 auto_vec<ssa_op_iter> itervec;
4481 auto_vec<tree> namevec;
4482 use_operand_p usep = NULL;
4483 gimple *defstmt;
4484 tree use;
4485 ssa_op_iter iter;
4487 start_over:
4488 /* SCC info */
4489 VN_INFO (name)->dfsnum = next_dfs_num++;
4490 VN_INFO (name)->visited = true;
4491 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4493 sccstack.safe_push (name);
4494 VN_INFO (name)->on_sccstack = true;
4495 defstmt = SSA_NAME_DEF_STMT (name);
4497 /* Recursively DFS on our operands, looking for SCC's. */
4498 if (!gimple_nop_p (defstmt))
4500 /* Push a new iterator. */
4501 if (gphi *phi = dyn_cast <gphi *> (defstmt))
4502 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4503 else
4504 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4506 else
4507 clear_and_done_ssa_iter (&iter);
4509 while (1)
4511 /* If we are done processing uses of a name, go up the stack
4512 of iterators and process SCCs as we found them. */
4513 if (op_iter_done (&iter))
4515 /* See if we found an SCC. */
4516 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4517 extract_and_process_scc_for_name (name);
4519 /* Check if we are done. */
4520 if (namevec.is_empty ())
4521 return;
4523 /* Restore the last use walker and continue walking there. */
4524 use = name;
4525 name = namevec.pop ();
4526 memcpy (&iter, &itervec.last (),
4527 sizeof (ssa_op_iter));
4528 itervec.pop ();
4529 goto continue_walking;
4532 use = USE_FROM_PTR (usep);
4534 /* Since we handle phi nodes, we will sometimes get
4535 invariants in the use expression. */
4536 if (TREE_CODE (use) == SSA_NAME)
4538 if (! (VN_INFO (use)->visited))
4540 /* Recurse by pushing the current use walking state on
4541 the stack and starting over. */
4542 itervec.safe_push (iter);
4543 namevec.safe_push (name);
4544 name = use;
4545 goto start_over;
4547 continue_walking:
4548 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4549 VN_INFO (use)->low);
4551 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4552 && VN_INFO (use)->on_sccstack)
4554 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4555 VN_INFO (name)->low);
4559 usep = op_iter_next_use (&iter);
4563 /* Allocate a value number table. */
4565 static void
4566 allocate_vn_table (vn_tables_t table)
4568 table->phis = new vn_phi_table_type (23);
4569 table->nary = new vn_nary_op_table_type (23);
4570 table->references = new vn_reference_table_type (23);
4573 /* Free a value number table. */
4575 static void
4576 free_vn_table (vn_tables_t table)
4578 /* Walk over elements and release vectors. */
4579 vn_reference_iterator_type hir;
4580 vn_reference_t vr;
4581 FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
4582 vr->operands.release ();
4583 delete table->phis;
4584 table->phis = NULL;
4585 delete table->nary;
4586 table->nary = NULL;
4587 delete table->references;
4588 table->references = NULL;
4591 static void
4592 init_scc_vn (void)
4594 int j;
4595 int *rpo_numbers_temp;
4597 calculate_dominance_info (CDI_DOMINATORS);
4598 mark_dfs_back_edges ();
4600 sccstack.create (0);
4601 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4603 constant_value_ids = BITMAP_ALLOC (NULL);
4605 next_dfs_num = 1;
4606 next_value_id = 1;
4608 vn_ssa_aux_table.create (num_ssa_names + 1);
4609 /* VEC_alloc doesn't actually grow it to the right size, it just
4610 preallocates the space to do so. */
4611 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4612 gcc_obstack_init (&vn_ssa_aux_obstack);
4614 shared_lookup_references.create (0);
4615 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4616 rpo_numbers_temp =
4617 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4618 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4620 /* rpo_numbers_temp is an array in RPO order: rpo_numbers_temp[i] = bb
4621 means that the i'th block in RPO order is bb. We want to map bb's
4622 to RPO numbers, so we need to invert this array. */
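/* E.g. if rpo_numbers_temp is {3, 5, 4} the loop below sets
   rpo_numbers[3] = 0, rpo_numbers[5] = 1 and rpo_numbers[4] = 2
   (an illustrative sketch with made-up block indices). */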
4623 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4624 rpo_numbers[rpo_numbers_temp[j]] = j;
4626 XDELETE (rpo_numbers_temp);
4628 VN_TOP = build_decl (UNKNOWN_LOCATION, VAR_DECL,
4629 get_identifier ("VN_TOP"), error_mark_node);
4631 renumber_gimple_stmt_uids ();
4633 /* Create the valid and optimistic value numbering tables. */
4634 gcc_obstack_init (&vn_tables_obstack);
4635 gcc_obstack_init (&vn_tables_insert_obstack);
4636 valid_info = XCNEW (struct vn_tables_s);
4637 allocate_vn_table (valid_info);
4638 last_inserted_ref = NULL;
4639 last_inserted_phi = NULL;
4640 last_inserted_nary = NULL;
4642 /* Create the VN_INFO structures, and initialize value numbers to
4643 TOP or VARYING for parameters. */
4644 size_t i;
4645 tree name;
4647 FOR_EACH_SSA_NAME (i, name, cfun)
4649 VN_INFO_GET (name)->valnum = VN_TOP;
4650 VN_INFO (name)->needs_insertion = false;
4651 VN_INFO (name)->expr = NULL;
4652 VN_INFO (name)->value_id = 0;
4654 if (!SSA_NAME_IS_DEFAULT_DEF (name))
4655 continue;
4657 switch (TREE_CODE (SSA_NAME_VAR (name)))
4659 case VAR_DECL:
4660 /* All undefined vars are VARYING. */
4661 VN_INFO (name)->valnum = name;
4662 VN_INFO (name)->visited = true;
4663 break;
4665 case PARM_DECL:
4666 /* Parameters are VARYING but we can record a condition
4667 if we know it is a non-NULL pointer. */
4668 VN_INFO (name)->visited = true;
4669 VN_INFO (name)->valnum = name;
4670 if (POINTER_TYPE_P (TREE_TYPE (name))
4671 && nonnull_arg_p (SSA_NAME_VAR (name)))
4673 tree ops[2];
4674 ops[0] = name;
4675 ops[1] = build_int_cst (TREE_TYPE (name), 0);
4676 vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4677 boolean_true_node, 0);
4678 if (dump_file && (dump_flags & TDF_DETAILS))
4680 fprintf (dump_file, "Recording ");
4681 print_generic_expr (dump_file, name, TDF_SLIM);
4682 fprintf (dump_file, " != 0\n");
4685 break;
4687 case RESULT_DECL:
4688 /* If the result is passed by invisible reference the default
4689 def is initialized, otherwise it's uninitialized. Still
4690 undefined is varying. */
4691 VN_INFO (name)->visited = true;
4692 VN_INFO (name)->valnum = name;
4693 break;
4695 default:
4696 gcc_unreachable ();
4701 /* Restore SSA info that has been reset on value leaders. */
4703 void
4704 scc_vn_restore_ssa_info (void)
4706 unsigned i;
4707 tree name;
4709 FOR_EACH_SSA_NAME (i, name, cfun)
4711 if (has_VN_INFO (name))
4713 if (VN_INFO (name)->needs_insertion)
4715 else if (POINTER_TYPE_P (TREE_TYPE (name))
4716 && VN_INFO (name)->info.ptr_info)
4717 SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4718 else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4719 && VN_INFO (name)->info.range_info)
4721 SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4722 SSA_NAME_ANTI_RANGE_P (name)
4723 = VN_INFO (name)->range_info_anti_range_p;
4729 void
4730 free_scc_vn (void)
4732 size_t i;
4733 tree name;
4735 delete constant_to_value_id;
4736 constant_to_value_id = NULL;
4737 BITMAP_FREE (constant_value_ids);
4738 shared_lookup_references.release ();
4739 XDELETEVEC (rpo_numbers);
4741 FOR_EACH_SSA_NAME (i, name, cfun)
4743 if (has_VN_INFO (name)
4744 && VN_INFO (name)->needs_insertion)
4745 release_ssa_name (name);
4747 obstack_free (&vn_ssa_aux_obstack, NULL);
4748 vn_ssa_aux_table.release ();
4750 sccstack.release ();
4751 free_vn_table (valid_info);
4752 XDELETE (valid_info);
4753 obstack_free (&vn_tables_obstack, NULL);
4754 obstack_free (&vn_tables_insert_obstack, NULL);
4756 BITMAP_FREE (const_parms);
4759 /* Set *ID according to RESULT. */
4761 static void
4762 set_value_id_for_result (tree result, unsigned int *id)
4764 if (result && TREE_CODE (result) == SSA_NAME)
4765 *id = VN_INFO (result)->value_id;
4766 else if (result && is_gimple_min_invariant (result))
4767 *id = get_or_alloc_constant_value_id (result);
4768 else
4769 *id = get_next_value_id ();
4772 /* Set the value ids in the valid hash tables. */
4774 static void
4775 set_hashtable_value_ids (void)
4777 vn_nary_op_iterator_type hin;
4778 vn_phi_iterator_type hip;
4779 vn_reference_iterator_type hir;
4780 vn_nary_op_t vno;
4781 vn_reference_t vr;
4782 vn_phi_t vp;
4784 /* Now set the value ids of the things we had put in the hash
4785 table. */
4787 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4788 set_value_id_for_result (vno->result, &vno->value_id);
4790 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4791 set_value_id_for_result (vp->result, &vp->value_id);
4793 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4794 hir)
4795 set_value_id_for_result (vr->result, &vr->value_id);
4798 class sccvn_dom_walker : public dom_walker
4800 public:
4801 sccvn_dom_walker ()
4802 : dom_walker (CDI_DOMINATORS, REACHABLE_BLOCKS), cond_stack (0) {}
4804 virtual edge before_dom_children (basic_block);
4805 virtual void after_dom_children (basic_block);
4807 void record_cond (basic_block,
4808 enum tree_code code, tree lhs, tree rhs, bool value);
4809 void record_conds (basic_block,
4810 enum tree_code code, tree lhs, tree rhs, bool value);
4812 auto_vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4813 cond_stack;
4816 /* Record a temporary condition for the BB and its dominated blocks. */
4818 void
4819 sccvn_dom_walker::record_cond (basic_block bb,
4820 enum tree_code code, tree lhs, tree rhs,
4821 bool value)
4823 tree ops[2] = { lhs, rhs };
4824 vn_nary_op_t old = NULL;
4825 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4826 valid_info->nary->remove_elt_with_hash (old, old->hashcode);
4827 vn_nary_op_t cond
4828 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4829 value
4830 ? boolean_true_node
4831 : boolean_false_node, 0);
4832 if (dump_file && (dump_flags & TDF_DETAILS))
4834 fprintf (dump_file, "Recording temporarily ");
4835 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4836 fprintf (dump_file, " %s ", get_tree_code_name (code));
4837 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4838 fprintf (dump_file, " == %s%s\n",
4839 value ? "true" : "false",
4840 old ? " (old entry saved)" : "");
4842 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4845 /* Record temporary conditions for the BB and its dominated blocks
4846 according to LHS CODE RHS == VALUE and its dominated conditions. */
4848 void
4849 sccvn_dom_walker::record_conds (basic_block bb,
4850 enum tree_code code, tree lhs, tree rhs,
4851 bool value)
4853 /* Record the original condition. */
4854 record_cond (bb, code, lhs, rhs, value);
4856 if (!value)
4857 return;
4859 /* Record dominated conditions if the condition is true. Note that
4860 the inversion is already recorded. */
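/* E.g. recording a_1 < b_2 == true below also records
   a_1 <= b_2 == true, a_1 != b_2 == true and a_1 == b_2 == false
   (an illustrative sketch with made-up SSA names). */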
4861 switch (code)
4863 case LT_EXPR:
4864 case GT_EXPR:
4865 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4866 record_cond (bb, NE_EXPR, lhs, rhs, true);
4867 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4868 break;
4870 case EQ_EXPR:
4871 record_cond (bb, LE_EXPR, lhs, rhs, true);
4872 record_cond (bb, GE_EXPR, lhs, rhs, true);
4873 record_cond (bb, LT_EXPR, lhs, rhs, false);
4874 record_cond (bb, GT_EXPR, lhs, rhs, false);
4875 break;
4877 default:
4878 break;
4882 /* Restore expressions and values derived from conditionals. */
4884 void
4885 sccvn_dom_walker::after_dom_children (basic_block bb)
4887 while (!cond_stack.is_empty ()
4888 && cond_stack.last ().first == bb)
4890 vn_nary_op_t cond = cond_stack.last ().second.first;
4891 vn_nary_op_t old = cond_stack.last ().second.second;
4892 valid_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4893 if (old)
4894 vn_nary_op_insert_into (old, valid_info->nary, false);
4895 cond_stack.pop ();
4899 /* Value number all statements in BB. */
4901 edge
4902 sccvn_dom_walker::before_dom_children (basic_block bb)
4904 if (dump_file && (dump_flags & TDF_DETAILS))
4905 fprintf (dump_file, "Visiting BB %d\n", bb->index);
4907 /* If we have a single predecessor record the equivalence from a
4908 possible condition on the predecessor edge. */
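/* E.g. (an illustrative editorial sketch) if the single predecessor ends in
     if (a_1 > 0) goto bb3; else goto bb4;
   and this block is bb3, we temporarily record a_1 > 0 == true
   (and the inverted comparison a_1 <= 0 == false) while value
   numbering this block and the blocks it dominates. */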
4909 edge pred_e = single_pred_edge_ignoring_loop_edges (bb, false);
4910 if (pred_e)
4912 /* Check if there are multiple executable successor edges in
4913 the source block. Otherwise there is no additional info
4914 to be recorded. */
4915 edge_iterator ei;
4916 edge e2;
4917 FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4918 if (e2 != pred_e
4919 && e2->flags & EDGE_EXECUTABLE)
4920 break;
4921 if (e2 && (e2->flags & EDGE_EXECUTABLE))
4923 gimple *stmt = last_stmt (pred_e->src);
4924 if (stmt
4925 && gimple_code (stmt) == GIMPLE_COND)
4927 enum tree_code code = gimple_cond_code (stmt);
4928 tree lhs = gimple_cond_lhs (stmt);
4929 tree rhs = gimple_cond_rhs (stmt);
4930 record_conds (bb, code, lhs, rhs,
4931 (pred_e->flags & EDGE_TRUE_VALUE) != 0);
4932 code = invert_tree_comparison (code, HONOR_NANS (lhs));
4933 if (code != ERROR_MARK)
4934 record_conds (bb, code, lhs, rhs,
4935 (pred_e->flags & EDGE_TRUE_VALUE) == 0);
4940 /* Value-number all defs in the basic-block. */
4941 for (gphi_iterator gsi = gsi_start_phis (bb);
4942 !gsi_end_p (gsi); gsi_next (&gsi))
4944 gphi *phi = gsi.phi ();
4945 tree res = PHI_RESULT (phi);
4946 if (!VN_INFO (res)->visited)
4947 DFS (res);
4949 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4950 !gsi_end_p (gsi); gsi_next (&gsi))
4952 ssa_op_iter i;
4953 tree op;
4954 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4955 if (!VN_INFO (op)->visited)
4956 DFS (op);
4959 /* Finally look at the last stmt. */
4960 gimple *stmt = last_stmt (bb);
4961 if (!stmt)
4962 return NULL;
4964 enum gimple_code code = gimple_code (stmt);
4965 if (code != GIMPLE_COND
4966 && code != GIMPLE_SWITCH
4967 && code != GIMPLE_GOTO)
4968 return NULL;
4970 if (dump_file && (dump_flags & TDF_DETAILS))
4972 fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
4973 print_gimple_stmt (dump_file, stmt, 0);
4976 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4977 if value-numbering can prove they are not reachable. Handling
4978 computed gotos is also possible. */
4979 tree val;
4980 switch (code)
4982 case GIMPLE_COND:
4984 tree lhs = vn_valueize (gimple_cond_lhs (stmt));
4985 tree rhs = vn_valueize (gimple_cond_rhs (stmt));
4986 val = gimple_simplify (gimple_cond_code (stmt),
4987 boolean_type_node, lhs, rhs,
4988 NULL, vn_valueize);
4989 /* If that didn't simplify to a constant see if we have recorded
4990 temporary expressions from taken edges. */
4991 if (!val || TREE_CODE (val) != INTEGER_CST)
4993 tree ops[2];
4994 ops[0] = lhs;
4995 ops[1] = rhs;
4996 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
4997 boolean_type_node, ops, NULL);
4999 break;
5001 case GIMPLE_SWITCH:
5002 val = gimple_switch_index (as_a <gswitch *> (stmt));
5003 break;
5004 case GIMPLE_GOTO:
5005 val = gimple_goto_dest (stmt);
5006 break;
5007 default:
5008 gcc_unreachable ();
5010 if (!val)
5011 return NULL;
5013 edge taken = find_taken_edge (bb, vn_valueize (val));
5014 if (!taken)
5015 return NULL;
5017 if (dump_file && (dump_flags & TDF_DETAILS))
5018 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
5019 "not executable\n", bb->index, bb->index, taken->dest->index);
5021 return taken;
5024 /* Do SCCVN. DEFAULT_VN_WALK_KIND_ specifies how we use the
5025 alias oracle walking during the VN process. */
5028 void
5029 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
5031 size_t i;
5033 default_vn_walk_kind = default_vn_walk_kind_;
5035 init_scc_vn ();
5037 /* Collect pointers we know point to readonly memory. */
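/* E.g. for a function whose "fn spec" string is ".R." the first
   argument is marked 'R', so its default-def SSA name is added to
   const_parms below (an illustrative sketch assuming the usual
   fn-spec layout: one leading character for the return value,
   then one character per argument). */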
5038 const_parms = BITMAP_ALLOC (NULL);
5039 tree fnspec = lookup_attribute ("fn spec",
5040 TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
5041 if (fnspec)
5043 fnspec = TREE_VALUE (TREE_VALUE (fnspec));
5044 i = 1;
5045 for (tree arg = DECL_ARGUMENTS (cfun->decl);
5046 arg; arg = DECL_CHAIN (arg), ++i)
5048 if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
5049 break;
5050 if (TREE_STRING_POINTER (fnspec)[i] == 'R'
5051 || TREE_STRING_POINTER (fnspec)[i] == 'r')
5053 tree name = ssa_default_def (cfun, arg);
5054 if (name)
5055 bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
5060 /* Walk all blocks in dominator order, value-numbering stmts
5061 SSA defs and decide whether outgoing edges are not executable. */
5062 sccvn_dom_walker walker;
5063 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5065 /* Initialize the value ids and prune out remaining VN_TOPs
5066 from dead code. */
5067 tree name;
5068 FOR_EACH_SSA_NAME (i, name, cfun)
5070 vn_ssa_aux_t info = VN_INFO (name);
5071 if (!info->visited
5072 || info->valnum == VN_TOP)
5073 info->valnum = name;
5074 if (info->valnum == name)
5075 info->value_id = get_next_value_id ();
5076 else if (is_gimple_min_invariant (info->valnum))
5077 info->value_id = get_or_alloc_constant_value_id (info->valnum);
5080 /* Propagate. */
5081 FOR_EACH_SSA_NAME (i, name, cfun)
5083 vn_ssa_aux_t info = VN_INFO (name);
5084 if (TREE_CODE (info->valnum) == SSA_NAME
5085 && info->valnum != name
5086 && info->value_id != VN_INFO (info->valnum)->value_id)
5087 info->value_id = VN_INFO (info->valnum)->value_id;
5090 set_hashtable_value_ids ();
5092 if (dump_file && (dump_flags & TDF_DETAILS))
5094 fprintf (dump_file, "Value numbers:\n");
5095 FOR_EACH_SSA_NAME (i, name, cfun)
5097 if (VN_INFO (name)->visited
5098 && SSA_VAL (name) != name)
5100 print_generic_expr (dump_file, name);
5101 fprintf (dump_file, " = ");
5102 print_generic_expr (dump_file, SSA_VAL (name));
5103 fprintf (dump_file, "\n");
5109 /* Return the maximum value id we have ever seen. */
5111 unsigned int
5112 get_max_value_id (void)
5114 return next_value_id;
5117 /* Return the next unique value id. */
5119 unsigned int
5120 get_next_value_id (void)
5122 return next_value_id++;
5126 /* Compare two expressions E1 and E2 and return true if they are equal. */
5128 bool
5129 expressions_equal_p (tree e1, tree e2)
5131 /* The obvious case. */
5132 if (e1 == e2)
5133 return true;
5135 /* If either one is VN_TOP consider them equal. */
5136 if (e1 == VN_TOP || e2 == VN_TOP)
5137 return true;
5139 /* If only one of them is null, they cannot be equal. */
5140 if (!e1 || !e2)
5141 return false;
5143 /* Now perform the actual comparison. */
5144 if (TREE_CODE (e1) == TREE_CODE (e2)
5145 && operand_equal_p (e1, e2, OEP_PURE_SAME))
5146 return true;
5148 return false;
5152 /* Return true if the nary operation NARY may trap. This is a copy
5153 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
5155 bool
5156 vn_nary_may_trap (vn_nary_op_t nary)
5158 tree type;
5159 tree rhs2 = NULL_TREE;
5160 bool honor_nans = false;
5161 bool honor_snans = false;
5162 bool fp_operation = false;
5163 bool honor_trapv = false;
5164 bool handled, ret;
5165 unsigned i;
5167 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5168 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5169 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5171 type = nary->type;
5172 fp_operation = FLOAT_TYPE_P (type);
5173 if (fp_operation)
5175 honor_nans = flag_trapping_math && !flag_finite_math_only;
5176 honor_snans = flag_signaling_nans != 0;
5178 else if (INTEGRAL_TYPE_P (type)
5179 && TYPE_OVERFLOW_TRAPS (type))
5180 honor_trapv = true;
5182 if (nary->length >= 2)
5183 rhs2 = nary->op[1];
5184 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5185 honor_trapv,
5186 honor_nans, honor_snans, rhs2,
5187 &handled);
5188 if (handled
5189 && ret)
5190 return true;
5192 for (i = 0; i < nary->length; ++i)
5193 if (tree_could_trap_p (nary->op[i]))
5194 return true;
5196 return false;
5200 class eliminate_dom_walker : public dom_walker
5202 public:
5203 eliminate_dom_walker (cdi_direction, bitmap);
5204 ~eliminate_dom_walker ();
5206 virtual edge before_dom_children (basic_block);
5207 virtual void after_dom_children (basic_block);
5209 tree eliminate_avail (tree op);
5210 void eliminate_push_avail (tree op);
5211 tree eliminate_insert (gimple_stmt_iterator *gsi, tree val);
5213 bool do_pre;
5214 unsigned int el_todo;
5215 unsigned int eliminations;
5216 unsigned int insertions;
5218 /* SSA names that had their defs inserted by PRE if do_pre. */
5219 bitmap inserted_exprs;
5221 /* Blocks with statements that have had their EH properties changed. */
5222 bitmap need_eh_cleanup;
5224 /* Blocks with statements that have had their AB properties changed. */
5225 bitmap need_ab_cleanup;
5227 auto_vec<gimple *> to_remove;
5228 auto_vec<gimple *> to_fixup;
5229 auto_vec<tree> avail;
5230 auto_vec<tree> avail_stack;
5233 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
5234 bitmap inserted_exprs_)
5235 : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
5236 el_todo (0), eliminations (0), insertions (0),
5237 inserted_exprs (inserted_exprs_)
5239 need_eh_cleanup = BITMAP_ALLOC (NULL);
5240 need_ab_cleanup = BITMAP_ALLOC (NULL);
5243 eliminate_dom_walker::~eliminate_dom_walker ()
5245 BITMAP_FREE (need_eh_cleanup);
5246 BITMAP_FREE (need_ab_cleanup);
5249 /* Return a leader for OP that is available at the current point of the
5250 eliminate domwalk. */
5252 tree
5253 eliminate_dom_walker::eliminate_avail (tree op)
5255 tree valnum = VN_INFO (op)->valnum;
5256 if (TREE_CODE (valnum) == SSA_NAME)
5258 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5259 return valnum;
5260 if (avail.length () > SSA_NAME_VERSION (valnum))
5261 return avail[SSA_NAME_VERSION (valnum)];
5263 else if (is_gimple_min_invariant (valnum))
5264 return valnum;
5265 return NULL_TREE;
5268 /* At the current point of the eliminate domwalk make OP available. */
5270 void
5271 eliminate_dom_walker::eliminate_push_avail (tree op)
5273 tree valnum = VN_INFO (op)->valnum;
5274 if (TREE_CODE (valnum) == SSA_NAME)
5276 if (avail.length () <= SSA_NAME_VERSION (valnum))
5277 avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
5278 tree pushop = op;
5279 if (avail[SSA_NAME_VERSION (valnum)])
5280 pushop = avail[SSA_NAME_VERSION (valnum)];
5281 avail_stack.safe_push (pushop);
5282 avail[SSA_NAME_VERSION (valnum)] = op;
5286 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
5287 the leader for the expression if insertion was successful. */
5289 tree
5290 eliminate_dom_walker::eliminate_insert (gimple_stmt_iterator *gsi, tree val)
5292 /* We can insert a sequence with a single assignment only. */
5293 gimple_seq stmts = VN_INFO (val)->expr;
5294 if (!gimple_seq_singleton_p (stmts))
5295 return NULL_TREE;
5296 gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
5297 if (!stmt
5298 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
5299 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
5300 && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
5301 && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
5302 || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
5303 return NULL_TREE;
5305 tree op = gimple_assign_rhs1 (stmt);
5306 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
5307 || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5308 op = TREE_OPERAND (op, 0);
5309 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
5310 if (!leader)
5311 return NULL_TREE;
5313 tree res;
5314 stmts = NULL;
5315 if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5316 res = gimple_build (&stmts, BIT_FIELD_REF,
5317 TREE_TYPE (val), leader,
5318 TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
5319 TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
5320 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
5321 res = gimple_build (&stmts, BIT_AND_EXPR,
5322 TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
5323 else
5324 res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
5325 TREE_TYPE (val), leader);
5326 if (TREE_CODE (res) != SSA_NAME
5327 || SSA_NAME_IS_DEFAULT_DEF (res)
5328 || gimple_bb (SSA_NAME_DEF_STMT (res)))
5330 gimple_seq_discard (stmts);
5332 /* During propagation we have to treat SSA info conservatively
5333 and thus we can end up simplifying the inserted expression
5334 at elimination time to something not defined in stmts. */
5335 /* But then this is a redundancy we failed to detect. Which means
5336 res now has two values. That doesn't play well with how
5337 we track availability here, so give up. */
5338 if (dump_file && (dump_flags & TDF_DETAILS))
5340 if (TREE_CODE (res) == SSA_NAME)
5341 res = eliminate_avail (res);
5342 if (res)
5344 fprintf (dump_file, "Failed to insert expression for value ");
5345 print_generic_expr (dump_file, val);
5346 fprintf (dump_file, " which is really fully redundant to ");
5347 print_generic_expr (dump_file, res);
5348 fprintf (dump_file, "\n");
5352 return NULL_TREE;
5354 else
5356 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
5357 VN_INFO_GET (res)->valnum = val;
5360 insertions++;
5361 if (dump_file && (dump_flags & TDF_DETAILS))
5363 fprintf (dump_file, "Inserted ");
5364 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
5367 return res;
5372 /* Perform elimination for the basic-block B during the domwalk. */
5374 edge
5375 eliminate_dom_walker::before_dom_children (basic_block b)
5377 /* Mark new bb. */
5378 avail_stack.safe_push (NULL_TREE);
5380 /* Skip unreachable blocks marked unreachable during the SCCVN domwalk. */
5381 edge_iterator ei;
5382 edge e;
5383 FOR_EACH_EDGE (e, ei, b->preds)
5384 if (e->flags & EDGE_EXECUTABLE)
5385 break;
5386 if (! e)
5387 return NULL;
5389 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
5391 gphi *phi = gsi.phi ();
5392 tree res = PHI_RESULT (phi);
5394 if (virtual_operand_p (res))
5396 gsi_next (&gsi);
5397 continue;
5400 tree sprime = eliminate_avail (res);
5401 if (sprime
5402 && sprime != res)
5404 if (dump_file && (dump_flags & TDF_DETAILS))
5406 fprintf (dump_file, "Replaced redundant PHI node defining ");
5407 print_generic_expr (dump_file, res);
5408 fprintf (dump_file, " with ");
5409 print_generic_expr (dump_file, sprime);
5410 fprintf (dump_file, "\n");
5413 /* If we inserted this PHI node ourself, it's not an elimination. */
5414 if (! inserted_exprs
5415 || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
5416 eliminations++;
5418 /* If we will propagate into all uses don't bother to do
5419 anything. */
5420 if (may_propagate_copy (res, sprime))
5422 /* Mark the PHI for removal. */
5423 to_remove.safe_push (phi);
5424 gsi_next (&gsi);
5425 continue;
5428 remove_phi_node (&gsi, false);
5430 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
5431 sprime = fold_convert (TREE_TYPE (res), sprime);
5432 gimple *stmt = gimple_build_assign (res, sprime);
5433 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
5434 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
5435 continue;
5438 eliminate_push_avail (res);
5439 gsi_next (&gsi);
5442 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
5443 !gsi_end_p (gsi);
5444 gsi_next (&gsi))
5446 tree sprime = NULL_TREE;
5447 gimple *stmt = gsi_stmt (gsi);
5448 tree lhs = gimple_get_lhs (stmt);
5449 if (lhs && TREE_CODE (lhs) == SSA_NAME
5450 && !gimple_has_volatile_ops (stmt)
5451 /* See PR43491. Do not replace a global register variable when
5452 it is the RHS of an assignment. Do replace local register
5453 variables since gcc does not guarantee a local variable will
5454 be allocated in a register.
5455 ??? The fix isn't effective here. This should instead
5456 be ensured by not value-numbering them the same but treating
5457 them like volatiles? */
5458 && !(gimple_assign_single_p (stmt)
5459 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
5460 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
5461 && is_global_var (gimple_assign_rhs1 (stmt)))))
5463 sprime = eliminate_avail (lhs);
5464 if (!sprime)
5466 /* If there is no existing usable leader but SCCVN thinks
5467 it has an expression it wants to use as replacement,
5468 insert that. */
5469 tree val = VN_INFO (lhs)->valnum;
5470 if (val != VN_TOP
5471 && TREE_CODE (val) == SSA_NAME
5472 && VN_INFO (val)->needs_insertion
5473 && VN_INFO (val)->expr != NULL
5474 && (sprime = eliminate_insert (&gsi, val)) != NULL_TREE)
5475 eliminate_push_avail (sprime);
5478 /* If this now constitutes a copy duplicate points-to
5479 and range info appropriately. This is especially
5480 important for inserted code. See tree-ssa-copy.c
5481 for similar code. */
5482 if (sprime
5483 && TREE_CODE (sprime) == SSA_NAME)
5485 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
5486 if (POINTER_TYPE_P (TREE_TYPE (lhs))
5487 && VN_INFO_PTR_INFO (lhs)
5488 && ! VN_INFO_PTR_INFO (sprime))
5490 duplicate_ssa_name_ptr_info (sprime,
5491 VN_INFO_PTR_INFO (lhs));
5492 if (b != sprime_b)
5493 mark_ptr_info_alignment_unknown
5494 (SSA_NAME_PTR_INFO (sprime));
5496 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5497 && VN_INFO_RANGE_INFO (lhs)
5498 && ! VN_INFO_RANGE_INFO (sprime)
5499 && b == sprime_b)
5500 duplicate_ssa_name_range_info (sprime,
5501 VN_INFO_RANGE_TYPE (lhs),
5502 VN_INFO_RANGE_INFO (lhs));
5505 /* Inhibit the use of an inserted PHI on a loop header when
5506 the address of the memory reference is a simple induction
5507 variable. In other cases the vectorizer won't do anything
5508 anyway (either it's loop invariant or a complicated
5509 expression). */
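/* E.g. (an illustrative editorial sketch): for a load from a[i_1] in a
   loop where i_1 is a simple induction variable, replacing the load
   with a PRE-inserted PHI on the loop header would carry the value
   across iterations, adding a loop-carried dependence that would keep
   the vectorizer from handling the loop. */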
5510 if (sprime
5511 && TREE_CODE (sprime) == SSA_NAME
5512 && do_pre
5513 && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
5514 && loop_outer (b->loop_father)
5515 && has_zero_uses (sprime)
5516 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
5517 && gimple_assign_load_p (stmt))
5519 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
5520 basic_block def_bb = gimple_bb (def_stmt);
5521 if (gimple_code (def_stmt) == GIMPLE_PHI
5522 && def_bb->loop_father->header == def_bb)
5524 loop_p loop = def_bb->loop_father;
5525 ssa_op_iter iter;
5526 tree op;
5527 bool found = false;
5528 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5530 affine_iv iv;
5531 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
5532 if (def_bb
5533 && flow_bb_inside_loop_p (loop, def_bb)
5534 && simple_iv (loop, loop, op, &iv, true))
5536 found = true;
5537 break;
5540 if (found)
5542 if (dump_file && (dump_flags & TDF_DETAILS))
5544 fprintf (dump_file, "Not replacing ");
5545 print_gimple_expr (dump_file, stmt, 0);
5546 fprintf (dump_file, " with ");
5547 print_generic_expr (dump_file, sprime);
5548 fprintf (dump_file, " which would add a loop"
5549 " carried dependence to loop %d\n",
5550 loop->num);
5552 /* Don't keep sprime available. */
5553 sprime = NULL_TREE;
5558 if (sprime)
5560 /* If we can propagate the value computed for LHS into
5561 all uses don't bother doing anything with this stmt. */
5562 if (may_propagate_copy (lhs, sprime))
5564 /* Mark it for removal. */
5565 to_remove.safe_push (stmt);
5567 /* ??? Don't count copy/constant propagations. */
5568 if (gimple_assign_single_p (stmt)
5569 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5570 || gimple_assign_rhs1 (stmt) == sprime))
5571 continue;
5573 if (dump_file && (dump_flags & TDF_DETAILS))
5575 fprintf (dump_file, "Replaced ");
5576 print_gimple_expr (dump_file, stmt, 0);
5577 fprintf (dump_file, " with ");
5578 print_generic_expr (dump_file, sprime);
5579 fprintf (dump_file, " in all uses of ");
5580 print_gimple_stmt (dump_file, stmt, 0);
5583 eliminations++;
5584 continue;
5587 /* If this is an assignment from our leader (which
5588 happens in the case the value-number is a constant)
5589 then there is nothing to do. */
5590 if (gimple_assign_single_p (stmt)
5591 && sprime == gimple_assign_rhs1 (stmt))
5592 continue;
5594 /* Else replace its RHS. */
5595 bool can_make_abnormal_goto
5596 = is_gimple_call (stmt)
5597 && stmt_can_make_abnormal_goto (stmt);
5599 if (dump_file && (dump_flags & TDF_DETAILS))
5601 fprintf (dump_file, "Replaced ");
5602 print_gimple_expr (dump_file, stmt, 0);
5603 fprintf (dump_file, " with ");
5604 print_generic_expr (dump_file, sprime);
5605 fprintf (dump_file, " in ");
5606 print_gimple_stmt (dump_file, stmt, 0);
5609 eliminations++;
5610 gimple *orig_stmt = stmt;
5611 if (!useless_type_conversion_p (TREE_TYPE (lhs),
5612 TREE_TYPE (sprime)))
5613 sprime = fold_convert (TREE_TYPE (lhs), sprime);
5614 tree vdef = gimple_vdef (stmt);
5615 tree vuse = gimple_vuse (stmt);
5616 propagate_tree_value_into_stmt (&gsi, sprime);
5617 stmt = gsi_stmt (gsi);
5618 update_stmt (stmt);
5619 if (vdef != gimple_vdef (stmt))
5620 VN_INFO (vdef)->valnum = vuse;
5622 /* If we removed EH side-effects from the statement, clean
5623 its EH information. */
5624 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
5626 bitmap_set_bit (need_eh_cleanup,
5627 gimple_bb (stmt)->index);
5628 if (dump_file && (dump_flags & TDF_DETAILS))
5629 fprintf (dump_file, " Removed EH side-effects.\n");
5632 /* Likewise for AB side-effects. */
5633 if (can_make_abnormal_goto
5634 && !stmt_can_make_abnormal_goto (stmt))
5636 bitmap_set_bit (need_ab_cleanup,
5637 gimple_bb (stmt)->index);
5638 if (dump_file && (dump_flags & TDF_DETAILS))
5639 fprintf (dump_file, " Removed AB side-effects.\n");
5642 continue;
5646 /* If the statement is a scalar store, see if the expression
5647 has the same value number as its rhs. If so, the store is
5648 dead. */
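/* An illustrative sketch (editorial example, made-up names): for
     a = x_1;
     a = x_1;
   the lookup of the lhs with the second store's vuse yields x_1,
   which equals the rhs, so the second store is queued for removal
   below. */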
5649 if (gimple_assign_single_p (stmt)
5650 && !gimple_has_volatile_ops (stmt)
5651 && !is_gimple_reg (gimple_assign_lhs (stmt))
5652 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5653 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
5655 tree val;
5656 tree rhs = gimple_assign_rhs1 (stmt);
5657 vn_reference_t vnresult;
5658 val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
5659 &vnresult, false);
5660 if (TREE_CODE (rhs) == SSA_NAME)
5661 rhs = VN_INFO (rhs)->valnum;
5662 if (val
5663 && operand_equal_p (val, rhs, 0))
5665 /* We can only remove the later store if the former aliases
5666 at least all accesses the later one does or if the store
5667 was to readonly memory storing the same value. */
5668 alias_set_type set = get_alias_set (lhs);
5669 if (! vnresult
5670 || vnresult->set == set
5671 || alias_set_subset_of (set, vnresult->set))
5673 if (dump_file && (dump_flags & TDF_DETAILS))
5675 fprintf (dump_file, "Deleted redundant store ");
5676 print_gimple_stmt (dump_file, stmt, 0);
5679 /* Queue stmt for removal. */
5680 to_remove.safe_push (stmt);
5681 continue;
5686 /* If this is a control statement and value numbering left one of
5687 its edges not executable, force the condition in a way
5688 consistent with that. */
5689 if (gcond *cond = dyn_cast <gcond *> (stmt))
5691 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
5692 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
5694 if (dump_file && (dump_flags & TDF_DETAILS))
5696 fprintf (dump_file, "Removing unexecutable edge from ");
5697 print_gimple_stmt (dump_file, stmt, 0);
5699 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
5700 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
5701 gimple_cond_make_true (cond);
5702 else
5703 gimple_cond_make_false (cond);
5704 update_stmt (cond);
5705 el_todo |= TODO_cleanup_cfg;
5706 continue;
5710 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
5711 bool was_noreturn = (is_gimple_call (stmt)
5712 && gimple_call_noreturn_p (stmt));
5713 tree vdef = gimple_vdef (stmt);
5714 tree vuse = gimple_vuse (stmt);
5716 /* If we didn't replace the whole stmt (or propagate the result
5717 into all uses), replace all uses on this stmt with their
5718 leaders. */
5719 bool modified = false;
5720 use_operand_p use_p;
5721 ssa_op_iter iter;
5722 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5724 tree use = USE_FROM_PTR (use_p);
5725 /* ??? The call code above leaves stmt operands un-updated. */
5726 if (TREE_CODE (use) != SSA_NAME)
5727 continue;
5728 tree sprime = eliminate_avail (use);
5729 if (sprime && sprime != use
5730 && may_propagate_copy (use, sprime)
5731 /* We substitute into debug stmts to avoid excessive
5732 debug temporaries created by removed stmts, but we need
5733 to avoid doing so for inserted sprimes as we never want
5734 to create debug temporaries for them. */
5735 && (!inserted_exprs
5736 || TREE_CODE (sprime) != SSA_NAME
5737 || !is_gimple_debug (stmt)
5738 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
5740 propagate_value (use_p, sprime);
5741 modified = true;
5745 /* Fold the stmt if modified; this canonicalizes MEM_REFs we propagated
5746 into, which is a requirement for the IPA devirt machinery. */
5747 gimple *old_stmt = stmt;
5748 if (modified)
5750 /* If a formerly non-invariant ADDR_EXPR is turned into an
5751 invariant one it was on a separate stmt. */
5752 if (gimple_assign_single_p (stmt)
5753 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
5754 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
5755 gimple_stmt_iterator prev = gsi;
5756 gsi_prev (&prev);
5757 if (fold_stmt (&gsi))
5759 /* fold_stmt may have created new stmts in between
5760 the previous stmt and the folded stmt. Mark
5761 all defs created there as varying to not confuse
5762 the SCCVN machinery as we're using that even during
5763 elimination. */
5764 if (gsi_end_p (prev))
5765 prev = gsi_start_bb (b);
5766 else
5767 gsi_next (&prev);
5768 if (gsi_stmt (prev) != gsi_stmt (gsi))
5769 do
5770 {
5771 tree def;
5772 ssa_op_iter dit;
5773 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
5774 dit, SSA_OP_ALL_DEFS)
5775 /* As existing DEFs may move between stmts
5776 we have to guard VN_INFO_GET. */
5777 if (! has_VN_INFO (def))
5778 VN_INFO_GET (def)->valnum = def;
5779 if (gsi_stmt (prev) == gsi_stmt (gsi))
5780 break;
5781 gsi_next (&prev);
5782 }
5783 while (1);
5784 }
5785 stmt = gsi_stmt (gsi);
5786 /* In case we folded the stmt away, schedule the resulting NOP for removal. */
5787 if (gimple_nop_p (stmt))
5788 to_remove.safe_push (stmt);
5789 }
5791 /* Visit indirect calls and turn them into direct calls if
5792 possible using the devirtualization machinery. Do this before
5793 checking for required EH/abnormal/noreturn cleanup as devirtualization
5794 may expose more of those. */
5795 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5796 {
5797 tree fn = gimple_call_fn (call_stmt);
5798 if (fn
5799 && flag_devirtualize
5800 && virtual_method_call_p (fn))
5801 {
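/* Determine the polymorphic call context and the set of possible call
   targets; when that set is final and has at most one member, the
   indirect call is turned into a direct call, or into a call to
   __builtin_unreachable if no target remains.  */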
5802 tree otr_type = obj_type_ref_class (fn);
5803 unsigned HOST_WIDE_INT otr_tok
5804 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
5805 tree instance;
5806 ipa_polymorphic_call_context context (current_function_decl,
5807 fn, stmt, &instance);
5808 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
5809 otr_type, stmt);
5810 bool final;
5811 vec <cgraph_node *> targets
5812 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
5813 otr_tok, context, &final);
5814 if (dump_file)
5815 dump_possible_polymorphic_call_targets (dump_file,
5816 obj_type_ref_class (fn),
5817 otr_tok, context);
5818 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5819 {
5820 tree fn;
5821 if (targets.length () == 1)
5822 fn = targets[0]->decl;
5823 else
5824 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5825 if (dump_enabled_p ())
5826 {
5827 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
5828 "converting indirect call to "
5829 "function %s\n",
5830 lang_hooks.decl_printable_name (fn, 2));
5831 }
5832 gimple_call_set_fndecl (call_stmt, fn);
5833 /* If changing the call to __builtin_unreachable
5834 or similar noreturn function, adjust gimple_call_fntype
5835 too. */
5836 if (gimple_call_noreturn_p (call_stmt)
5837 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
5838 && TYPE_ARG_TYPES (TREE_TYPE (fn))
5839 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
5840 == void_type_node))
5841 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
5842 maybe_remove_unused_call_args (cfun, call_stmt);
5843 modified = true;
5844 }
5845 }
5846 }
5848 if (modified)
5849 {
5850 /* When changing a call into a noreturn call, cfg cleanup
5851 is needed to fix up the noreturn call. */
5852 if (!was_noreturn
5853 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
5854 to_fixup.safe_push (stmt);
5855 /* When changing a condition or switch into one where we know which
5856 edge will be executed, schedule a cfg cleanup. */
5857 if ((gimple_code (stmt) == GIMPLE_COND
5858 && (gimple_cond_true_p (as_a <gcond *> (stmt))
5859 || gimple_cond_false_p (as_a <gcond *> (stmt))))
5860 || (gimple_code (stmt) == GIMPLE_SWITCH
5861 && TREE_CODE (gimple_switch_index
5862 (as_a <gswitch *> (stmt))) == INTEGER_CST))
5863 el_todo |= TODO_cleanup_cfg;
5864 /* If we removed EH side-effects from the statement, clean
5865 its EH information. */
5866 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
5867 {
5868 bitmap_set_bit (need_eh_cleanup,
5869 gimple_bb (stmt)->index);
5870 if (dump_file && (dump_flags & TDF_DETAILS))
5871 fprintf (dump_file, " Removed EH side-effects.\n");
5872 }
5873 /* Likewise for AB side-effects. */
5874 if (can_make_abnormal_goto
5875 && !stmt_can_make_abnormal_goto (stmt))
5876 {
5877 bitmap_set_bit (need_ab_cleanup,
5878 gimple_bb (stmt)->index);
5879 if (dump_file && (dump_flags & TDF_DETAILS))
5880 fprintf (dump_file, " Removed AB side-effects.\n");
5881 }
5882 update_stmt (stmt);
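/* If folding dropped the stmt's virtual definition, record the old VDEF
   as having the value of the incoming VUSE so that later value-number
   lookups see through the vanished definition.  */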
5883 if (vdef != gimple_vdef (stmt))
5884 VN_INFO (vdef)->valnum = vuse;
5885 }
5887 /* Make new values available - for fully redundant LHS we
5888 continue with the next stmt above and skip this. */
5889 def_operand_p defp;
5890 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
5891 eliminate_push_avail (DEF_FROM_PTR (defp));
5892 }
5894 /* Replace destination PHI arguments. */
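/* Arguments are only rewritten on edges value numbering still considers
   executable; arguments on non-executable edges are left untouched.  */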
5895 FOR_EACH_EDGE (e, ei, b->succs)
5896 if (e->flags & EDGE_EXECUTABLE)
5897 for (gphi_iterator gsi = gsi_start_phis (e->dest);
5898 !gsi_end_p (gsi);
5899 gsi_next (&gsi))
5900 {
5901 gphi *phi = gsi.phi ();
5902 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
5903 tree arg = USE_FROM_PTR (use_p);
5904 if (TREE_CODE (arg) != SSA_NAME
5905 || virtual_operand_p (arg))
5906 continue;
5907 tree sprime = eliminate_avail (arg);
5908 if (sprime && may_propagate_copy (arg, sprime))
5909 propagate_value (use_p, sprime);
5910 }
5911 return NULL;
5912 }
5914 /* Make leaders that are no longer valid past this block no longer available. */
5916 void
5917 eliminate_dom_walker::after_dom_children (basic_block)
5918 {
5919 tree entry;
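/* Each popped entry is either the leader this block installed for its
   value number (in which case the slot is cleared) or the leader it
   shadowed (which is restored); the NULL_TREE sentinel bounds the
   entries pushed for this block.  */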
5920 while ((entry = avail_stack.pop ()) != NULL_TREE)
5921 {
5922 tree valnum = VN_INFO (entry)->valnum;
5923 tree old = avail[SSA_NAME_VERSION (valnum)];
5924 if (old == entry)
5925 avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
5926 else
5927 avail[SSA_NAME_VERSION (valnum)] = entry;
5928 }
5929 }
5931 /* Eliminate fully redundant computations. */
5933 unsigned int
5934 vn_eliminate (bitmap inserted_exprs)
5935 {
5936 eliminate_dom_walker el (CDI_DOMINATORS, inserted_exprs);
5937 el.avail.reserve (num_ssa_names);
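/* The dominator walk performs the actual eliminations; stmt removals and
   noreturn-call fixups are only queued (in to_remove and to_fixup) and
   are processed below, after the walk.  */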
5939 el.walk (cfun->cfg->x_entry_block_ptr);
5941 /* We cannot remove stmts during the BB walk, and especially cannot
5942 release SSA names there, as this confuses the VN machinery. The stmts
5943 ending up in to_remove are either stores or simple copies.
5944 Remove stmts in reverse order to make debug stmt creation possible. */
5945 while (!el.to_remove.is_empty ())
5946 {
5947 gimple *stmt = el.to_remove.pop ();
5949 if (dump_file && (dump_flags & TDF_DETAILS))
5950 {
5951 fprintf (dump_file, "Removing dead stmt ");
5952 print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
5953 }
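/* PHIs are removed via remove_phi_node; other stmts are first unlinked
   from the virtual operand chain, and removing them may require EH or
   abnormal-edge cleanup in their block.  */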
5955 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5956 if (gimple_code (stmt) == GIMPLE_PHI)
5957 remove_phi_node (&gsi, true);
5958 else
5959 {
5960 basic_block bb = gimple_bb (stmt);
5961 unlink_stmt_vdef (stmt);
5962 if (gsi_remove (&gsi, true))
5963 bitmap_set_bit (el.need_eh_cleanup, bb->index);
5964 if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
5965 bitmap_set_bit (el.need_ab_cleanup, bb->index);
5966 release_defs (stmt);
5967 }
5969 /* Removing a stmt may expose a forwarder block. */
5970 el.el_todo |= TODO_cleanup_cfg;
5971 }
5973 /* Fixup stmts that became noreturn calls. This may require splitting
5974 blocks and thus isn't possible during the dominator walk. Do this
5975 in reverse order so we don't inadvertently remove a stmt we want to
5976 fix up by visiting a dominating now-noreturn call first. */
5977 while (!el.to_fixup.is_empty ())
5978 {
5979 gimple *stmt = el.to_fixup.pop ();
5981 if (dump_file && (dump_flags & TDF_DETAILS))
5982 {
5983 fprintf (dump_file, "Fixing up noreturn call ");
5984 print_gimple_stmt (dump_file, stmt, 0);
5985 }
5987 if (fixup_noreturn_call (stmt))
5988 el.el_todo |= TODO_cleanup_cfg;
5989 }
5991 bool do_eh_cleanup = !bitmap_empty_p (el.need_eh_cleanup);
5992 bool do_ab_cleanup = !bitmap_empty_p (el.need_ab_cleanup);
5994 if (do_eh_cleanup)
5995 gimple_purge_all_dead_eh_edges (el.need_eh_cleanup);
5997 if (do_ab_cleanup)
5998 gimple_purge_all_dead_abnormal_call_edges (el.need_ab_cleanup);
6000 if (do_eh_cleanup || do_ab_cleanup)
6001 el.el_todo |= TODO_cleanup_cfg;
6003 statistics_counter_event (cfun, "Eliminated", el.eliminations);
6004 statistics_counter_event (cfun, "Insertions", el.insertions);
6006 return el.el_todo;
6007 }
6010 namespace {
6012 const pass_data pass_data_fre =
6013 {
6014 GIMPLE_PASS, /* type */
6015 "fre", /* name */
6016 OPTGROUP_NONE, /* optinfo_flags */
6017 TV_TREE_FRE, /* tv_id */
6018 ( PROP_cfg | PROP_ssa ), /* properties_required */
6019 0, /* properties_provided */
6020 0, /* properties_destroyed */
6021 0, /* todo_flags_start */
6022 0, /* todo_flags_finish */
6023 };
6025 class pass_fre : public gimple_opt_pass
6026 {
6027 public:
6028 pass_fre (gcc::context *ctxt)
6029 : gimple_opt_pass (pass_data_fre, ctxt)
6030 {}
6032 /* opt_pass methods: */
6033 opt_pass * clone () { return new pass_fre (m_ctxt); }
6034 virtual bool gate (function *) { return flag_tree_fre != 0; }
6035 virtual unsigned int execute (function *);
6037 }; // class pass_fre
6039 unsigned int
6040 pass_fre::execute (function *)
6041 {
6042 unsigned int todo = 0;
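/* Value-number the whole function first, then eliminate fully redundant
   computations using those value numbers, and finally restore the SSA
   info and release the VN tables.  */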
6044 run_scc_vn (VN_WALKREWRITE);
6046 /* Remove all the redundant expressions. */
6047 todo |= vn_eliminate (NULL);
6049 scc_vn_restore_ssa_info ();
6050 free_scc_vn ();
6052 return todo;
6053 }
6055 } // anon namespace
6057 gimple_opt_pass *
6058 make_pass_fre (gcc::context *ctxt)
6059 {
6060 return new pass_fre (ctxt);