gcc/tree-ssa-sccvn.c
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2018 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimplify.h"
45 #include "flags.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "dumpfile.h"
55 #include "cfgloop.h"
56 #include "params.h"
57 #include "tree-ssa-propagate.h"
58 #include "tree-cfg.h"
59 #include "domwalk.h"
60 #include "gimple-iterator.h"
61 #include "gimple-match.h"
62 #include "stringpool.h"
63 #include "attribs.h"
64 #include "tree-pass.h"
65 #include "statistics.h"
66 #include "langhooks.h"
67 #include "ipa-utils.h"
68 #include "dbgcnt.h"
69 #include "tree-cfgcleanup.h"
70 #include "tree-ssa-loop.h"
71 #include "tree-scalar-evolution.h"
72 #include "tree-ssa-sccvn.h"
74 /* This algorithm is based on the SCC algorithm presented by Keith
75 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
76 (http://citeseer.ist.psu.edu/41805.html). In
77 straight line code, it is equivalent to a regular hash based value
78 numbering that is performed in reverse postorder.
80 For code with cycles, there are two alternatives, both of which
81 require keeping the hashtables separate from the actual list of
82 value numbers for SSA names.
84 1. Iterate value numbering in an RPO walk of the blocks, removing
85 all the entries from the hashtable after each iteration (but
86 keeping the SSA name->value number mapping between iterations).
87 Iterate until it does not change.
89 2. Perform value numbering as part of an SCC walk on the SSA graph,
90 iterating only the cycles in the SSA graph until they do not change
91 (using a separate, optimistic hashtable for value numbering the SCC
92 operands).
94 The second is not just faster in practice (because most SSA graph
95 cycles do not involve all the variables in the graph), it also has
96 some nice properties.
98 One of these nice properties is that when we pop an SCC off the
99 stack, we are guaranteed to have processed all the operands coming from
100 *outside of that SCC*, so we do not need to do anything special to
101 ensure they have value numbers.
103 Another nice property is that the SCC walk is done as part of a DFS
104 of the SSA graph, which makes it easy to perform combining and
105 simplifying operations at the same time.
107 The code below is deliberately written in a way that makes it easy
108 to separate the SCC walk from the other work it does.
110 In order to propagate constants through the code, we track which
111 expressions contain constants, and use those while folding. In
112 theory, we could also track expressions whose value numbers are
113 replaced, in case we end up folding based on expression
114 identities.
116 In order to value number memory, we assign value numbers to vuses.
117 This enables us to note that, for example, stores to the same
118 address of the same value from the same starting memory states are
119 equivalent.
120 TODO:
122 1. We can iterate only the changing portions of the SCC's, but
123 I have not seen an SCC big enough for this to be a win.
124 2. If you differentiate between phi nodes for loops and phi nodes
125 for if-then-else, you can properly consider phi nodes in different
126 blocks for equivalence.
127 3. We could value number vuses in more cases, particularly, whole
 128       structure copies.  */
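/* Illustrative example (not from the original sources): for the loop cycle

     i_1 = PHI <0(entry), i_3(latch)>
     j_2 = PHI <0(entry), j_4(latch)>
     i_3 = i_1 + 1;
     j_4 = j_2 + 1;

   the SCC {i_1, j_2, i_3, j_4} is iterated with the optimistic table.
   Starting from VN_TOP both PHIs hash to the same value number, so i_3
   and j_4 do as well, and the assumption still holds on the next
   iteration; i and j are therefore proven equivalent.  Alternative 1
   above reaches the same fixed point but revisits every block on each
   iteration.  */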
132 static tree *last_vuse_ptr;
133 static vn_lookup_kind vn_walk_kind;
134 static vn_lookup_kind default_vn_walk_kind;
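/* SSA name versions of parameter default definitions determined to point
   to read-only memory (consulted by vn_reference_lookup_3 below).  */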
135 bitmap const_parms;
137 /* vn_nary_op hashtable helpers. */
139 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
141 typedef vn_nary_op_s *compare_type;
142 static inline hashval_t hash (const vn_nary_op_s *);
143 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
146 /* Return the computed hashcode for nary operation P1. */
148 inline hashval_t
149 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
151 return vno1->hashcode;
154 /* Compare nary operations P1 and P2 and return true if they are
155 equivalent. */
157 inline bool
158 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
160 return vn_nary_op_eq (vno1, vno2);
163 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
164 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
167 /* vn_phi hashtable helpers. */
169 static int
170 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
172 struct vn_phi_hasher : pointer_hash <vn_phi_s>
174 static inline hashval_t hash (const vn_phi_s *);
175 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
176 static inline void remove (vn_phi_s *);
179 /* Return the computed hashcode for phi operation P1. */
181 inline hashval_t
182 vn_phi_hasher::hash (const vn_phi_s *vp1)
184 return vp1->hashcode;
187 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
189 inline bool
190 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
192 return vn_phi_eq (vp1, vp2);
195 /* Free a phi operation structure VP. */
197 inline void
198 vn_phi_hasher::remove (vn_phi_s *phi)
200 phi->phiargs.release ();
203 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
204 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
207 /* Compare two reference operands P1 and P2 for equality. Return true if
208 they are equal, and false otherwise. */
210 static int
211 vn_reference_op_eq (const void *p1, const void *p2)
213 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
214 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
216 return (vro1->opcode == vro2->opcode
217 /* We do not care for differences in type qualification. */
218 && (vro1->type == vro2->type
219 || (vro1->type && vro2->type
220 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
221 TYPE_MAIN_VARIANT (vro2->type))))
222 && expressions_equal_p (vro1->op0, vro2->op0)
223 && expressions_equal_p (vro1->op1, vro2->op1)
224 && expressions_equal_p (vro1->op2, vro2->op2));
227 /* Free a reference operation structure VP. */
229 static inline void
230 free_reference (vn_reference_s *vr)
232 vr->operands.release ();
236 /* vn_reference hashtable helpers. */
238 struct vn_reference_hasher : pointer_hash <vn_reference_s>
240 static inline hashval_t hash (const vn_reference_s *);
241 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
242 static inline void remove (vn_reference_s *);
245 /* Return the hashcode for a given reference operation P1. */
247 inline hashval_t
248 vn_reference_hasher::hash (const vn_reference_s *vr1)
250 return vr1->hashcode;
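/* Return true if the reference operations V and C are equivalent.  */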
253 inline bool
254 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
256 return vn_reference_eq (v, c);
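/* Free the operands vector of reference V.  */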
259 inline void
260 vn_reference_hasher::remove (vn_reference_s *v)
262 free_reference (v);
265 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
266 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
269 /* The set of hashtables and alloc_pool's for their items. */
271 typedef struct vn_tables_s
273 vn_nary_op_table_type *nary;
274 vn_phi_table_type *phis;
275 vn_reference_table_type *references;
276 struct obstack nary_obstack;
277 object_allocator<vn_phi_s> *phis_pool;
278 object_allocator<vn_reference_s> *references_pool;
279 } *vn_tables_t;
282 /* vn_constant hashtable helpers. */
284 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
286 static inline hashval_t hash (const vn_constant_s *);
287 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
290 /* Hash table hash function for vn_constant_t. */
292 inline hashval_t
293 vn_constant_hasher::hash (const vn_constant_s *vc1)
295 return vc1->hashcode;
298 /* Hash table equality function for vn_constant_t. */
300 inline bool
301 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
303 if (vc1->hashcode != vc2->hashcode)
304 return false;
306 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
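/* Table mapping constants to their value ids, and the set of value ids
   known to stand for constants.  */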
309 static hash_table<vn_constant_hasher> *constant_to_value_id;
310 static bitmap constant_value_ids;
313 /* Valid hashtables storing information we have proven to be
314 correct. */
316 static vn_tables_t valid_info;
318 /* Optimistic hashtables storing information we are making assumptions about
319 during iterations. */
321 static vn_tables_t optimistic_info;
323 /* Pointer to the set of hashtables that is currently being used.
324 Should always point to either the optimistic_info, or the
325 valid_info. */
327 static vn_tables_t current_info;
330 /* Reverse post order index for each basic block. */
332 static int *rpo_numbers;
334 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
336 /* Return the SSA value of the VUSE x, supporting released VDEFs
337 during elimination which will value-number the VDEF to the
338 associated VUSE (but not substitute in the whole lattice). */
340 static inline tree
341 vuse_ssa_val (tree x)
343 if (!x)
344 return NULL_TREE;
348 tree tem = SSA_VAL (x);
349 /* stmt walking can walk over a backedge and reach code we didn't
350 value-number yet. */
351 if (tem == VN_TOP)
352 return x;
353 x = tem;
355 while (SSA_NAME_IN_FREE_LIST (x));
357 return x;
360 /* This represents the top of the VN lattice, which is the universal
361 value. */
363 tree VN_TOP;
365 /* Unique counter for our value ids. */
367 static unsigned int next_value_id;
369 /* Next DFS number and the stack for strongly connected component
370 detection. */
372 static unsigned int next_dfs_num;
373 static vec<tree> sccstack;
377 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
378 are allocated on an obstack for locality reasons, and to free them
379 without looping over the vec. */
381 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
382 static struct obstack vn_ssa_aux_obstack;
384 /* Return whether there is value numbering information for a given SSA name. */
386 bool
387 has_VN_INFO (tree name)
389 if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
390 return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
391 return false;
394 /* Return the value numbering information for a given SSA name. */
396 vn_ssa_aux_t
397 VN_INFO (tree name)
399 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
400 gcc_checking_assert (res);
401 return res;
404 /* Set the value numbering info for a given SSA name to a given
405 value. */
407 static inline void
408 VN_INFO_SET (tree name, vn_ssa_aux_t value)
410 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
413 /* Initialize the value numbering info for a given SSA name.
414 This should be called just once for every SSA name. */
416 vn_ssa_aux_t
417 VN_INFO_GET (tree name)
419 vn_ssa_aux_t newinfo;
421 gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
422 || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
423 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
424 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
425 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
426 vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
427 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
428 return newinfo;
432 /* Return the vn_kind the expression computed by the stmt should be
433 associated with. */
435 enum vn_kind
436 vn_get_stmt_kind (gimple *stmt)
438 switch (gimple_code (stmt))
440 case GIMPLE_CALL:
441 return VN_REFERENCE;
442 case GIMPLE_PHI:
443 return VN_PHI;
444 case GIMPLE_ASSIGN:
446 enum tree_code code = gimple_assign_rhs_code (stmt);
447 tree rhs1 = gimple_assign_rhs1 (stmt);
448 switch (get_gimple_rhs_class (code))
450 case GIMPLE_UNARY_RHS:
451 case GIMPLE_BINARY_RHS:
452 case GIMPLE_TERNARY_RHS:
453 return VN_NARY;
454 case GIMPLE_SINGLE_RHS:
455 switch (TREE_CODE_CLASS (code))
457 case tcc_reference:
458 /* VOP-less references can go through unary case. */
459 if ((code == REALPART_EXPR
460 || code == IMAGPART_EXPR
461 || code == VIEW_CONVERT_EXPR
462 || code == BIT_FIELD_REF)
463 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
464 return VN_NARY;
466 /* Fallthrough. */
467 case tcc_declaration:
468 return VN_REFERENCE;
470 case tcc_constant:
471 return VN_CONSTANT;
473 default:
474 if (code == ADDR_EXPR)
475 return (is_gimple_min_invariant (rhs1)
476 ? VN_CONSTANT : VN_REFERENCE);
477 else if (code == CONSTRUCTOR)
478 return VN_NARY;
479 return VN_NONE;
481 default:
482 return VN_NONE;
485 default:
486 return VN_NONE;
490 /* Lookup a value id for CONSTANT and return it. If it does not
491 exist returns 0. */
493 unsigned int
494 get_constant_value_id (tree constant)
496 vn_constant_s **slot;
497 struct vn_constant_s vc;
499 vc.hashcode = vn_hash_constant_with_type (constant);
500 vc.constant = constant;
501 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
502 if (slot)
503 return (*slot)->value_id;
504 return 0;
507 /* Lookup a value id for CONSTANT, and if it does not exist, create a
508 new one and return it. If it does exist, return it. */
510 unsigned int
511 get_or_alloc_constant_value_id (tree constant)
513 vn_constant_s **slot;
514 struct vn_constant_s vc;
515 vn_constant_t vcp;
517 vc.hashcode = vn_hash_constant_with_type (constant);
518 vc.constant = constant;
519 slot = constant_to_value_id->find_slot (&vc, INSERT);
520 if (*slot)
521 return (*slot)->value_id;
523 vcp = XNEW (struct vn_constant_s);
524 vcp->hashcode = vc.hashcode;
525 vcp->constant = constant;
526 vcp->value_id = get_next_value_id ();
527 *slot = vcp;
528 bitmap_set_bit (constant_value_ids, vcp->value_id);
529 return vcp->value_id;
532 /* Return true if V is a value id for a constant. */
534 bool
535 value_id_constant_p (unsigned int v)
537 return bitmap_bit_p (constant_value_ids, v);
540 /* Compute the hash for a reference operand VRO1. */
542 static void
543 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
545 hstate.add_int (vro1->opcode);
546 if (vro1->op0)
547 inchash::add_expr (vro1->op0, hstate);
548 if (vro1->op1)
549 inchash::add_expr (vro1->op1, hstate);
550 if (vro1->op2)
551 inchash::add_expr (vro1->op2, hstate);
554 /* Compute a hash for the reference operation VR1 and return it. */
556 static hashval_t
557 vn_reference_compute_hash (const vn_reference_t vr1)
559 inchash::hash hstate;
560 hashval_t result;
561 int i;
562 vn_reference_op_t vro;
563 poly_int64 off = -1;
564 bool deref = false;
566 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
568 if (vro->opcode == MEM_REF)
569 deref = true;
570 else if (vro->opcode != ADDR_EXPR)
571 deref = false;
572 if (maybe_ne (vro->off, -1))
574 if (known_eq (off, -1))
575 off = 0;
576 off += vro->off;
578 else
580 if (maybe_ne (off, -1)
581 && maybe_ne (off, 0))
582 hstate.add_poly_int (off);
583 off = -1;
584 if (deref
585 && vro->opcode == ADDR_EXPR)
587 if (vro->op0)
589 tree op = TREE_OPERAND (vro->op0, 0);
590 hstate.add_int (TREE_CODE (op));
591 inchash::add_expr (op, hstate);
594 else
595 vn_reference_op_compute_hash (vro, hstate);
598 result = hstate.end ();
599 /* ??? We would ICE later if we hash instead of adding that in. */
600 if (vr1->vuse)
601 result += SSA_NAME_VERSION (vr1->vuse);
603 return result;
606 /* Return true if reference operations VR1 and VR2 are equivalent. This
607 means they have the same set of operands and vuses. */
609 bool
610 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
612 unsigned i, j;
614 /* Early out if this is not a hash collision. */
615 if (vr1->hashcode != vr2->hashcode)
616 return false;
618 /* The VOP needs to be the same. */
619 if (vr1->vuse != vr2->vuse)
620 return false;
622 /* If the operands are the same we are done. */
623 if (vr1->operands == vr2->operands)
624 return true;
626 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
627 return false;
629 if (INTEGRAL_TYPE_P (vr1->type)
630 && INTEGRAL_TYPE_P (vr2->type))
632 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
633 return false;
635 else if (INTEGRAL_TYPE_P (vr1->type)
636 && (TYPE_PRECISION (vr1->type)
637 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
638 return false;
639 else if (INTEGRAL_TYPE_P (vr2->type)
640 && (TYPE_PRECISION (vr2->type)
641 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
642 return false;
644 i = 0;
645 j = 0;
648 poly_int64 off1 = 0, off2 = 0;
649 vn_reference_op_t vro1, vro2;
650 vn_reference_op_s tem1, tem2;
651 bool deref1 = false, deref2 = false;
652 for (; vr1->operands.iterate (i, &vro1); i++)
654 if (vro1->opcode == MEM_REF)
655 deref1 = true;
656 /* Do not look through a storage order barrier. */
657 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
658 return false;
659 if (known_eq (vro1->off, -1))
660 break;
661 off1 += vro1->off;
663 for (; vr2->operands.iterate (j, &vro2); j++)
665 if (vro2->opcode == MEM_REF)
666 deref2 = true;
667 /* Do not look through a storage order barrier. */
668 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
669 return false;
670 if (known_eq (vro2->off, -1))
671 break;
672 off2 += vro2->off;
674 if (maybe_ne (off1, off2))
675 return false;
676 if (deref1 && vro1->opcode == ADDR_EXPR)
678 memset (&tem1, 0, sizeof (tem1));
679 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
680 tem1.type = TREE_TYPE (tem1.op0);
681 tem1.opcode = TREE_CODE (tem1.op0);
682 vro1 = &tem1;
683 deref1 = false;
685 if (deref2 && vro2->opcode == ADDR_EXPR)
687 memset (&tem2, 0, sizeof (tem2));
688 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
689 tem2.type = TREE_TYPE (tem2.op0);
690 tem2.opcode = TREE_CODE (tem2.op0);
691 vro2 = &tem2;
692 deref2 = false;
694 if (deref1 != deref2)
695 return false;
696 if (!vn_reference_op_eq (vro1, vro2))
697 return false;
698 ++j;
699 ++i;
701 while (vr1->operands.length () != i
702 || vr2->operands.length () != j);
704 return true;
707 /* Copy the operations present in load/store REF into RESULT, a vector of
708 vn_reference_op_s's. */
710 static void
711 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
713 if (TREE_CODE (ref) == TARGET_MEM_REF)
715 vn_reference_op_s temp;
717 result->reserve (3);
719 memset (&temp, 0, sizeof (temp));
720 temp.type = TREE_TYPE (ref);
721 temp.opcode = TREE_CODE (ref);
722 temp.op0 = TMR_INDEX (ref);
723 temp.op1 = TMR_STEP (ref);
724 temp.op2 = TMR_OFFSET (ref);
725 temp.off = -1;
726 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
727 temp.base = MR_DEPENDENCE_BASE (ref);
728 result->quick_push (temp);
730 memset (&temp, 0, sizeof (temp));
731 temp.type = NULL_TREE;
732 temp.opcode = ERROR_MARK;
733 temp.op0 = TMR_INDEX2 (ref);
734 temp.off = -1;
735 result->quick_push (temp);
737 memset (&temp, 0, sizeof (temp));
738 temp.type = NULL_TREE;
739 temp.opcode = TREE_CODE (TMR_BASE (ref));
740 temp.op0 = TMR_BASE (ref);
741 temp.off = -1;
742 result->quick_push (temp);
743 return;
746 /* For non-calls, store the information that makes up the address. */
747 tree orig = ref;
748 while (ref)
750 vn_reference_op_s temp;
752 memset (&temp, 0, sizeof (temp));
753 temp.type = TREE_TYPE (ref);
754 temp.opcode = TREE_CODE (ref);
755 temp.off = -1;
757 switch (temp.opcode)
759 case MODIFY_EXPR:
760 temp.op0 = TREE_OPERAND (ref, 1);
761 break;
762 case WITH_SIZE_EXPR:
763 temp.op0 = TREE_OPERAND (ref, 1);
764 temp.off = 0;
765 break;
766 case MEM_REF:
767 /* The base address gets its own vn_reference_op_s structure. */
768 temp.op0 = TREE_OPERAND (ref, 1);
769 if (!mem_ref_offset (ref).to_shwi (&temp.off))
770 temp.off = -1;
771 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
772 temp.base = MR_DEPENDENCE_BASE (ref);
773 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
774 break;
775 case BIT_FIELD_REF:
776 /* Record bits, position and storage order. */
777 temp.op0 = TREE_OPERAND (ref, 1);
778 temp.op1 = TREE_OPERAND (ref, 2);
779 if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
780 temp.off = -1;
781 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
782 break;
783 case COMPONENT_REF:
 784 	/* The field decl is enough to unambiguously specify the field;
 785 	   a matching type is not necessary, and a mismatching type
786 is always a spurious difference. */
787 temp.type = NULL_TREE;
788 temp.op0 = TREE_OPERAND (ref, 1);
789 temp.op1 = TREE_OPERAND (ref, 2);
791 tree this_offset = component_ref_field_offset (ref);
792 if (this_offset
793 && poly_int_tree_p (this_offset))
795 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
796 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
798 poly_offset_int off
799 = (wi::to_poly_offset (this_offset)
800 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
 801 		/* Prohibit value-numbering zero offset components
802 of addresses the same before the pass folding
803 __builtin_object_size had a chance to run
804 (checking cfun->after_inlining does the
805 trick here). */
806 if (TREE_CODE (orig) != ADDR_EXPR
807 || maybe_ne (off, 0)
808 || cfun->after_inlining)
809 off.to_shwi (&temp.off);
813 break;
814 case ARRAY_RANGE_REF:
815 case ARRAY_REF:
817 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
818 /* Record index as operand. */
819 temp.op0 = TREE_OPERAND (ref, 1);
820 /* Always record lower bounds and element size. */
821 temp.op1 = array_ref_low_bound (ref);
822 /* But record element size in units of the type alignment. */
823 temp.op2 = TREE_OPERAND (ref, 3);
824 temp.align = eltype->type_common.align;
825 if (! temp.op2)
826 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
827 size_int (TYPE_ALIGN_UNIT (eltype)));
828 if (poly_int_tree_p (temp.op0)
829 && poly_int_tree_p (temp.op1)
830 && TREE_CODE (temp.op2) == INTEGER_CST)
832 poly_offset_int off = ((wi::to_poly_offset (temp.op0)
833 - wi::to_poly_offset (temp.op1))
834 * wi::to_offset (temp.op2)
835 * vn_ref_op_align_unit (&temp));
836 off.to_shwi (&temp.off);
839 break;
840 case VAR_DECL:
841 if (DECL_HARD_REGISTER (ref))
843 temp.op0 = ref;
844 break;
846 /* Fallthru. */
847 case PARM_DECL:
848 case CONST_DECL:
849 case RESULT_DECL:
850 /* Canonicalize decls to MEM[&decl] which is what we end up with
851 when valueizing MEM[ptr] with ptr = &decl. */
852 temp.opcode = MEM_REF;
853 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
854 temp.off = 0;
855 result->safe_push (temp);
856 temp.opcode = ADDR_EXPR;
857 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
858 temp.type = TREE_TYPE (temp.op0);
859 temp.off = -1;
860 break;
861 case STRING_CST:
862 case INTEGER_CST:
863 case COMPLEX_CST:
864 case VECTOR_CST:
865 case REAL_CST:
866 case FIXED_CST:
867 case CONSTRUCTOR:
868 case SSA_NAME:
869 temp.op0 = ref;
870 break;
871 case ADDR_EXPR:
872 if (is_gimple_min_invariant (ref))
874 temp.op0 = ref;
875 break;
877 break;
878 /* These are only interesting for their operands, their
879 existence, and their type. They will never be the last
880 ref in the chain of references (IE they require an
881 operand), so we don't have to put anything
882 for op* as it will be handled by the iteration */
883 case REALPART_EXPR:
884 temp.off = 0;
885 break;
886 case VIEW_CONVERT_EXPR:
887 temp.off = 0;
888 temp.reverse = storage_order_barrier_p (ref);
889 break;
890 case IMAGPART_EXPR:
891 /* This is only interesting for its constant offset. */
892 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
893 break;
894 default:
895 gcc_unreachable ();
897 result->safe_push (temp);
899 if (REFERENCE_CLASS_P (ref)
900 || TREE_CODE (ref) == MODIFY_EXPR
901 || TREE_CODE (ref) == WITH_SIZE_EXPR
902 || (TREE_CODE (ref) == ADDR_EXPR
903 && !is_gimple_min_invariant (ref)))
904 ref = TREE_OPERAND (ref, 0);
905 else
906 ref = NULL_TREE;
 910 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
911 operands in *OPS, the reference alias set SET and the reference type TYPE.
912 Return true if something useful was produced. */
914 bool
915 ao_ref_init_from_vn_reference (ao_ref *ref,
916 alias_set_type set, tree type,
917 vec<vn_reference_op_s> ops)
919 vn_reference_op_t op;
920 unsigned i;
921 tree base = NULL_TREE;
922 tree *op0_p = &base;
923 poly_offset_int offset = 0;
924 poly_offset_int max_size;
925 poly_offset_int size = -1;
926 tree size_tree = NULL_TREE;
927 alias_set_type base_alias_set = -1;
929 /* First get the final access size from just the outermost expression. */
930 op = &ops[0];
931 if (op->opcode == COMPONENT_REF)
932 size_tree = DECL_SIZE (op->op0);
933 else if (op->opcode == BIT_FIELD_REF)
934 size_tree = op->op0;
935 else
937 machine_mode mode = TYPE_MODE (type);
938 if (mode == BLKmode)
939 size_tree = TYPE_SIZE (type);
940 else
941 size = GET_MODE_BITSIZE (mode);
943 if (size_tree != NULL_TREE
944 && poly_int_tree_p (size_tree))
945 size = wi::to_poly_offset (size_tree);
947 /* Initially, maxsize is the same as the accessed element size.
948 In the following it will only grow (or become -1). */
949 max_size = size;
951 /* Compute cumulative bit-offset for nested component-refs and array-refs,
952 and find the ultimate containing object. */
953 FOR_EACH_VEC_ELT (ops, i, op)
955 switch (op->opcode)
957 /* These may be in the reference ops, but we cannot do anything
958 sensible with them here. */
959 case ADDR_EXPR:
960 /* Apart from ADDR_EXPR arguments to MEM_REF. */
961 if (base != NULL_TREE
962 && TREE_CODE (base) == MEM_REF
963 && op->op0
964 && DECL_P (TREE_OPERAND (op->op0, 0)))
966 vn_reference_op_t pop = &ops[i-1];
967 base = TREE_OPERAND (op->op0, 0);
968 if (known_eq (pop->off, -1))
970 max_size = -1;
971 offset = 0;
973 else
974 offset += pop->off * BITS_PER_UNIT;
975 op0_p = NULL;
976 break;
978 /* Fallthru. */
979 case CALL_EXPR:
980 return false;
982 /* Record the base objects. */
983 case MEM_REF:
984 base_alias_set = get_deref_alias_set (op->op0);
985 *op0_p = build2 (MEM_REF, op->type,
986 NULL_TREE, op->op0);
987 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
988 MR_DEPENDENCE_BASE (*op0_p) = op->base;
989 op0_p = &TREE_OPERAND (*op0_p, 0);
990 break;
992 case VAR_DECL:
993 case PARM_DECL:
994 case RESULT_DECL:
995 case SSA_NAME:
996 *op0_p = op->op0;
997 op0_p = NULL;
998 break;
1000 /* And now the usual component-reference style ops. */
1001 case BIT_FIELD_REF:
1002 offset += wi::to_offset (op->op1);
1003 break;
1005 case COMPONENT_REF:
1007 tree field = op->op0;
1008 /* We do not have a complete COMPONENT_REF tree here so we
1009 cannot use component_ref_field_offset. Do the interesting
1010 parts manually. */
1011 tree this_offset = DECL_FIELD_OFFSET (field);
1013 if (op->op1 || !poly_int_tree_p (this_offset))
1014 max_size = -1;
1015 else
1017 poly_offset_int woffset = (wi::to_poly_offset (this_offset)
1018 << LOG2_BITS_PER_UNIT);
1019 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1020 offset += woffset;
1022 break;
1025 case ARRAY_RANGE_REF:
1026 case ARRAY_REF:
1027 /* We recorded the lower bound and the element size. */
1028 if (!poly_int_tree_p (op->op0)
1029 || !poly_int_tree_p (op->op1)
1030 || TREE_CODE (op->op2) != INTEGER_CST)
1031 max_size = -1;
1032 else
1034 poly_offset_int woffset
1035 = wi::sext (wi::to_poly_offset (op->op0)
1036 - wi::to_poly_offset (op->op1),
1037 TYPE_PRECISION (TREE_TYPE (op->op0)));
1038 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1039 woffset <<= LOG2_BITS_PER_UNIT;
1040 offset += woffset;
1042 break;
1044 case REALPART_EXPR:
1045 break;
1047 case IMAGPART_EXPR:
1048 offset += size;
1049 break;
1051 case VIEW_CONVERT_EXPR:
1052 break;
1054 case STRING_CST:
1055 case INTEGER_CST:
1056 case COMPLEX_CST:
1057 case VECTOR_CST:
1058 case REAL_CST:
1059 case CONSTRUCTOR:
1060 case CONST_DECL:
1061 return false;
1063 default:
1064 return false;
1068 if (base == NULL_TREE)
1069 return false;
1071 ref->ref = NULL_TREE;
1072 ref->base = base;
1073 ref->ref_alias_set = set;
1074 if (base_alias_set != -1)
1075 ref->base_alias_set = base_alias_set;
1076 else
1077 ref->base_alias_set = get_alias_set (base);
1078 /* We discount volatiles from value-numbering elsewhere. */
1079 ref->volatile_p = false;
1081 if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1083 ref->offset = 0;
1084 ref->size = -1;
1085 ref->max_size = -1;
1086 return true;
1089 if (!offset.to_shwi (&ref->offset))
1091 ref->offset = 0;
1092 ref->max_size = -1;
1093 return true;
1096 if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1097 ref->max_size = -1;
1099 return true;
1102 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1103 vn_reference_op_s's. */
1105 static void
1106 copy_reference_ops_from_call (gcall *call,
1107 vec<vn_reference_op_s> *result)
1109 vn_reference_op_s temp;
1110 unsigned i;
1111 tree lhs = gimple_call_lhs (call);
1112 int lr;
1114 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1115 different. By adding the lhs here in the vector, we ensure that the
1116 hashcode is different, guaranteeing a different value number. */
1117 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1119 memset (&temp, 0, sizeof (temp));
1120 temp.opcode = MODIFY_EXPR;
1121 temp.type = TREE_TYPE (lhs);
1122 temp.op0 = lhs;
1123 temp.off = -1;
1124 result->safe_push (temp);
1127 /* Copy the type, opcode, function, static chain and EH region, if any. */
1128 memset (&temp, 0, sizeof (temp));
1129 temp.type = gimple_call_return_type (call);
1130 temp.opcode = CALL_EXPR;
1131 temp.op0 = gimple_call_fn (call);
1132 temp.op1 = gimple_call_chain (call);
1133 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1134 temp.op2 = size_int (lr);
1135 temp.off = -1;
1136 if (gimple_call_with_bounds_p (call))
1137 temp.with_bounds = 1;
1138 result->safe_push (temp);
1140 /* Copy the call arguments. As they can be references as well,
1141 just chain them together. */
1142 for (i = 0; i < gimple_call_num_args (call); ++i)
1144 tree callarg = gimple_call_arg (call, i);
1145 copy_reference_ops_from_ref (callarg, result);
1149 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1150 *I_P to point to the last element of the replacement. */
1151 static bool
1152 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1153 unsigned int *i_p)
1155 unsigned int i = *i_p;
1156 vn_reference_op_t op = &(*ops)[i];
1157 vn_reference_op_t mem_op = &(*ops)[i - 1];
1158 tree addr_base;
1159 poly_int64 addr_offset = 0;
1161 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1162 from .foo.bar to the preceding MEM_REF offset and replace the
1163 address with &OBJ. */
1164 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1165 &addr_offset);
1166 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1167 if (addr_base != TREE_OPERAND (op->op0, 0))
1169 poly_offset_int off
1170 = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1171 SIGNED)
1172 + addr_offset);
1173 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1174 op->op0 = build_fold_addr_expr (addr_base);
1175 if (tree_fits_shwi_p (mem_op->op0))
1176 mem_op->off = tree_to_shwi (mem_op->op0);
1177 else
1178 mem_op->off = -1;
1179 return true;
1181 return false;
1184 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1185 *I_P to point to the last element of the replacement. */
1186 static bool
1187 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1188 unsigned int *i_p)
1190 unsigned int i = *i_p;
1191 vn_reference_op_t op = &(*ops)[i];
1192 vn_reference_op_t mem_op = &(*ops)[i - 1];
1193 gimple *def_stmt;
1194 enum tree_code code;
1195 poly_offset_int off;
1197 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1198 if (!is_gimple_assign (def_stmt))
1199 return false;
1201 code = gimple_assign_rhs_code (def_stmt);
1202 if (code != ADDR_EXPR
1203 && code != POINTER_PLUS_EXPR)
1204 return false;
1206 off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
1208 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1209 from .foo.bar to the preceding MEM_REF offset and replace the
1210 address with &OBJ. */
1211 if (code == ADDR_EXPR)
1213 tree addr, addr_base;
1214 poly_int64 addr_offset;
1216 addr = gimple_assign_rhs1 (def_stmt);
1217 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1218 &addr_offset);
1219       /* If that didn't work because the address isn't invariant, propagate
1220 the reference tree from the address operation in case the current
1221 dereference isn't offsetted. */
1222 if (!addr_base
1223 && *i_p == ops->length () - 1
1224 && known_eq (off, 0)
1225 /* This makes us disable this transform for PRE where the
1226 	     reference ops might also be used for code insertion, which
1227 is invalid. */
1228 && default_vn_walk_kind == VN_WALKREWRITE)
1230 auto_vec<vn_reference_op_s, 32> tem;
1231 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1232 /* Make sure to preserve TBAA info. The only objects not
1233 wrapped in MEM_REFs that can have their address taken are
1234 STRING_CSTs. */
1235 if (tem.length () >= 2
1236 && tem[tem.length () - 2].opcode == MEM_REF)
1238 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1239 new_mem_op->op0
1240 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1241 wi::to_poly_wide (new_mem_op->op0));
1243 else
1244 gcc_assert (tem.last ().opcode == STRING_CST);
1245 ops->pop ();
1246 ops->pop ();
1247 ops->safe_splice (tem);
1248 --*i_p;
1249 return true;
1251 if (!addr_base
1252 || TREE_CODE (addr_base) != MEM_REF
1253 || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1254 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base, 0))))
1255 return false;
1257 off += addr_offset;
1258 off += mem_ref_offset (addr_base);
1259 op->op0 = TREE_OPERAND (addr_base, 0);
1261 else
1263 tree ptr, ptroff;
1264 ptr = gimple_assign_rhs1 (def_stmt);
1265 ptroff = gimple_assign_rhs2 (def_stmt);
1266 if (TREE_CODE (ptr) != SSA_NAME
1267 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1268 || TREE_CODE (ptroff) != INTEGER_CST)
1269 return false;
1271 off += wi::to_offset (ptroff);
1272 op->op0 = ptr;
1275 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1276 if (tree_fits_shwi_p (mem_op->op0))
1277 mem_op->off = tree_to_shwi (mem_op->op0);
1278 else
1279 mem_op->off = -1;
1280 if (TREE_CODE (op->op0) == SSA_NAME)
1281 op->op0 = SSA_VAL (op->op0);
1282 if (TREE_CODE (op->op0) != SSA_NAME)
1283 op->opcode = TREE_CODE (op->op0);
1285 /* And recurse. */
1286 if (TREE_CODE (op->op0) == SSA_NAME)
1287 vn_reference_maybe_forwprop_address (ops, i_p);
1288 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1289 vn_reference_fold_indirect (ops, i_p);
1290 return true;
1293 /* Optimize the reference REF to a constant if possible or return
1294 NULL_TREE if not. */
1296 tree
1297 fully_constant_vn_reference_p (vn_reference_t ref)
1299 vec<vn_reference_op_s> operands = ref->operands;
1300 vn_reference_op_t op;
1302 /* Try to simplify the translated expression if it is
1303 a call to a builtin function with at most two arguments. */
1304 op = &operands[0];
1305 if (op->opcode == CALL_EXPR
1306 && TREE_CODE (op->op0) == ADDR_EXPR
1307 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1308 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1309 && operands.length () >= 2
1310 && operands.length () <= 3)
1312 vn_reference_op_t arg0, arg1 = NULL;
1313 bool anyconst = false;
1314 arg0 = &operands[1];
1315 if (operands.length () > 2)
1316 arg1 = &operands[2];
1317 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1318 || (arg0->opcode == ADDR_EXPR
1319 && is_gimple_min_invariant (arg0->op0)))
1320 anyconst = true;
1321 if (arg1
1322 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1323 || (arg1->opcode == ADDR_EXPR
1324 && is_gimple_min_invariant (arg1->op0))))
1325 anyconst = true;
1326 if (anyconst)
1328 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1329 arg1 ? 2 : 1,
1330 arg0->op0,
1331 arg1 ? arg1->op0 : NULL);
1332 if (folded
1333 && TREE_CODE (folded) == NOP_EXPR)
1334 folded = TREE_OPERAND (folded, 0);
1335 if (folded
1336 && is_gimple_min_invariant (folded))
1337 return folded;
1341 /* Simplify reads from constants or constant initializers. */
1342 else if (BITS_PER_UNIT == 8
1343 && is_gimple_reg_type (ref->type)
1344 && (!INTEGRAL_TYPE_P (ref->type)
1345 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1347 poly_int64 off = 0;
1348 HOST_WIDE_INT size;
1349 if (INTEGRAL_TYPE_P (ref->type))
1350 size = TYPE_PRECISION (ref->type);
1351 else
1352 size = tree_to_shwi (TYPE_SIZE (ref->type));
1353 if (size % BITS_PER_UNIT != 0
1354 || size > MAX_BITSIZE_MODE_ANY_MODE)
1355 return NULL_TREE;
1356 size /= BITS_PER_UNIT;
1357 unsigned i;
1358 for (i = 0; i < operands.length (); ++i)
1360 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1362 ++i;
1363 break;
1365 if (known_eq (operands[i].off, -1))
1366 return NULL_TREE;
1367 off += operands[i].off;
1368 if (operands[i].opcode == MEM_REF)
1370 ++i;
1371 break;
1374 vn_reference_op_t base = &operands[--i];
1375 tree ctor = error_mark_node;
1376 tree decl = NULL_TREE;
1377 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1378 ctor = base->op0;
1379 else if (base->opcode == MEM_REF
1380 && base[1].opcode == ADDR_EXPR
1381 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1382 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1383 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1385 decl = TREE_OPERAND (base[1].op0, 0);
1386 if (TREE_CODE (decl) == STRING_CST)
1387 ctor = decl;
1388 else
1389 ctor = ctor_for_folding (decl);
1391 if (ctor == NULL_TREE)
1392 return build_zero_cst (ref->type);
1393 else if (ctor != error_mark_node)
1395 HOST_WIDE_INT const_off;
1396 if (decl)
1398 tree res = fold_ctor_reference (ref->type, ctor,
1399 off * BITS_PER_UNIT,
1400 size * BITS_PER_UNIT, decl);
1401 if (res)
1403 STRIP_USELESS_TYPE_CONVERSION (res);
1404 if (is_gimple_min_invariant (res))
1405 return res;
1408 else if (off.is_constant (&const_off))
1410 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1411 int len = native_encode_expr (ctor, buf, size, const_off);
1412 if (len > 0)
1413 return native_interpret_expr (ref->type, buf, len);
1418 return NULL_TREE;
1421 /* Return true if OPS contain a storage order barrier. */
1423 static bool
1424 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1426 vn_reference_op_t op;
1427 unsigned i;
1429 FOR_EACH_VEC_ELT (ops, i, op)
1430 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1431 return true;
1433 return false;
1436 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1437 structures into their value numbers. This is done in-place, and
1438 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1439 whether any operands were valueized. */
1441 static vec<vn_reference_op_s>
1442 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1444 vn_reference_op_t vro;
1445 unsigned int i;
1447 *valueized_anything = false;
1449 FOR_EACH_VEC_ELT (orig, i, vro)
1451 if (vro->opcode == SSA_NAME
1452 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1454 tree tem = SSA_VAL (vro->op0);
1455 if (tem != vro->op0)
1457 *valueized_anything = true;
1458 vro->op0 = tem;
1460 /* If it transforms from an SSA_NAME to a constant, update
1461 the opcode. */
1462 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1463 vro->opcode = TREE_CODE (vro->op0);
1465 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1467 tree tem = SSA_VAL (vro->op1);
1468 if (tem != vro->op1)
1470 *valueized_anything = true;
1471 vro->op1 = tem;
1474 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1476 tree tem = SSA_VAL (vro->op2);
1477 if (tem != vro->op2)
1479 *valueized_anything = true;
1480 vro->op2 = tem;
1483 /* If it transforms from an SSA_NAME to an address, fold with
1484 a preceding indirect reference. */
1485 if (i > 0
1486 && vro->op0
1487 && TREE_CODE (vro->op0) == ADDR_EXPR
1488 && orig[i - 1].opcode == MEM_REF)
1490 if (vn_reference_fold_indirect (&orig, &i))
1491 *valueized_anything = true;
1493 else if (i > 0
1494 && vro->opcode == SSA_NAME
1495 && orig[i - 1].opcode == MEM_REF)
1497 if (vn_reference_maybe_forwprop_address (&orig, &i))
1498 *valueized_anything = true;
1500 /* If it transforms a non-constant ARRAY_REF into a constant
1501 one, adjust the constant offset. */
1502 else if (vro->opcode == ARRAY_REF
1503 && known_eq (vro->off, -1)
1504 && poly_int_tree_p (vro->op0)
1505 && poly_int_tree_p (vro->op1)
1506 && TREE_CODE (vro->op2) == INTEGER_CST)
1508 poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1509 - wi::to_poly_offset (vro->op1))
1510 * wi::to_offset (vro->op2)
1511 * vn_ref_op_align_unit (vro));
1512 off.to_shwi (&vro->off);
1516 return orig;
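/* Like valueize_refs_1, but without reporting whether any operand was
   valueized.  */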
1519 static vec<vn_reference_op_s>
1520 valueize_refs (vec<vn_reference_op_s> orig)
1522 bool tem;
1523 return valueize_refs_1 (orig, &tem);
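/* Shared scratch vector of reference operands, reused by the
   valueize_shared_reference_ops_from_* functions below to avoid repeated
   allocations.  */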
1526 static vec<vn_reference_op_s> shared_lookup_references;
1528 /* Create a vector of vn_reference_op_s structures from REF, a
1529 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1530 this function. *VALUEIZED_ANYTHING will specify whether any
1531 operands were valueized. */
1533 static vec<vn_reference_op_s>
1534 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1536 if (!ref)
1537 return vNULL;
1538 shared_lookup_references.truncate (0);
1539 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1540 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1541 valueized_anything);
1542 return shared_lookup_references;
1545 /* Create a vector of vn_reference_op_s structures from CALL, a
1546 call statement. The vector is shared among all callers of
1547 this function. */
1549 static vec<vn_reference_op_s>
1550 valueize_shared_reference_ops_from_call (gcall *call)
1552 if (!call)
1553 return vNULL;
1554 shared_lookup_references.truncate (0);
1555 copy_reference_ops_from_call (call, &shared_lookup_references);
1556 shared_lookup_references = valueize_refs (shared_lookup_references);
1557 return shared_lookup_references;
1560 /* Lookup a SCCVN reference operation VR in the current hash table.
1561 Returns the resulting value number if it exists in the hash table,
1562 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1563 vn_reference_t stored in the hashtable if something is found. */
1565 static tree
1566 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1568 vn_reference_s **slot;
1569 hashval_t hash;
1571 hash = vr->hashcode;
1572 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1573 if (!slot && current_info == optimistic_info)
1574 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1575 if (slot)
1577 if (vnresult)
1578 *vnresult = (vn_reference_t)*slot;
1579 return ((vn_reference_t)*slot)->result;
1582 return NULL_TREE;
1585 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1586 with the current VUSE and performs the expression lookup. */
1588 static void *
1589 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1590 unsigned int cnt, void *vr_)
1592 vn_reference_t vr = (vn_reference_t)vr_;
1593 vn_reference_s **slot;
1594 hashval_t hash;
1596 /* This bounds the stmt walks we perform on reference lookups
1597 to O(1) instead of O(N) where N is the number of dominating
1598 stores. */
1599 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1600 return (void *)-1;
1602 if (last_vuse_ptr)
1603 *last_vuse_ptr = vuse;
1605 /* Fixup vuse and hash. */
1606 if (vr->vuse)
1607 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1608 vr->vuse = vuse_ssa_val (vuse);
1609 if (vr->vuse)
1610 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1612 hash = vr->hashcode;
1613 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1614 if (!slot && current_info == optimistic_info)
1615 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1616 if (slot)
1617 return *slot;
1619 return NULL;
1622 /* Lookup an existing or insert a new vn_reference entry into the
1623 value table for the VUSE, SET, TYPE, OPERANDS reference which
1624 has the value VALUE which is either a constant or an SSA name. */
1626 static vn_reference_t
1627 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1628 alias_set_type set,
1629 tree type,
1630 vec<vn_reference_op_s,
1631 va_heap> operands,
1632 tree value)
1634 vn_reference_s vr1;
1635 vn_reference_t result;
1636 unsigned value_id;
1637 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1638 vr1.operands = operands;
1639 vr1.type = type;
1640 vr1.set = set;
1641 vr1.hashcode = vn_reference_compute_hash (&vr1);
1642 if (vn_reference_lookup_1 (&vr1, &result))
1643 return result;
1644 if (TREE_CODE (value) == SSA_NAME)
1645 value_id = VN_INFO (value)->value_id;
1646 else
1647 value_id = get_or_alloc_constant_value_id (value);
1648 return vn_reference_insert_pieces (vuse, set, type,
1649 operands.copy (), value, value_id);
1652 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *stmt, tree result);
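/* Countdown used by vn_lookup_simplify_result to stop serving as
   mprts_hook after too many successful lookups (see PR80887).  */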
1653 static unsigned mprts_hook_cnt;
1655 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
1657 static tree
1658 vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops_)
1660 if (!rcode.is_tree_code ())
1661 return NULL_TREE;
1662 tree *ops = ops_;
1663 unsigned int length = TREE_CODE_LENGTH ((tree_code) rcode);
1664 if (rcode == CONSTRUCTOR
1665 /* ??? We're arriving here with SCCVNs view, decomposed CONSTRUCTOR
1666 and GIMPLEs / match-and-simplifies, CONSTRUCTOR as GENERIC tree. */
1667 && TREE_CODE (ops_[0]) == CONSTRUCTOR)
1669 length = CONSTRUCTOR_NELTS (ops_[0]);
1670 ops = XALLOCAVEC (tree, length);
1671 for (unsigned i = 0; i < length; ++i)
1672 ops[i] = CONSTRUCTOR_ELT (ops_[0], i)->value;
1674 vn_nary_op_t vnresult = NULL;
1675 tree res = vn_nary_op_lookup_pieces (length, (tree_code) rcode,
1676 type, ops, &vnresult);
1677 /* We can end up endlessly recursing simplifications if the lookup above
1678 presents us with a def-use chain that mirrors the original simplification.
1679 See PR80887 for an example. Limit successful lookup artificially
1680 to 10 times if we are called as mprts_hook. */
1681 if (res
1682 && mprts_hook
1683 && --mprts_hook_cnt == 0)
1685 if (dump_file && (dump_flags & TDF_DETAILS))
1686 fprintf (dump_file, "Resetting mprts_hook after too many "
1687 "invocations.\n");
1688 mprts_hook = NULL;
1690 return res;
1693 /* Return a value-number for RCODE OPS... either by looking up an existing
1694 value-number for the simplified result or by inserting the operation if
1695 INSERT is true. */
1697 static tree
1698 vn_nary_build_or_lookup_1 (code_helper rcode, tree type, tree *ops,
1699 bool insert)
1701 tree result = NULL_TREE;
1702 /* We will be creating a value number for
1703 RCODE (OPS...).
1704 So first simplify and lookup this expression to see if it
1705 is already available. */
1706 mprts_hook = vn_lookup_simplify_result;
1707 mprts_hook_cnt = 9;
1708 bool res = false;
1709 switch (TREE_CODE_LENGTH ((tree_code) rcode))
1711 case 1:
1712 res = gimple_resimplify1 (NULL, &rcode, type, ops, vn_valueize);
1713 break;
1714 case 2:
1715 res = gimple_resimplify2 (NULL, &rcode, type, ops, vn_valueize);
1716 break;
1717 case 3:
1718 res = gimple_resimplify3 (NULL, &rcode, type, ops, vn_valueize);
1719 break;
1721 mprts_hook = NULL;
1722 gimple *new_stmt = NULL;
1723 if (res
1724 && gimple_simplified_result_is_gimple_val (rcode, ops))
1725 /* The expression is already available. */
1726 result = ops[0];
1727 else
1729 tree val = vn_lookup_simplify_result (rcode, type, ops);
1730 if (!val && insert)
1732 gimple_seq stmts = NULL;
1733 result = maybe_push_res_to_seq (rcode, type, ops, &stmts);
1734 if (result)
1736 gcc_assert (gimple_seq_singleton_p (stmts));
1737 new_stmt = gimple_seq_first_stmt (stmts);
1740 else
1741 /* The expression is already available. */
1742 result = val;
1744 if (new_stmt)
1746 /* The expression is not yet available, value-number lhs to
1747 the new SSA_NAME we created. */
1748 /* Initialize value-number information properly. */
1749 VN_INFO_GET (result)->valnum = result;
1750 VN_INFO (result)->value_id = get_next_value_id ();
1751 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1752 new_stmt);
1753 VN_INFO (result)->needs_insertion = true;
1754 /* ??? PRE phi-translation inserts NARYs without corresponding
1755 SSA name result. Re-use those but set their result according
1756 to the stmt we just built. */
1757 vn_nary_op_t nary = NULL;
1758 vn_nary_op_lookup_stmt (new_stmt, &nary);
1759 if (nary)
1761 gcc_assert (nary->result == NULL_TREE);
1762 nary->result = gimple_assign_lhs (new_stmt);
1764 /* As all "inserted" statements are singleton SCCs, insert
1765 to the valid table. This is strictly needed to
1766 avoid re-generating new value SSA_NAMEs for the same
1767 expression during SCC iteration over and over (the
1768 optimistic table gets cleared after each iteration).
1769 We do not need to insert into the optimistic table, as
1770 lookups there will fall back to the valid table. */
1771 else if (current_info == optimistic_info)
1773 current_info = valid_info;
1774 vn_nary_op_insert_stmt (new_stmt, result);
1775 current_info = optimistic_info;
1777 else
1778 vn_nary_op_insert_stmt (new_stmt, result);
1779 if (dump_file && (dump_flags & TDF_DETAILS))
1781 fprintf (dump_file, "Inserting name ");
1782 print_generic_expr (dump_file, result);
1783 fprintf (dump_file, " for expression ");
1784 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1785 fprintf (dump_file, "\n");
1788 return result;
1791 /* Return a value-number for RCODE OPS... either by looking up an existing
1792 value-number for the simplified result or by inserting the operation. */
1794 static tree
1795 vn_nary_build_or_lookup (code_helper rcode, tree type, tree *ops)
1797 return vn_nary_build_or_lookup_1 (rcode, type, ops, true);
1800 /* Try to simplify the expression RCODE OPS... of type TYPE and return
1801 its value if present. */
1803 tree
1804 vn_nary_simplify (vn_nary_op_t nary)
1806 if (nary->length > 3)
1807 return NULL_TREE;
1808 tree ops[3];
1809 memcpy (ops, nary->op, sizeof (tree) * nary->length);
1810 return vn_nary_build_or_lookup_1 (nary->opcode, nary->type, ops, false);
1814 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1815 from the statement defining VUSE and if not successful tries to
1816 translate *REFP and VR_ through an aggregate copy at the definition
1817 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1818 of *REF and *VR. If only disambiguation was performed then
1819 *DISAMBIGUATE_ONLY is set to true. */
1821 static void *
1822 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1823 bool *disambiguate_only)
1825 vn_reference_t vr = (vn_reference_t)vr_;
1826 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1827 tree base = ao_ref_base (ref);
1828 HOST_WIDE_INT offseti, maxsizei;
1829 static vec<vn_reference_op_s> lhs_ops;
1830 ao_ref lhs_ref;
1831 bool lhs_ref_ok = false;
1832 poly_int64 copy_size;
1834 /* If the reference is based on a parameter that was determined as
1835 pointing to readonly memory it doesn't change. */
1836 if (TREE_CODE (base) == MEM_REF
1837 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1838 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1839 && bitmap_bit_p (const_parms,
1840 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1842 *disambiguate_only = true;
1843 return NULL;
1846   /* First try to disambiguate after value-replacing in the definition's LHS.  */
1847 if (is_gimple_assign (def_stmt))
1849 tree lhs = gimple_assign_lhs (def_stmt);
1850 bool valueized_anything = false;
1851 /* Avoid re-allocation overhead. */
1852 lhs_ops.truncate (0);
1853 copy_reference_ops_from_ref (lhs, &lhs_ops);
1854 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1855 if (valueized_anything)
1857 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1858 get_alias_set (lhs),
1859 TREE_TYPE (lhs), lhs_ops);
1860 if (lhs_ref_ok
1861 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1863 *disambiguate_only = true;
1864 return NULL;
1867 else
1869 ao_ref_init (&lhs_ref, lhs);
1870 lhs_ref_ok = true;
1873 /* If we reach a clobbering statement try to skip it and see if
1874 we find a VN result with exactly the same value as the
1875 possible clobber. In this case we can ignore the clobber
1876 and return the found value.
1877 Note that we don't need to worry about partial overlapping
1878 accesses as we then can use TBAA to disambiguate against the
1879 clobbering statement when looking up a load (thus the
1880 VN_WALKREWRITE guard). */
1881 if (vn_walk_kind == VN_WALKREWRITE
1882 && is_gimple_reg_type (TREE_TYPE (lhs))
1883 && types_compatible_p (TREE_TYPE (lhs), vr->type))
1885 tree *saved_last_vuse_ptr = last_vuse_ptr;
1886 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
1887 last_vuse_ptr = NULL;
1888 tree saved_vuse = vr->vuse;
1889 hashval_t saved_hashcode = vr->hashcode;
1890 void *res = vn_reference_lookup_2 (ref,
1891 gimple_vuse (def_stmt), 0, vr);
1892 /* Need to restore vr->vuse and vr->hashcode. */
1893 vr->vuse = saved_vuse;
1894 vr->hashcode = saved_hashcode;
1895 last_vuse_ptr = saved_last_vuse_ptr;
1896 if (res && res != (void *)-1)
1898 vn_reference_t vnresult = (vn_reference_t) res;
1899 if (vnresult->result
1900 && operand_equal_p (vnresult->result,
1901 gimple_assign_rhs1 (def_stmt), 0))
1902 return res;
1906 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1907 && gimple_call_num_args (def_stmt) <= 4)
1909 /* For builtin calls valueize its arguments and call the
1910 alias oracle again. Valueization may improve points-to
1911 info of pointers and constify size and position arguments.
1912 Originally this was motivated by PR61034 which has
1913 conditional calls to free falsely clobbering ref because
1914 of imprecise points-to info of the argument. */
1915 tree oldargs[4];
1916 bool valueized_anything = false;
1917 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1919 oldargs[i] = gimple_call_arg (def_stmt, i);
1920 tree val = vn_valueize (oldargs[i]);
1921 if (val != oldargs[i])
1923 gimple_call_set_arg (def_stmt, i, val);
1924 valueized_anything = true;
1927 if (valueized_anything)
1929 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1930 ref);
1931 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1932 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1933 if (!res)
1935 *disambiguate_only = true;
1936 return NULL;
1941 if (*disambiguate_only)
1942 return (void *)-1;
1944 /* If we cannot constrain the size of the reference we cannot
1945 test if anything kills it. */
1946 if (!ref->max_size_known_p ())
1947 return (void *)-1;
1949 poly_int64 offset = ref->offset;
1950 poly_int64 maxsize = ref->max_size;
1952 /* We can't deduce anything useful from clobbers. */
1953 if (gimple_clobber_p (def_stmt))
1954 return (void *)-1;
1956 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1957 from that definition.
1958 1) Memset. */
1959 if (is_gimple_reg_type (vr->type)
1960 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1961 && (integer_zerop (gimple_call_arg (def_stmt, 1))
1962 || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
1963 || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
1964 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1965 && known_eq (ref->size, maxsize)
1966 && offset.is_constant (&offseti)
1967 && offseti % BITS_PER_UNIT == 0))
1968 && poly_int_tree_p (gimple_call_arg (def_stmt, 2))
1969 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
1970 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
1972 tree base2;
1973 poly_int64 offset2, size2, maxsize2;
1974 bool reverse;
1975 tree ref2 = gimple_call_arg (def_stmt, 0);
1976 if (TREE_CODE (ref2) == SSA_NAME)
1978 ref2 = SSA_VAL (ref2);
1979 if (TREE_CODE (ref2) == SSA_NAME
1980 && (TREE_CODE (base) != MEM_REF
1981 || TREE_OPERAND (base, 0) != ref2))
1983 gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
1984 if (gimple_assign_single_p (def_stmt)
1985 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
1986 ref2 = gimple_assign_rhs1 (def_stmt);
1989 if (TREE_CODE (ref2) == ADDR_EXPR)
1991 ref2 = TREE_OPERAND (ref2, 0);
1992 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1993 &reverse);
1994 if (!known_size_p (maxsize2)
1995 || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
1996 return (void *)-1;
1998 else if (TREE_CODE (ref2) == SSA_NAME)
2000 poly_int64 soff;
2001 if (TREE_CODE (base) != MEM_REF
2002 || !(mem_ref_offset (base) << LOG2_BITS_PER_UNIT).to_shwi (&soff))
2003 return (void *)-1;
2004 offset += soff;
2005 offset2 = 0;
2006 if (TREE_OPERAND (base, 0) != ref2)
2008 gimple *def = SSA_NAME_DEF_STMT (ref2);
2009 if (is_gimple_assign (def)
2010 && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
2011 && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
2012 && poly_int_tree_p (gimple_assign_rhs2 (def))
2013 && (wi::to_poly_offset (gimple_assign_rhs2 (def))
2014 << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
2016 ref2 = gimple_assign_rhs1 (def);
2017 if (TREE_CODE (ref2) == SSA_NAME)
2018 ref2 = SSA_VAL (ref2);
2020 else
2021 return (void *)-1;
2024 else
2025 return (void *)-1;
2026 tree len = gimple_call_arg (def_stmt, 2);
2027 if (known_subrange_p (offset, maxsize, offset2,
2028 wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
2030 tree val;
2031 if (integer_zerop (gimple_call_arg (def_stmt, 1)))
2032 val = build_zero_cst (vr->type);
2033 else if (INTEGRAL_TYPE_P (vr->type)
2034 && known_eq (ref->size, 8))
2036 code_helper rcode = NOP_EXPR;
2037 tree ops[3] = {};
2038 ops[0] = gimple_call_arg (def_stmt, 1);
2039 val = vn_nary_build_or_lookup (rcode, vr->type, ops);
2040 if (!val
2041 || (TREE_CODE (val) == SSA_NAME
2042 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2043 return (void *)-1;
2045 else
2047 unsigned len = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type));
2048 unsigned char *buf = XALLOCAVEC (unsigned char, len);
2049 memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
2050 len);
2051 val = native_interpret_expr (vr->type, buf, len);
2052 if (!val)
2053 return (void *)-1;
2055 return vn_reference_lookup_or_insert_for_pieces
2056 (vuse, vr->set, vr->type, vr->operands, val);
2060 /* 2) Assignment from an empty CONSTRUCTOR. */
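/* E.g. (hypothetical):  a = {};  ... = a.i;  gives a zero value for
   the load when the accessed piece is covered by the cleared lhs.  */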
2061 else if (is_gimple_reg_type (vr->type)
2062 && gimple_assign_single_p (def_stmt)
2063 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
2064 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
2066 tree base2;
2067 poly_int64 offset2, size2, maxsize2;
2068 bool reverse;
2069 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2070 &offset2, &size2, &maxsize2, &reverse);
2071 if (known_size_p (maxsize2)
2072 && operand_equal_p (base, base2, 0)
2073 && known_subrange_p (offset, maxsize, offset2, size2))
2075 tree val = build_zero_cst (vr->type);
2076 return vn_reference_lookup_or_insert_for_pieces
2077 (vuse, vr->set, vr->type, vr->operands, val);
2081 /* 3) Assignment from a constant. We can use fold's native encode/interpret
2082 routines to extract the assigned bits. */
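/* E.g. (hypothetical):  x = 0x01020304;  followed by a load of some
   bytes of x -- the constant is encoded into a byte buffer with
   native_encode_expr and the accessed piece is re-read in the type
   of the load with native_interpret_expr.  */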
2083 else if (known_eq (ref->size, maxsize)
2084 && is_gimple_reg_type (vr->type)
2085 && !contains_storage_order_barrier_p (vr->operands)
2086 && gimple_assign_single_p (def_stmt)
2087 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2088 /* native_encode and native_decode operate on arrays of bytes
2089 and so fundamentally need a compile-time size and offset. */
2090 && maxsize.is_constant (&maxsizei)
2091 && maxsizei % BITS_PER_UNIT == 0
2092 && offset.is_constant (&offseti)
2093 && offseti % BITS_PER_UNIT == 0
2094 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2095 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2096 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2098 tree base2;
2099 HOST_WIDE_INT offset2, size2;
2100 bool reverse;
2101 base2 = get_ref_base_and_extent_hwi (gimple_assign_lhs (def_stmt),
2102 &offset2, &size2, &reverse);
2103 if (base2
2104 && !reverse
2105 && size2 % BITS_PER_UNIT == 0
2106 && offset2 % BITS_PER_UNIT == 0
2107 && operand_equal_p (base, base2, 0)
2108 && known_subrange_p (offseti, maxsizei, offset2, size2))
2110 /* We support up to 512-bit values (for V8DFmode). */
2111 unsigned char buffer[64];
2112 int len;
2114 tree rhs = gimple_assign_rhs1 (def_stmt);
2115 if (TREE_CODE (rhs) == SSA_NAME)
2116 rhs = SSA_VAL (rhs);
2117 len = native_encode_expr (rhs,
2118 buffer, sizeof (buffer),
2119 (offseti - offset2) / BITS_PER_UNIT);
2120 if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
2122 tree type = vr->type;
2123 /* Make sure to interpret in a type that has a range
2124 covering the whole access size. */
2125 if (INTEGRAL_TYPE_P (vr->type)
2126 && maxsizei != TYPE_PRECISION (vr->type))
2127 type = build_nonstandard_integer_type (maxsizei,
2128 TYPE_UNSIGNED (type));
2129 tree val = native_interpret_expr (type, buffer,
2130 maxsizei / BITS_PER_UNIT);
2131 /* If we chop off bits because the type's precision doesn't
2132 match the memory access size this is ok when optimizing
2133 reads but not when called from the DSE code during
2134 elimination. */
2135 if (val
2136 && type != vr->type)
2138 if (! int_fits_type_p (val, vr->type))
2139 val = NULL_TREE;
2140 else
2141 val = fold_convert (vr->type, val);
2144 if (val)
2145 return vn_reference_lookup_or_insert_for_pieces
2146 (vuse, vr->set, vr->type, vr->operands, val);
2151 /* 4) Assignment from an SSA name whose definition we may be able
2152 to access pieces from. */
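/* E.g. (hypothetical):  a = b_1;  ... = a.i;  is handled by looking up
   or building BIT_FIELD_REF <b_1, size, offset> for the accessed piece.  */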
2153 else if (known_eq (ref->size, maxsize)
2154 && is_gimple_reg_type (vr->type)
2155 && !contains_storage_order_barrier_p (vr->operands)
2156 && gimple_assign_single_p (def_stmt)
2157 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2159 tree base2;
2160 poly_int64 offset2, size2, maxsize2;
2161 bool reverse;
2162 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2163 &offset2, &size2, &maxsize2,
2164 &reverse);
2165 if (!reverse
2166 && known_size_p (maxsize2)
2167 && known_eq (maxsize2, size2)
2168 && operand_equal_p (base, base2, 0)
2169 && known_subrange_p (offset, maxsize, offset2, size2)
2170 /* ??? We can't handle bitfield precision extracts without
2171 either using an alternate type for the BIT_FIELD_REF and
2172 then doing a conversion or possibly adjusting the offset
2173 according to endianness. */
2174 && (! INTEGRAL_TYPE_P (vr->type)
2175 || known_eq (ref->size, TYPE_PRECISION (vr->type)))
2176 && multiple_p (ref->size, BITS_PER_UNIT))
2178 code_helper rcode = BIT_FIELD_REF;
2179 tree ops[3];
2180 ops[0] = SSA_VAL (gimple_assign_rhs1 (def_stmt));
2181 ops[1] = bitsize_int (ref->size);
2182 ops[2] = bitsize_int (offset - offset2);
2183 tree val = vn_nary_build_or_lookup (rcode, vr->type, ops);
2184 if (val
2185 && (TREE_CODE (val) != SSA_NAME
2186 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2188 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2189 (vuse, vr->set, vr->type, vr->operands, val);
2190 return res;
2195 /* 5) For aggregate copies translate the reference through them if
2196 the copy kills ref. */
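/* E.g. (hypothetical):  a = b;  ... = a.i;  is translated into a lookup
   of b.i by re-basing the reference operands on the rhs of the copy.  */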
2197 else if (vn_walk_kind == VN_WALKREWRITE
2198 && gimple_assign_single_p (def_stmt)
2199 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2200 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2201 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2203 tree base2;
2204 int i, j, k;
2205 auto_vec<vn_reference_op_s> rhs;
2206 vn_reference_op_t vro;
2207 ao_ref r;
2209 if (!lhs_ref_ok)
2210 return (void *)-1;
2212 /* See if the assignment kills REF. */
2213 base2 = ao_ref_base (&lhs_ref);
2214 if (!lhs_ref.max_size_known_p ()
2215 || (base != base2
2216 && (TREE_CODE (base) != MEM_REF
2217 || TREE_CODE (base2) != MEM_REF
2218 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2219 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2220 TREE_OPERAND (base2, 1))))
2221 || !stmt_kills_ref_p (def_stmt, ref))
2222 return (void *)-1;
2224 /* Find the common base of ref and the lhs. lhs_ops already
2225 contains valueized operands for the lhs. */
2226 i = vr->operands.length () - 1;
2227 j = lhs_ops.length () - 1;
2228 while (j >= 0 && i >= 0
2229 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2231 i--;
2232 j--;
2235 /* ??? The innermost op should always be a MEM_REF and we already
2236 checked that the assignment to the lhs kills vr. Thus for
2237 aggregate copies using char[] types the vn_reference_op_eq
2238 may fail when comparing types for compatibility. But we really
2239 don't care here - further lookups with the rewritten operands
2240 will simply fail if we messed up types too badly. */
2241 poly_int64 extra_off = 0;
2242 if (j == 0 && i >= 0
2243 && lhs_ops[0].opcode == MEM_REF
2244 && maybe_ne (lhs_ops[0].off, -1))
2246 if (known_eq (lhs_ops[0].off, vr->operands[i].off))
2247 i--, j--;
2248 else if (vr->operands[i].opcode == MEM_REF
2249 && maybe_ne (vr->operands[i].off, -1))
2251 extra_off = vr->operands[i].off - lhs_ops[0].off;
2252 i--, j--;
2256 /* i now points to the first additional op.
2257 ??? LHS may not be completely contained in VR, one or more
2258 VIEW_CONVERT_EXPRs could be in its way. We could at least
2259 try handling outermost VIEW_CONVERT_EXPRs. */
2260 if (j != -1)
2261 return (void *)-1;
2263 /* Punt if the additional ops contain a storage order barrier. */
2264 for (k = i; k >= 0; k--)
2266 vro = &vr->operands[k];
2267 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2268 return (void *)-1;
2271 /* Now re-write REF to be based on the rhs of the assignment. */
2272 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2274 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2275 if (maybe_ne (extra_off, 0))
2277 if (rhs.length () < 2
2278 || rhs[0].opcode != MEM_REF
2279 || known_eq (rhs[0].off, -1))
2280 return (void *)-1;
2281 rhs[0].off += extra_off;
2282 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
2283 build_int_cst (TREE_TYPE (rhs[0].op0),
2284 extra_off));
2287 /* We need to pre-pend vr->operands[0..i] to rhs. */
2288 vec<vn_reference_op_s> old = vr->operands;
2289 if (i + 1 + rhs.length () > vr->operands.length ())
2290 vr->operands.safe_grow (i + 1 + rhs.length ());
2291 else
2292 vr->operands.truncate (i + 1 + rhs.length ());
2293 FOR_EACH_VEC_ELT (rhs, j, vro)
2294 vr->operands[i + 1 + j] = *vro;
2295 vr->operands = valueize_refs (vr->operands);
2296 if (old == shared_lookup_references)
2297 shared_lookup_references = vr->operands;
2298 vr->hashcode = vn_reference_compute_hash (vr);
2300 /* Try folding the new reference to a constant. */
2301 tree val = fully_constant_vn_reference_p (vr);
2302 if (val)
2303 return vn_reference_lookup_or_insert_for_pieces
2304 (vuse, vr->set, vr->type, vr->operands, val);
2306 /* Adjust *ref from the new operands. */
2307 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2308 return (void *)-1;
2309 /* This can happen with bitfields. */
2310 if (maybe_ne (ref->size, r.size))
2311 return (void *)-1;
2312 *ref = r;
2314 /* Do not update last seen VUSE after translating. */
2315 last_vuse_ptr = NULL;
2317 /* Keep looking for the adjusted *REF / VR pair. */
2318 return NULL;
2321 /* 6) For memcpy copies translate the reference through them if
2322 the copy kills ref. */
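/* E.g. (hypothetical):  memcpy (&a, &b, sizeof (a));  ... = a.i;
   is translated into a lookup of a MEM_REF based on &b at the
   corresponding byte offset.  */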
2323 else if (vn_walk_kind == VN_WALKREWRITE
2324 && is_gimple_reg_type (vr->type)
2325 /* ??? Handle BCOPY as well. */
2326 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2327 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2328 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2329 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2330 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2331 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2332 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2333 && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size))
2335 tree lhs, rhs;
2336 ao_ref r;
2337 poly_int64 rhs_offset, lhs_offset;
2338 vn_reference_op_s op;
2339 poly_uint64 mem_offset;
2340 poly_int64 at, byte_maxsize;
2342 /* Only handle non-variable, addressable refs. */
2343 if (maybe_ne (ref->size, maxsize)
2344 || !multiple_p (offset, BITS_PER_UNIT, &at)
2345 || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
2346 return (void *)-1;
2348 /* Extract a pointer base and an offset for the destination. */
2349 lhs = gimple_call_arg (def_stmt, 0);
2350 lhs_offset = 0;
2351 if (TREE_CODE (lhs) == SSA_NAME)
2353 lhs = SSA_VAL (lhs);
2354 if (TREE_CODE (lhs) == SSA_NAME)
2356 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2357 if (gimple_assign_single_p (def_stmt)
2358 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2359 lhs = gimple_assign_rhs1 (def_stmt);
2362 if (TREE_CODE (lhs) == ADDR_EXPR)
2364 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2365 &lhs_offset);
2366 if (!tem)
2367 return (void *)-1;
2368 if (TREE_CODE (tem) == MEM_REF
2369 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2371 lhs = TREE_OPERAND (tem, 0);
2372 if (TREE_CODE (lhs) == SSA_NAME)
2373 lhs = SSA_VAL (lhs);
2374 lhs_offset += mem_offset;
2376 else if (DECL_P (tem))
2377 lhs = build_fold_addr_expr (tem);
2378 else
2379 return (void *)-1;
2381 if (TREE_CODE (lhs) != SSA_NAME
2382 && TREE_CODE (lhs) != ADDR_EXPR)
2383 return (void *)-1;
2385 /* Extract a pointer base and an offset for the source. */
2386 rhs = gimple_call_arg (def_stmt, 1);
2387 rhs_offset = 0;
2388 if (TREE_CODE (rhs) == SSA_NAME)
2389 rhs = SSA_VAL (rhs);
2390 if (TREE_CODE (rhs) == ADDR_EXPR)
2392 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2393 &rhs_offset);
2394 if (!tem)
2395 return (void *)-1;
2396 if (TREE_CODE (tem) == MEM_REF
2397 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2399 rhs = TREE_OPERAND (tem, 0);
2400 rhs_offset += mem_offset;
2402 else if (DECL_P (tem)
2403 || TREE_CODE (tem) == STRING_CST)
2404 rhs = build_fold_addr_expr (tem);
2405 else
2406 return (void *)-1;
2408 if (TREE_CODE (rhs) != SSA_NAME
2409 && TREE_CODE (rhs) != ADDR_EXPR)
2410 return (void *)-1;
2412 /* The bases of the destination and the references have to agree. */
2413 if (TREE_CODE (base) == MEM_REF)
2415 if (TREE_OPERAND (base, 0) != lhs
2416 || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
2417 return (void *) -1;
2418 at += mem_offset;
2420 else if (!DECL_P (base)
2421 || TREE_CODE (lhs) != ADDR_EXPR
2422 || TREE_OPERAND (lhs, 0) != base)
2423 return (void *)-1;
2425 /* If the access is completely outside of the memcpy destination
2426 area there is no aliasing. */
2427 if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
2428 return NULL;
2429 /* And the access has to be contained within the memcpy destination. */
2430 if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
2431 return (void *)-1;
2433 /* Make room for 2 operands in the new reference. */
2434 if (vr->operands.length () < 2)
2436 vec<vn_reference_op_s> old = vr->operands;
2437 vr->operands.safe_grow_cleared (2);
2438 if (old == shared_lookup_references)
2439 shared_lookup_references = vr->operands;
2441 else
2442 vr->operands.truncate (2);
2444 /* The looked-through reference is a simple MEM_REF. */
2445 memset (&op, 0, sizeof (op));
2446 op.type = vr->type;
2447 op.opcode = MEM_REF;
2448 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2449 op.off = at - lhs_offset + rhs_offset;
2450 vr->operands[0] = op;
2451 op.type = TREE_TYPE (rhs);
2452 op.opcode = TREE_CODE (rhs);
2453 op.op0 = rhs;
2454 op.off = -1;
2455 vr->operands[1] = op;
2456 vr->hashcode = vn_reference_compute_hash (vr);
2458 /* Try folding the new reference to a constant. */
2459 tree val = fully_constant_vn_reference_p (vr);
2460 if (val)
2461 return vn_reference_lookup_or_insert_for_pieces
2462 (vuse, vr->set, vr->type, vr->operands, val);
2464 /* Adjust *ref from the new operands. */
2465 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2466 return (void *)-1;
2467 /* This can happen with bitfields. */
2468 if (maybe_ne (ref->size, r.size))
2469 return (void *)-1;
2470 *ref = r;
2472 /* Do not update last seen VUSE after translating. */
2473 last_vuse_ptr = NULL;
2475 /* Keep looking for the adjusted *REF / VR pair. */
2476 return NULL;
2479 /* Bail out and stop walking. */
2480 return (void *)-1;
2483 /* Return a reference op vector from OP that can be used for
2484 vn_reference_lookup_pieces. The caller is responsible for releasing
2485 the vector. */
2487 vec<vn_reference_op_s>
2488 vn_reference_operands_for_lookup (tree op)
2490 bool valueized;
2491 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2494 /* Lookup a reference operation by its parts, in the current hash table.
2495 Returns the resulting value number if it exists in the hash table,
2496 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2497 vn_reference_t stored in the hashtable if something is found. */
2499 tree
2500 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2501 vec<vn_reference_op_s> operands,
2502 vn_reference_t *vnresult, vn_lookup_kind kind)
2504 struct vn_reference_s vr1;
2505 vn_reference_t tmp;
2506 tree cst;
2508 if (!vnresult)
2509 vnresult = &tmp;
2510 *vnresult = NULL;
2512 vr1.vuse = vuse_ssa_val (vuse);
2513 shared_lookup_references.truncate (0);
2514 shared_lookup_references.safe_grow (operands.length ());
2515 memcpy (shared_lookup_references.address (),
2516 operands.address (),
2517 sizeof (vn_reference_op_s)
2518 * operands.length ());
2519 vr1.operands = operands = shared_lookup_references
2520 = valueize_refs (shared_lookup_references);
2521 vr1.type = type;
2522 vr1.set = set;
2523 vr1.hashcode = vn_reference_compute_hash (&vr1);
2524 if ((cst = fully_constant_vn_reference_p (&vr1)))
2525 return cst;
2527 vn_reference_lookup_1 (&vr1, vnresult);
2528 if (!*vnresult
2529 && kind != VN_NOWALK
2530 && vr1.vuse)
2532 ao_ref r;
2533 vn_walk_kind = kind;
2534 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2535 *vnresult =
2536 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2537 vn_reference_lookup_2,
2538 vn_reference_lookup_3,
2539 vuse_ssa_val, &vr1);
2540 gcc_checking_assert (vr1.operands == shared_lookup_references);
2543 if (*vnresult)
2544 return (*vnresult)->result;
2546 return NULL_TREE;
2549 /* Lookup OP in the current hash table, and return the resulting value
2550 number if it exists in the hash table. Return NULL_TREE if it does
2551 not exist in the hash table or if the result field of the structure
2552 was NULL. VNRESULT will be filled in with the vn_reference_t
2553 stored in the hashtable if one exists. When TBAA_P is false assume
2554 we are looking up a store and treat it as having alias-set zero. */
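/* A typical use (hypothetical surrounding code) is looking up a load:

     tree val = vn_reference_lookup (gimple_assign_rhs1 (stmt),
                                     gimple_vuse (stmt),
                                     VN_WALKREWRITE, NULL, true);

   which yields the value number of an earlier equivalent access or
   NULL_TREE if none is known.  */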
2556 tree
2557 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2558 vn_reference_t *vnresult, bool tbaa_p)
2560 vec<vn_reference_op_s> operands;
2561 struct vn_reference_s vr1;
2562 tree cst;
2563 bool valuezied_anything;
2565 if (vnresult)
2566 *vnresult = NULL;
2568 vr1.vuse = vuse_ssa_val (vuse);
2569 vr1.operands = operands
2570 = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
2571 vr1.type = TREE_TYPE (op);
2572 vr1.set = tbaa_p ? get_alias_set (op) : 0;
2573 vr1.hashcode = vn_reference_compute_hash (&vr1);
2574 if ((cst = fully_constant_vn_reference_p (&vr1)))
2575 return cst;
2577 if (kind != VN_NOWALK
2578 && vr1.vuse)
2580 vn_reference_t wvnresult;
2581 ao_ref r;
2582 /* Make sure to use a valueized reference if we valueized anything.
2583 Otherwise preserve the full reference for advanced TBAA. */
2584 if (!valuezied_anything
2585 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2586 vr1.operands))
2587 ao_ref_init (&r, op);
2588 if (! tbaa_p)
2589 r.ref_alias_set = r.base_alias_set = 0;
2590 vn_walk_kind = kind;
2591 wvnresult =
2592 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2593 vn_reference_lookup_2,
2594 vn_reference_lookup_3,
2595 vuse_ssa_val, &vr1);
2596 gcc_checking_assert (vr1.operands == shared_lookup_references);
2597 if (wvnresult)
2599 if (vnresult)
2600 *vnresult = wvnresult;
2601 return wvnresult->result;
2604 return NULL_TREE;
2607 return vn_reference_lookup_1 (&vr1, vnresult);
2610 /* Lookup CALL in the current hash table and return the entry in
2611 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2613 void
2614 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2615 vn_reference_t vr)
2617 if (vnresult)
2618 *vnresult = NULL;
2620 tree vuse = gimple_vuse (call);
2622 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2623 vr->operands = valueize_shared_reference_ops_from_call (call);
2624 vr->type = gimple_expr_type (call);
2625 vr->set = 0;
2626 vr->hashcode = vn_reference_compute_hash (vr);
2627 vn_reference_lookup_1 (vr, vnresult);
2630 /* Insert OP into the current hash table with a value number of
2631 RESULT, and return the resulting reference structure we created. */
2633 static vn_reference_t
2634 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2636 vn_reference_s **slot;
2637 vn_reference_t vr1;
2638 bool tem;
2640 vr1 = current_info->references_pool->allocate ();
2641 if (TREE_CODE (result) == SSA_NAME)
2642 vr1->value_id = VN_INFO (result)->value_id;
2643 else
2644 vr1->value_id = get_or_alloc_constant_value_id (result);
2645 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2646 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2647 vr1->type = TREE_TYPE (op);
2648 vr1->set = get_alias_set (op);
2649 vr1->hashcode = vn_reference_compute_hash (vr1);
2650 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2651 vr1->result_vdef = vdef;
2653 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2654 INSERT);
2656 /* Because we lookup stores using vuses, and value number failures
2657 using the vdefs (see visit_reference_op_store for how and why),
2658 it's possible that on failure we may try to insert an already
2659 inserted store. This is not wrong, there is no ssa name for a
2660 store that we could use as a differentiator anyway. Thus, unlike
2661 the other lookup functions, you cannot gcc_assert (!*slot)
2662 here. */
2664 /* But free the old slot in case of a collision. */
2665 if (*slot)
2666 free_reference (*slot);
2668 *slot = vr1;
2669 return vr1;
2672 /* Insert a reference by its pieces into the current hash table with
2673 a value number of RESULT. Return the resulting reference
2674 structure we created. */
2676 vn_reference_t
2677 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2678 vec<vn_reference_op_s> operands,
2679 tree result, unsigned int value_id)
2682 vn_reference_s **slot;
2683 vn_reference_t vr1;
2685 vr1 = current_info->references_pool->allocate ();
2686 vr1->value_id = value_id;
2687 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2688 vr1->operands = valueize_refs (operands);
2689 vr1->type = type;
2690 vr1->set = set;
2691 vr1->hashcode = vn_reference_compute_hash (vr1);
2692 if (result && TREE_CODE (result) == SSA_NAME)
2693 result = SSA_VAL (result);
2694 vr1->result = result;
2696 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2697 INSERT);
2699 /* At this point we should have all the things inserted that we have
2700 seen before, and we should never try inserting something that
2701 already exists. */
2702 gcc_assert (!*slot);
2703 if (*slot)
2704 free_reference (*slot);
2706 *slot = vr1;
2707 return vr1;
2710 /* Compute and return the hash value for nary operation VNO1. */
2712 static hashval_t
2713 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2715 inchash::hash hstate;
2716 unsigned i;
2718 for (i = 0; i < vno1->length; ++i)
2719 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2720 vno1->op[i] = SSA_VAL (vno1->op[i]);
2722 if (((vno1->length == 2
2723 && commutative_tree_code (vno1->opcode))
2724 || (vno1->length == 3
2725 && commutative_ternary_tree_code (vno1->opcode)))
2726 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2727 std::swap (vno1->op[0], vno1->op[1]);
2728 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2729 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2731 std::swap (vno1->op[0], vno1->op[1]);
2732 vno1->opcode = swap_tree_comparison (vno1->opcode);
2735 hstate.add_int (vno1->opcode);
2736 for (i = 0; i < vno1->length; ++i)
2737 inchash::add_expr (vno1->op[i], hstate);
2739 return hstate.end ();
2742 /* Compare nary operations VNO1 and VNO2 and return true if they are
2743 equivalent. */
2745 bool
2746 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2748 unsigned i;
2750 if (vno1->hashcode != vno2->hashcode)
2751 return false;
2753 if (vno1->length != vno2->length)
2754 return false;
2756 if (vno1->opcode != vno2->opcode
2757 || !types_compatible_p (vno1->type, vno2->type))
2758 return false;
2760 for (i = 0; i < vno1->length; ++i)
2761 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2762 return false;
2764 /* BIT_INSERT_EXPR has an implicit operand as the type precision
2765 of op1. Need to check to make sure they are the same. */
2766 if (vno1->opcode == BIT_INSERT_EXPR
2767 && TREE_CODE (vno1->op[1]) == INTEGER_CST
2768 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
2769 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
2770 return false;
2772 return true;
2775 /* Initialize VNO from the pieces provided. */
2777 static void
2778 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2779 enum tree_code code, tree type, tree *ops)
2781 vno->opcode = code;
2782 vno->length = length;
2783 vno->type = type;
2784 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2787 /* Initialize VNO from OP. */
2789 static void
2790 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2792 unsigned i;
2794 vno->opcode = TREE_CODE (op);
2795 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2796 vno->type = TREE_TYPE (op);
2797 for (i = 0; i < vno->length; ++i)
2798 vno->op[i] = TREE_OPERAND (op, i);
2801 /* Return the number of operands for a vn_nary ops structure from STMT. */
2803 static unsigned int
2804 vn_nary_length_from_stmt (gimple *stmt)
2806 switch (gimple_assign_rhs_code (stmt))
2808 case REALPART_EXPR:
2809 case IMAGPART_EXPR:
2810 case VIEW_CONVERT_EXPR:
2811 return 1;
2813 case BIT_FIELD_REF:
2814 return 3;
2816 case CONSTRUCTOR:
2817 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2819 default:
2820 return gimple_num_ops (stmt) - 1;
2824 /* Initialize VNO from STMT. */
2826 static void
2827 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2829 unsigned i;
2831 vno->opcode = gimple_assign_rhs_code (stmt);
2832 vno->type = gimple_expr_type (stmt);
2833 switch (vno->opcode)
2835 case REALPART_EXPR:
2836 case IMAGPART_EXPR:
2837 case VIEW_CONVERT_EXPR:
2838 vno->length = 1;
2839 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2840 break;
2842 case BIT_FIELD_REF:
2843 vno->length = 3;
2844 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2845 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2846 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2847 break;
2849 case CONSTRUCTOR:
2850 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2851 for (i = 0; i < vno->length; ++i)
2852 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2853 break;
2855 default:
2856 gcc_checking_assert (!gimple_assign_single_p (stmt));
2857 vno->length = gimple_num_ops (stmt) - 1;
2858 for (i = 0; i < vno->length; ++i)
2859 vno->op[i] = gimple_op (stmt, i + 1);
2863 /* Compute the hashcode for VNO and look for it in the hash table;
2864 return the resulting value number if it exists in the hash table.
2865 Return NULL_TREE if it does not exist in the hash table or if the
2866 result field of the operation is NULL. VNRESULT will contain the
2867 vn_nary_op_t from the hashtable if it exists. */
2869 static tree
2870 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2872 vn_nary_op_s **slot;
2874 if (vnresult)
2875 *vnresult = NULL;
2877 vno->hashcode = vn_nary_op_compute_hash (vno);
2878 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2879 NO_INSERT);
2880 if (!slot && current_info == optimistic_info)
2881 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2882 NO_INSERT);
2883 if (!slot)
2884 return NULL_TREE;
2885 if (vnresult)
2886 *vnresult = *slot;
2887 return (*slot)->result;
2890 /* Lookup a n-ary operation by its pieces and return the resulting value
2891 number if it exists in the hash table. Return NULL_TREE if it does
2892 not exist in the hash table or if the result field of the operation
2893 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2894 if it exists. */
2896 tree
2897 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2898 tree type, tree *ops, vn_nary_op_t *vnresult)
2900 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2901 sizeof_vn_nary_op (length));
2902 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2903 return vn_nary_op_lookup_1 (vno1, vnresult);
2906 /* Lookup OP in the current hash table, and return the resulting value
2907 number if it exists in the hash table. Return NULL_TREE if it does
2908 not exist in the hash table or if the result field of the operation
2909 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2910 if it exists. */
2912 tree
2913 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2915 vn_nary_op_t vno1
2916 = XALLOCAVAR (struct vn_nary_op_s,
2917 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2918 init_vn_nary_op_from_op (vno1, op);
2919 return vn_nary_op_lookup_1 (vno1, vnresult);
2922 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2923 value number if it exists in the hash table. Return NULL_TREE if
2924 it does not exist in the hash table. VNRESULT will contain the
2925 vn_nary_op_t from the hashtable if it exists. */
2927 tree
2928 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2930 vn_nary_op_t vno1
2931 = XALLOCAVAR (struct vn_nary_op_s,
2932 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2933 init_vn_nary_op_from_stmt (vno1, stmt);
2934 return vn_nary_op_lookup_1 (vno1, vnresult);
2937 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2939 static vn_nary_op_t
2940 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2942 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2945 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2946 obstack. */
2948 static vn_nary_op_t
2949 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2951 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2952 &current_info->nary_obstack);
2954 vno1->value_id = value_id;
2955 vno1->length = length;
2956 vno1->result = result;
2958 return vno1;
2961 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2962 VNO->HASHCODE first. */
2964 static vn_nary_op_t
2965 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2966 bool compute_hash)
2968 vn_nary_op_s **slot;
2970 if (compute_hash)
2971 vno->hashcode = vn_nary_op_compute_hash (vno);
2973 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2974 /* While we do not want to insert things twice it's awkward to
2975 avoid it in the case where visit_nary_op pattern-matches stuff
2976 and ends up simplifying the replacement to itself. We then
2977 get two inserts, one from visit_nary_op and one from
2978 vn_nary_build_or_lookup.
2979 So allow inserts with the same value number. */
2980 if (*slot && (*slot)->result == vno->result)
2981 return *slot;
2983 gcc_assert (!*slot);
2985 *slot = vno;
2986 return vno;
2989 /* Insert an n-ary operation into the current hash table using its
2990 pieces. Return the vn_nary_op_t structure we created and put in
2991 the hashtable. */
2993 vn_nary_op_t
2994 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2995 tree type, tree *ops,
2996 tree result, unsigned int value_id)
2998 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2999 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3000 return vn_nary_op_insert_into (vno1, current_info->nary, true);
3003 /* Insert OP into the current hash table with a value number of
3004 RESULT. Return the vn_nary_op_t structure we created and put in
3005 the hashtable. */
3007 vn_nary_op_t
3008 vn_nary_op_insert (tree op, tree result)
3010 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
3011 vn_nary_op_t vno1;
3013 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
3014 init_vn_nary_op_from_op (vno1, op);
3015 return vn_nary_op_insert_into (vno1, current_info->nary, true);
3018 /* Insert the rhs of STMT into the current hash table with a value number of
3019 RESULT. */
3021 static vn_nary_op_t
3022 vn_nary_op_insert_stmt (gimple *stmt, tree result)
3024 vn_nary_op_t vno1
3025 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
3026 result, VN_INFO (result)->value_id);
3027 init_vn_nary_op_from_stmt (vno1, stmt);
3028 return vn_nary_op_insert_into (vno1, current_info->nary, true);
3031 /* Compute a hashcode for PHI operation VP1 and return it. */
3033 static inline hashval_t
3034 vn_phi_compute_hash (vn_phi_t vp1)
3036 inchash::hash hstate (vp1->phiargs.length () > 2
3037 ? vp1->block->index : vp1->phiargs.length ());
3038 tree phi1op;
3039 tree type;
3040 edge e;
3041 edge_iterator ei;
3043 /* If all PHI arguments are constants we need to distinguish
3044 the PHI node via its type. */
3045 type = vp1->type;
3046 hstate.merge_hash (vn_hash_type (type));
3048 FOR_EACH_EDGE (e, ei, vp1->block->preds)
3050 /* Don't hash backedge values; they need to be handled as VN_TOP
3051 for optimistic value-numbering. */
3052 if (e->flags & EDGE_DFS_BACK)
3053 continue;
3055 phi1op = vp1->phiargs[e->dest_idx];
3056 if (phi1op == VN_TOP)
3057 continue;
3058 inchash::add_expr (phi1op, hstate);
3061 return hstate.end ();
3065 /* Return true if COND1 and COND2 represent the same condition, set
3066 *INVERTED_P if one needs to be inverted to make it the same as
3067 the other. */
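/* For example (hypothetical integer operands): "a_1 < b_2" and
   "b_2 > a_1" are the same condition with *INVERTED_P false, while
   "a_1 < b_2" and "a_1 >= b_2" are the same condition with
   *INVERTED_P set.  */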
3069 static bool
3070 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
3071 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
3073 enum tree_code code1 = gimple_cond_code (cond1);
3074 enum tree_code code2 = gimple_cond_code (cond2);
3076 *inverted_p = false;
3077 if (code1 == code2)
3079 else if (code1 == swap_tree_comparison (code2))
3080 std::swap (lhs2, rhs2);
3081 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
3082 *inverted_p = true;
3083 else if (code1 == invert_tree_comparison
3084 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
3086 std::swap (lhs2, rhs2);
3087 *inverted_p = true;
3089 else
3090 return false;
3092 return ((expressions_equal_p (lhs1, lhs2)
3093 && expressions_equal_p (rhs1, rhs2))
3094 || (commutative_tree_code (code1)
3095 && expressions_equal_p (lhs1, rhs2)
3096 && expressions_equal_p (rhs1, lhs2)));
3099 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
3101 static int
3102 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
3104 if (vp1->hashcode != vp2->hashcode)
3105 return false;
3107 if (vp1->block != vp2->block)
3109 if (vp1->phiargs.length () != vp2->phiargs.length ())
3110 return false;
3112 switch (vp1->phiargs.length ())
3114 case 1:
3115 /* Single-arg PHIs are just copies. */
3116 break;
3118 case 2:
3120 /* Rule out backedges into the PHI. */
3121 if (vp1->block->loop_father->header == vp1->block
3122 || vp2->block->loop_father->header == vp2->block)
3123 return false;
3125 /* If the PHI nodes do not have compatible types
3126 they are not the same. */
3127 if (!types_compatible_p (vp1->type, vp2->type))
3128 return false;
3130 basic_block idom1
3131 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3132 basic_block idom2
3133 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
3134 /* If the immediate dominators end in switch stmts multiple
3135 values may end up in the same PHI arg via intermediate
3136 CFG merges. */
3137 if (EDGE_COUNT (idom1->succs) != 2
3138 || EDGE_COUNT (idom2->succs) != 2)
3139 return false;
3141 /* Verify the controlling stmt is the same. */
3142 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3143 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3144 if (! last1 || ! last2)
3145 return false;
3146 bool inverted_p;
3147 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3148 last2, vp2->cclhs, vp2->ccrhs,
3149 &inverted_p))
3150 return false;
3152 /* Get at true/false controlled edges into the PHI. */
3153 edge te1, te2, fe1, fe2;
3154 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3155 &te1, &fe1)
3156 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3157 &te2, &fe2))
3158 return false;
3160 /* Swap edges if the second condition is the inverted of the
3161 first. */
3162 if (inverted_p)
3163 std::swap (te2, fe2);
3165 /* ??? Handle VN_TOP specially. */
3166 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3167 vp2->phiargs[te2->dest_idx])
3168 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3169 vp2->phiargs[fe2->dest_idx]))
3170 return false;
3172 return true;
3175 default:
3176 return false;
3180 /* If the PHI nodes do not have compatible types
3181 they are not the same. */
3182 if (!types_compatible_p (vp1->type, vp2->type))
3183 return false;
3185 /* Any phi in the same block will have its arguments in the
3186 same edge order, because of how we store phi nodes. */
3187 int i;
3188 tree phi1op;
3189 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
3191 tree phi2op = vp2->phiargs[i];
3192 if (phi1op == VN_TOP || phi2op == VN_TOP)
3193 continue;
3194 if (!expressions_equal_p (phi1op, phi2op))
3195 return false;
3198 return true;
3201 static vec<tree> shared_lookup_phiargs;
3203 /* Lookup PHI in the current hash table, and return the resulting
3204 value number if it exists in the hash table. Return NULL_TREE if
3205 it does not exist in the hash table. */
3207 static tree
3208 vn_phi_lookup (gimple *phi)
3210 vn_phi_s **slot;
3211 struct vn_phi_s vp1;
3212 edge e;
3213 edge_iterator ei;
3215 shared_lookup_phiargs.truncate (0);
3216 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
3218 /* Canonicalize the SSA_NAME's to their value number. */
3219 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3221 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3222 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3223 shared_lookup_phiargs[e->dest_idx] = def;
3225 vp1.type = TREE_TYPE (gimple_phi_result (phi));
3226 vp1.phiargs = shared_lookup_phiargs;
3227 vp1.block = gimple_bb (phi);
3228 /* Extract values of the controlling condition. */
3229 vp1.cclhs = NULL_TREE;
3230 vp1.ccrhs = NULL_TREE;
3231 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1.block);
3232 if (EDGE_COUNT (idom1->succs) == 2)
3233 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3235 vp1.cclhs = vn_valueize (gimple_cond_lhs (last1));
3236 vp1.ccrhs = vn_valueize (gimple_cond_rhs (last1));
3238 vp1.hashcode = vn_phi_compute_hash (&vp1);
3239 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3240 NO_INSERT);
3241 if (!slot && current_info == optimistic_info)
3242 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3243 NO_INSERT);
3244 if (!slot)
3245 return NULL_TREE;
3246 return (*slot)->result;
3249 /* Insert PHI into the current hash table with a value number of
3250 RESULT. */
3252 static vn_phi_t
3253 vn_phi_insert (gimple *phi, tree result)
3255 vn_phi_s **slot;
3256 vn_phi_t vp1 = current_info->phis_pool->allocate ();
3257 vec<tree> args = vNULL;
3258 edge e;
3259 edge_iterator ei;
3261 args.safe_grow (gimple_phi_num_args (phi));
3263 /* Canonicalize the SSA_NAME's to their value number. */
3264 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3266 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3267 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3268 args[e->dest_idx] = def;
3270 vp1->value_id = VN_INFO (result)->value_id;
3271 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3272 vp1->phiargs = args;
3273 vp1->block = gimple_bb (phi);
3274 /* Extract values of the controlling condition. */
3275 vp1->cclhs = NULL_TREE;
3276 vp1->ccrhs = NULL_TREE;
3277 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3278 if (EDGE_COUNT (idom1->succs) == 2)
3279 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3281 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3282 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3284 vp1->result = result;
3285 vp1->hashcode = vn_phi_compute_hash (vp1);
3287 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3289 /* Because we iterate over phi operations more than once, it's
3290 possible the slot might already exist here, hence no assert. */
3291 *slot = vp1;
3292 return vp1;
3296 /* Print set of components in strongly connected component SCC to OUT. */
3298 static void
3299 print_scc (FILE *out, vec<tree> scc)
3301 tree var;
3302 unsigned int i;
3304 fprintf (out, "SCC consists of %u:", scc.length ());
3305 FOR_EACH_VEC_ELT (scc, i, var)
3307 fprintf (out, " ");
3308 print_generic_expr (out, var);
3310 fprintf (out, "\n");
3313 /* Return true if BB1 is dominated by BB2 taking into account edges
3314 that are not executable. */
3316 static bool
3317 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3319 edge_iterator ei;
3320 edge e;
3322 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3323 return true;
3325 /* Before iterating we'd like to know if there exists an
3326 (executable) path from bb2 to bb1 at all; if not we can
3327 directly return false. For now simply iterate once. */
3329 /* Iterate to the single executable bb1 predecessor. */
3330 if (EDGE_COUNT (bb1->preds) > 1)
3332 edge prede = NULL;
3333 FOR_EACH_EDGE (e, ei, bb1->preds)
3334 if (e->flags & EDGE_EXECUTABLE)
3336 if (prede)
3338 prede = NULL;
3339 break;
3341 prede = e;
3343 if (prede)
3345 bb1 = prede->src;
3347 /* Re-do the dominance check with changed bb1. */
3348 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3349 return true;
3353 /* Iterate to the single executable bb2 successor. */
3354 edge succe = NULL;
3355 FOR_EACH_EDGE (e, ei, bb2->succs)
3356 if (e->flags & EDGE_EXECUTABLE)
3358 if (succe)
3360 succe = NULL;
3361 break;
3363 succe = e;
3365 if (succe)
3367 /* Verify the reached block is only reached through succe.
3368 If there is only one edge we can spare us the dominator
3369 check and iterate directly. */
3370 if (EDGE_COUNT (succe->dest->preds) > 1)
3372 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3373 if (e != succe
3374 && (e->flags & EDGE_EXECUTABLE))
3376 succe = NULL;
3377 break;
3380 if (succe)
3382 bb2 = succe->dest;
3384 /* Re-do the dominance check with changed bb2. */
3385 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3386 return true;
3390 /* We could now iterate updating bb1 / bb2. */
3391 return false;
3394 /* Set the value number of FROM to TO, return true if it has changed
3395 as a result. */
3397 static inline bool
3398 set_ssa_val_to (tree from, tree to)
3400 tree currval = SSA_VAL (from);
3401 poly_int64 toff, coff;
3403 /* The only thing we allow as value numbers are ssa_names
3404 and invariants. So assert that here. We don't allow VN_TOP
3405 as visiting a stmt should produce a value-number other than
3406 that.
3407 ??? Still VN_TOP can happen for unreachable code, so force
3408 it to varying in that case. Not all code is prepared to
3409 get VN_TOP on valueization. */
3410 if (to == VN_TOP)
3412 if (dump_file && (dump_flags & TDF_DETAILS))
3413 fprintf (dump_file, "Forcing value number to varying on "
3414 "receiving VN_TOP\n");
3415 to = from;
3418 gcc_assert (to != NULL_TREE
3419 && ((TREE_CODE (to) == SSA_NAME
3420 && (to == from || SSA_VAL (to) == to))
3421 || is_gimple_min_invariant (to)));
3423 if (from != to)
3425 if (currval == from)
3427 if (dump_file && (dump_flags & TDF_DETAILS))
3429 fprintf (dump_file, "Not changing value number of ");
3430 print_generic_expr (dump_file, from);
3431 fprintf (dump_file, " from VARYING to ");
3432 print_generic_expr (dump_file, to);
3433 fprintf (dump_file, "\n");
3435 return false;
3437 else if (currval != VN_TOP
3438 && ! is_gimple_min_invariant (currval)
3439 && is_gimple_min_invariant (to))
3441 if (dump_file && (dump_flags & TDF_DETAILS))
3443 fprintf (dump_file, "Forcing VARYING instead of changing "
3444 "value number of ");
3445 print_generic_expr (dump_file, from);
3446 fprintf (dump_file, " from ");
3447 print_generic_expr (dump_file, currval);
3448 fprintf (dump_file, " (non-constant) to ");
3449 print_generic_expr (dump_file, to);
3450 fprintf (dump_file, " (constant)\n");
3452 to = from;
3454 else if (TREE_CODE (to) == SSA_NAME
3455 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3456 to = from;
3459 if (dump_file && (dump_flags & TDF_DETAILS))
3461 fprintf (dump_file, "Setting value number of ");
3462 print_generic_expr (dump_file, from);
3463 fprintf (dump_file, " to ");
3464 print_generic_expr (dump_file, to);
3467 if (currval != to
3468 && !operand_equal_p (currval, to, 0)
3469 /* Different undefined SSA names are not actually different. See
3470 PR82320 for a testcase where we'd otherwise not terminate iteration. */
3471 && !(TREE_CODE (currval) == SSA_NAME
3472 && TREE_CODE (to) == SSA_NAME
3473 && ssa_undefined_value_p (currval, false)
3474 && ssa_undefined_value_p (to, false))
3475 /* ??? For addresses involving volatile objects or types operand_equal_p
3476 does not reliably detect ADDR_EXPRs as equal. We know we are only
3477 getting invariant gimple addresses here, so can use
3478 get_addr_base_and_unit_offset to do this comparison. */
3479 && !(TREE_CODE (currval) == ADDR_EXPR
3480 && TREE_CODE (to) == ADDR_EXPR
3481 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3482 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3483 && known_eq (coff, toff)))
3485 if (dump_file && (dump_flags & TDF_DETAILS))
3486 fprintf (dump_file, " (changed)\n");
3488 /* If we equate two SSA names we have to make the side-band info
3489 of the leader conservative (and remember whatever original value
3490 was present). */
3491 if (TREE_CODE (to) == SSA_NAME)
3493 if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3494 && SSA_NAME_RANGE_INFO (to))
3496 if (SSA_NAME_IS_DEFAULT_DEF (to)
3497 || dominated_by_p_w_unex
3498 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3499 gimple_bb (SSA_NAME_DEF_STMT (to))))
3500 /* Keep the info from the dominator. */
3502 else
3504 /* Save old info. */
3505 if (! VN_INFO (to)->info.range_info)
3507 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3508 VN_INFO (to)->range_info_anti_range_p
3509 = SSA_NAME_ANTI_RANGE_P (to);
3511 /* Rather than allocating memory and unioning the info
3512 just clear it. */
3513 if (dump_file && (dump_flags & TDF_DETAILS))
3515 fprintf (dump_file, "clearing range info of ");
3516 print_generic_expr (dump_file, to);
3517 fprintf (dump_file, "\n");
3519 SSA_NAME_RANGE_INFO (to) = NULL;
3522 else if (POINTER_TYPE_P (TREE_TYPE (to))
3523 && SSA_NAME_PTR_INFO (to))
3525 if (SSA_NAME_IS_DEFAULT_DEF (to)
3526 || dominated_by_p_w_unex
3527 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3528 gimple_bb (SSA_NAME_DEF_STMT (to))))
3529 /* Keep the info from the dominator. */
3531 else if (! SSA_NAME_PTR_INFO (from)
3532 /* Handle the case of trivially equivalent info. */
3533 || memcmp (SSA_NAME_PTR_INFO (to),
3534 SSA_NAME_PTR_INFO (from),
3535 sizeof (ptr_info_def)) != 0)
3537 /* Save old info. */
3538 if (! VN_INFO (to)->info.ptr_info)
3539 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3540 /* Rather than allocating memory and unioning the info
3541 just clear it. */
3542 if (dump_file && (dump_flags & TDF_DETAILS))
3544 fprintf (dump_file, "clearing points-to info of ");
3545 print_generic_expr (dump_file, to);
3546 fprintf (dump_file, "\n");
3548 SSA_NAME_PTR_INFO (to) = NULL;
3553 VN_INFO (from)->valnum = to;
3554 return true;
3556 if (dump_file && (dump_flags & TDF_DETAILS))
3557 fprintf (dump_file, "\n");
3558 return false;
3561 /* Mark as processed all the definitions in the defining stmt of USE, or
3562 the USE itself. */
3564 static void
3565 mark_use_processed (tree use)
3567 ssa_op_iter iter;
3568 def_operand_p defp;
3569 gimple *stmt = SSA_NAME_DEF_STMT (use);
3571 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3573 VN_INFO (use)->use_processed = true;
3574 return;
3577 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3579 tree def = DEF_FROM_PTR (defp);
3581 VN_INFO (def)->use_processed = true;
3585 /* Value number all definitions in STMT to themselves.
3586 Return true if a value number changed. */
3588 static bool
3589 defs_to_varying (gimple *stmt)
3591 bool changed = false;
3592 ssa_op_iter iter;
3593 def_operand_p defp;
3595 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3597 tree def = DEF_FROM_PTR (defp);
3598 changed |= set_ssa_val_to (def, def);
3600 return changed;
3603 /* Visit a copy between LHS and RHS, return true if the value number
3604 changed. */
3606 static bool
3607 visit_copy (tree lhs, tree rhs)
3609 /* Valueize. */
3610 rhs = SSA_VAL (rhs);
3612 return set_ssa_val_to (lhs, rhs);
3615 /* Lookup a value in type WIDE_TYPE that is equal to OP widened to
3616 WIDE_TYPE; return NULL_TREE if no such value is available. */
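/* E.g. (hypothetical): if int _3 = (int) s_1 was already value numbered,
   valueized_wider_op (int, s_1) returns _3; an INTEGER_CST is simply
   extended to WIDE_TYPE.  */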
3618 static tree
3619 valueized_wider_op (tree wide_type, tree op)
3621 if (TREE_CODE (op) == SSA_NAME)
3622 op = SSA_VAL (op);
3624 /* Either we have the op widened available. */
3625 tree ops[3] = {};
3626 ops[0] = op;
3627 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
3628 wide_type, ops, NULL);
3629 if (tem)
3630 return tem;
3632 /* Or the op is truncated from some existing value. */
3633 if (TREE_CODE (op) == SSA_NAME)
3635 gimple *def = SSA_NAME_DEF_STMT (op);
3636 if (is_gimple_assign (def)
3637 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3639 tem = gimple_assign_rhs1 (def);
3640 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
3642 if (TREE_CODE (tem) == SSA_NAME)
3643 tem = SSA_VAL (tem);
3644 return tem;
3649 /* For constants simply extend it. */
3650 if (TREE_CODE (op) == INTEGER_CST)
3651 return wide_int_to_tree (wide_type, wi::to_wide (op));
3653 return NULL_TREE;
3656 /* Visit a nary operator RHS, value number it, and return true if the
3657 value number of LHS has changed as a result. */
3659 static bool
3660 visit_nary_op (tree lhs, gassign *stmt)
3662 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3663 if (result)
3664 return set_ssa_val_to (lhs, result);
3666 /* Do some special pattern matching for redundancies of operations
3667 in different types. */
3668 enum tree_code code = gimple_assign_rhs_code (stmt);
3669 tree type = TREE_TYPE (lhs);
3670 tree rhs1 = gimple_assign_rhs1 (stmt);
3671 switch (code)
3673 CASE_CONVERT:
3674 /* Match arithmetic done in a different type where we can easily
3675 substitute the result from some earlier sign-changed or widened
3676 operation. */
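/* E.g. (hypothetical, zero-extension case):

     unsigned short _1 = a_2 + b_3;
     unsigned int _4 = (unsigned int) _1;

   if the addition of the widened a_2 and b_3 is already available in
   unsigned int, _4 can re-use it masked with 0xffff instead of
   keeping the conversion.  */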
3677 if (INTEGRAL_TYPE_P (type)
3678 && TREE_CODE (rhs1) == SSA_NAME
3679 /* We only handle sign-changes or zero-extension -> & mask. */
3680 && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
3681 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
3682 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
3684 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
3685 if (def
3686 && (gimple_assign_rhs_code (def) == PLUS_EXPR
3687 || gimple_assign_rhs_code (def) == MINUS_EXPR
3688 || gimple_assign_rhs_code (def) == MULT_EXPR))
3690 tree ops[3] = {};
3691 /* Either we have the op widened available. */
3692 ops[0] = valueized_wider_op (type,
3693 gimple_assign_rhs1 (def));
3694 if (ops[0])
3695 ops[1] = valueized_wider_op (type,
3696 gimple_assign_rhs2 (def));
3697 if (ops[0] && ops[1])
3699 ops[0] = vn_nary_op_lookup_pieces
3700 (2, gimple_assign_rhs_code (def), type, ops, NULL);
3701 /* We have wider operation available. */
3702 if (ops[0])
3704 unsigned lhs_prec = TYPE_PRECISION (type);
3705 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
3706 if (lhs_prec == rhs_prec)
3708 ops[1] = NULL_TREE;
3709 result = vn_nary_build_or_lookup (NOP_EXPR,
3710 type, ops);
3711 if (result)
3713 bool changed = set_ssa_val_to (lhs, result);
3714 vn_nary_op_insert_stmt (stmt, result);
3715 return changed;
3718 else
3720 ops[1] = wide_int_to_tree (type,
3721 wi::mask (rhs_prec, false,
3722 lhs_prec));
3723 result = vn_nary_build_or_lookup (BIT_AND_EXPR,
3724 TREE_TYPE (lhs),
3725 ops);
3726 if (result)
3728 bool changed = set_ssa_val_to (lhs, result);
3729 vn_nary_op_insert_stmt (stmt, result);
3730 return changed;
3737 default:;
3740 bool changed = set_ssa_val_to (lhs, lhs);
3741 vn_nary_op_insert_stmt (stmt, lhs);
3742 return changed;
3745 /* Visit a call STMT storing into LHS. Return true if the value number
3746 of the LHS has changed as a result. */
3748 static bool
3749 visit_reference_op_call (tree lhs, gcall *stmt)
3751 bool changed = false;
3752 struct vn_reference_s vr1;
3753 vn_reference_t vnresult = NULL;
3754 tree vdef = gimple_vdef (stmt);
3756 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3757 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3758 lhs = NULL_TREE;
3760 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3761 if (vnresult)
3763 if (vnresult->result_vdef && vdef)
3764 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3765 else if (vdef)
3766 /* If the call was discovered to be pure or const reflect
3767 that as far as possible. */
3768 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
3770 if (!vnresult->result && lhs)
3771 vnresult->result = lhs;
3773 if (vnresult->result && lhs)
3774 changed |= set_ssa_val_to (lhs, vnresult->result);
3776 else
3778 vn_reference_t vr2;
3779 vn_reference_s **slot;
3780 tree vdef_val = vdef;
3781 if (vdef)
3783 /* If we value numbered an indirect call's function to
3784 one not clobbering memory, value number its VDEF to its
3785 VUSE. */
3786 tree fn = gimple_call_fn (stmt);
3787 if (fn && TREE_CODE (fn) == SSA_NAME)
3789 fn = SSA_VAL (fn);
3790 if (TREE_CODE (fn) == ADDR_EXPR
3791 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3792 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
3793 & (ECF_CONST | ECF_PURE)))
3794 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
3796 changed |= set_ssa_val_to (vdef, vdef_val);
3798 if (lhs)
3799 changed |= set_ssa_val_to (lhs, lhs);
3800 vr2 = current_info->references_pool->allocate ();
3801 vr2->vuse = vr1.vuse;
3802 /* As we are not walking the virtual operand chain we know the
3803 shared_lookup_references are still original so we can re-use
3804 them here. */
3805 vr2->operands = vr1.operands.copy ();
3806 vr2->type = vr1.type;
3807 vr2->set = vr1.set;
3808 vr2->hashcode = vr1.hashcode;
3809 vr2->result = lhs;
3810 vr2->result_vdef = vdef_val;
3811 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3812 INSERT);
3813 gcc_assert (!*slot);
3814 *slot = vr2;
3817 return changed;
3820 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3821 and return true if the value number of the LHS has changed as a result. */
3823 static bool
3824 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3826 bool changed = false;
3827 tree last_vuse;
3828 tree result;
3830 last_vuse = gimple_vuse (stmt);
3831 last_vuse_ptr = &last_vuse;
3832 result = vn_reference_lookup (op, gimple_vuse (stmt),
3833 default_vn_walk_kind, NULL, true);
3834 last_vuse_ptr = NULL;
3836 /* We handle type-punning through unions by value-numbering based
3837 on offset and size of the access. Be prepared to handle a
3838 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
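/* E.g. (hypothetical): a union written through an int member and read
   back through a same-sized float member yields a value of type int
   here; it is wrapped in VIEW_CONVERT_EXPR <float> so the value number
   matches the type of the load.  */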
3839 if (result
3840 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3842 /* We will be setting the value number of lhs to the value number
3843 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3844 So first simplify and lookup this expression to see if it
3845 is already available. */
3846 code_helper rcode = VIEW_CONVERT_EXPR;
3847 tree ops[3] = { result };
3848 result = vn_nary_build_or_lookup (rcode, TREE_TYPE (op), ops);
3851 if (result)
3852 changed = set_ssa_val_to (lhs, result);
3853 else
3855 changed = set_ssa_val_to (lhs, lhs);
3856 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3859 return changed;
3863 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3864 and return true if the value number of the LHS has changed as a result. */
3866 static bool
3867 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3869 bool changed = false;
3870 vn_reference_t vnresult = NULL;
3871 tree assign;
3872 bool resultsame = false;
3873 tree vuse = gimple_vuse (stmt);
3874 tree vdef = gimple_vdef (stmt);
3876 if (TREE_CODE (op) == SSA_NAME)
3877 op = SSA_VAL (op);
3879 /* First we want to look up using the *vuses* from the store and see
3880 if the last store to this location with the same address
3881 had the same value.
3883 The vuses represent the memory state before the store. If the
3884 memory state, address, and value of the store are the same as the
3885 last store to this location, then this store will produce the
3886 same memory state as that store.
3888 In this case the vdef versions for this store are value numbered to those
3889 vuse versions, since they represent the same memory state after
3890 this store.
3892 Otherwise, the vdefs for the store are used when inserting into
3893 the table, since the store generates a new memory state. */
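/* Rough illustration (assumed GIMPLE, not from the sources):
     # .MEM_4 = VDEF <.MEM_3>
     *p_1 = x_2;
     # .MEM_5 = VDEF <.MEM_4>
     *p_1 = x_2;
   The second store looks up *p_1 with vuse .MEM_4, finds the first
   store's value x_2, and therefore value numbers .MEM_5 to .MEM_4
   (subject to the TBAA check below).  */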
3895 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
3896 if (vnresult
3897 && vnresult->result)
3899 tree result = vnresult->result;
3900 if (TREE_CODE (result) == SSA_NAME)
3901 result = SSA_VAL (result);
3902 resultsame = expressions_equal_p (result, op);
3903 if (resultsame)
3905 /* If the TBAA state isn't compatible for downstream reads
3906 we cannot value-number the VDEFs the same. */
3907 alias_set_type set = get_alias_set (lhs);
3908 if (vnresult->set != set
3909 && ! alias_set_subset_of (set, vnresult->set))
3910 resultsame = false;
3914 if (!resultsame)
3916 /* Only perform the following when being called from PRE
3917 which embeds tail merging. */
3918 if (default_vn_walk_kind == VN_WALK)
3920 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3921 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3922 if (vnresult)
3924 VN_INFO (vdef)->use_processed = true;
3925 return set_ssa_val_to (vdef, vnresult->result_vdef);
3929 if (dump_file && (dump_flags & TDF_DETAILS))
3931 fprintf (dump_file, "No store match\n");
3932 fprintf (dump_file, "Value numbering store ");
3933 print_generic_expr (dump_file, lhs);
3934 fprintf (dump_file, " to ");
3935 print_generic_expr (dump_file, op);
3936 fprintf (dump_file, "\n");
3938 /* Have to set value numbers before insert, since insert is
3939 going to valueize the references in-place. */
3940 if (vdef)
3941 changed |= set_ssa_val_to (vdef, vdef);
3943 /* Do not insert structure copies into the tables. */
3944 if (is_gimple_min_invariant (op)
3945 || is_gimple_reg (op))
3946 vn_reference_insert (lhs, op, vdef, NULL);
3948 /* Only perform the following when being called from PRE
3949 which embeds tail merging. */
3950 if (default_vn_walk_kind == VN_WALK)
3952 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3953 vn_reference_insert (assign, lhs, vuse, vdef);
3956 else
3958 /* We had a match, so value number the vdef to have the value
3959 number of the vuse it came from. */
3961 if (dump_file && (dump_flags & TDF_DETAILS))
3962 fprintf (dump_file, "Store matched earlier value, "
3963 "value numbering store vdefs to matching vuses.\n");
3965 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3968 return changed;
3971 /* Visit and value number PHI, return true if the value number
3972 changed. */
3974 static bool
3975 visit_phi (gimple *phi)
3977 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
3978 unsigned n_executable = 0;
3979 bool allsame = true;
3980 edge_iterator ei;
3981 edge e;
3983 /* TODO: We could check for this in init_sccvn, and replace this
3984 with a gcc_assert. */
3985 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3986 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3988 /* See if all non-TOP arguments have the same value. TOP is
3989 equivalent to everything, so we can ignore it. */
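/* For instance (illustrative only): in
     x_3 = PHI <x_1(2), x_1(3), x_2(4)>
   with the edge from block 4 marked not executable, both remaining
   arguments value number to x_1, so allsame stays true and x_3 is
   value numbered to x_1 below.  */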
3990 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3991 if (e->flags & EDGE_EXECUTABLE)
3993 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3995 ++n_executable;
3996 if (TREE_CODE (def) == SSA_NAME)
3997 def = SSA_VAL (def);
3998 if (def == VN_TOP)
4000 /* Ignore undefined defs for sameval but record one. */
4001 else if (TREE_CODE (def) == SSA_NAME
4002 && ssa_undefined_value_p (def, false))
4003 seen_undef = def;
4004 else if (sameval == VN_TOP)
4005 sameval = def;
4006 else if (!expressions_equal_p (def, sameval))
4008 allsame = false;
4009 break;
4014 /* If none of the edges were executable, keep the value-number at VN_TOP;
4015 if only a single edge is executable, use its value.
4016 if (n_executable <= 1)
4017 result = seen_undef ? seen_undef : sameval;
4018 /* If we saw only undefined values and VN_TOP use one of the
4019 undefined values. */
4020 else if (sameval == VN_TOP)
4021 result = seen_undef ? seen_undef : sameval;
4022 /* First see if it is equivalent to a phi node in this block. We prefer
4023 this as it allows IV elimination - see PRs 66502 and 67167. */
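/* Loosely: two PHIs in the same block whose arguments have equal value
   numbers on the same incoming edges receive the same value number,
   which is what lets a redundant induction variable be replaced by an
   equivalent one.  */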
4024 else if ((result = vn_phi_lookup (phi)))
4026 /* If all values are the same use that, unless we've seen undefined
4027 values as well and the value isn't constant.
4028 CCP/copyprop have the same restriction to not remove uninit warnings. */
4029 else if (allsame
4030 && (! seen_undef || is_gimple_min_invariant (sameval)))
4031 result = sameval;
4032 else
4034 result = PHI_RESULT (phi);
4035 /* Only insert PHIs that are varying; for constant value numbers
4036 we would mess up equivalences otherwise, as we are only comparing
4037 the immediate controlling predicates. */
4038 vn_phi_insert (phi, result);
4041 return set_ssa_val_to (PHI_RESULT (phi), result);
4044 /* Try to simplify RHS using equivalences and constant folding. */
4046 static tree
4047 try_to_simplify (gassign *stmt)
4049 enum tree_code code = gimple_assign_rhs_code (stmt);
4050 tree tem;
4052 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
4053 in this case, there is no point in doing extra work. */
4054 if (code == SSA_NAME)
4055 return NULL_TREE;
4057 /* First try constant folding based on our current lattice. */
4058 mprts_hook = vn_lookup_simplify_result;
4059 mprts_hook_cnt = 9;
4060 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
4061 mprts_hook = NULL;
4062 if (tem
4063 && (TREE_CODE (tem) == SSA_NAME
4064 || is_gimple_min_invariant (tem)))
4065 return tem;
4067 return NULL_TREE;
4070 /* Visit and value number USE, return true if the value number
4071 changed. */
4073 static bool
4074 visit_use (tree use)
4076 bool changed = false;
4077 gimple *stmt = SSA_NAME_DEF_STMT (use);
4079 mark_use_processed (use);
4081 gcc_assert (!SSA_NAME_IN_FREE_LIST (use)
4082 && !SSA_NAME_IS_DEFAULT_DEF (use));
4084 if (dump_file && (dump_flags & TDF_DETAILS))
4086 fprintf (dump_file, "Value numbering ");
4087 print_generic_expr (dump_file, use);
4088 fprintf (dump_file, " stmt = ");
4089 print_gimple_stmt (dump_file, stmt, 0);
4092 if (gimple_code (stmt) == GIMPLE_PHI)
4093 changed = visit_phi (stmt);
4094 else if (gimple_has_volatile_ops (stmt))
4095 changed = defs_to_varying (stmt);
4096 else if (gassign *ass = dyn_cast <gassign *> (stmt))
4098 enum tree_code code = gimple_assign_rhs_code (ass);
4099 tree lhs = gimple_assign_lhs (ass);
4100 tree rhs1 = gimple_assign_rhs1 (ass);
4101 tree simplified;
4103 /* Shortcut for copies. Simplifying copies is pointless,
4104 since we copy the expression and value they represent. */
4105 if (code == SSA_NAME
4106 && TREE_CODE (lhs) == SSA_NAME)
4108 changed = visit_copy (lhs, rhs1);
4109 goto done;
4111 simplified = try_to_simplify (ass);
4112 if (simplified)
4114 if (dump_file && (dump_flags & TDF_DETAILS))
4116 fprintf (dump_file, "RHS ");
4117 print_gimple_expr (dump_file, ass, 0);
4118 fprintf (dump_file, " simplified to ");
4119 print_generic_expr (dump_file, simplified);
4120 fprintf (dump_file, "\n");
4123 /* Setting value numbers to constants will occasionally
4124 screw up phi congruence because constants are not
4125 uniquely associated with a single ssa name that can be
4126 looked up. */
4127 if (simplified
4128 && is_gimple_min_invariant (simplified)
4129 && TREE_CODE (lhs) == SSA_NAME)
4131 changed = set_ssa_val_to (lhs, simplified);
4132 goto done;
4134 else if (simplified
4135 && TREE_CODE (simplified) == SSA_NAME
4136 && TREE_CODE (lhs) == SSA_NAME)
4138 changed = visit_copy (lhs, simplified);
4139 goto done;
4142 if ((TREE_CODE (lhs) == SSA_NAME
4143 /* We can substitute SSA_NAMEs that are live over
4144 abnormal edges with their constant value. */
4145 && !(gimple_assign_copy_p (ass)
4146 && is_gimple_min_invariant (rhs1))
4147 && !(simplified
4148 && is_gimple_min_invariant (simplified))
4149 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4150 /* Stores or copies from SSA_NAMEs that are live over
4151 abnormal edges are a problem. */
4152 || (code == SSA_NAME
4153 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4154 changed = defs_to_varying (ass);
4155 else if (REFERENCE_CLASS_P (lhs)
4156 || DECL_P (lhs))
4157 changed = visit_reference_op_store (lhs, rhs1, ass);
4158 else if (TREE_CODE (lhs) == SSA_NAME)
4160 if ((gimple_assign_copy_p (ass)
4161 && is_gimple_min_invariant (rhs1))
4162 || (simplified
4163 && is_gimple_min_invariant (simplified)))
4165 if (simplified)
4166 changed = set_ssa_val_to (lhs, simplified);
4167 else
4168 changed = set_ssa_val_to (lhs, rhs1);
4170 else
4172 /* Visit the original statement. */
4173 switch (vn_get_stmt_kind (ass))
4175 case VN_NARY:
4176 changed = visit_nary_op (lhs, ass);
4177 break;
4178 case VN_REFERENCE:
4179 changed = visit_reference_op_load (lhs, rhs1, ass);
4180 break;
4181 default:
4182 changed = defs_to_varying (ass);
4183 break;
4187 else
4188 changed = defs_to_varying (ass);
4190 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4192 tree lhs = gimple_call_lhs (call_stmt);
4193 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4195 /* Try constant folding based on our current lattice. */
4196 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4197 vn_valueize);
4198 if (simplified)
4200 if (dump_file && (dump_flags & TDF_DETAILS))
4202 fprintf (dump_file, "call ");
4203 print_gimple_expr (dump_file, call_stmt, 0);
4204 fprintf (dump_file, " simplified to ");
4205 print_generic_expr (dump_file, simplified);
4206 fprintf (dump_file, "\n");
4209 /* Setting value numbers to constants will occasionally
4210 screw up phi congruence because constants are not
4211 uniquely associated with a single ssa name that can be
4212 looked up. */
4213 if (simplified
4214 && is_gimple_min_invariant (simplified))
4216 changed = set_ssa_val_to (lhs, simplified);
4217 if (gimple_vdef (call_stmt))
4218 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4219 SSA_VAL (gimple_vuse (call_stmt)));
4220 goto done;
4222 else if (simplified
4223 && TREE_CODE (simplified) == SSA_NAME)
4225 changed = visit_copy (lhs, simplified);
4226 if (gimple_vdef (call_stmt))
4227 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4228 SSA_VAL (gimple_vuse (call_stmt)));
4229 goto done;
4231 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4233 changed = defs_to_varying (call_stmt);
4234 goto done;
4238 /* Pick up flags from a devirtualization target. */
4239 tree fn = gimple_call_fn (stmt);
4240 int extra_fnflags = 0;
4241 if (fn && TREE_CODE (fn) == SSA_NAME)
4243 fn = SSA_VAL (fn);
4244 if (TREE_CODE (fn) == ADDR_EXPR
4245 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4246 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4248 if (!gimple_call_internal_p (call_stmt)
4249 && (/* Calls to the same function with the same vuse
4250 and the same operands do not necessarily return the same
4251 value, unless they're pure or const. */
4252 ((gimple_call_flags (call_stmt) | extra_fnflags)
4253 & (ECF_PURE | ECF_CONST))
4254 /* If calls have a vdef, subsequent calls won't have
4255 the same incoming vuse. So, if 2 calls with vdef have the
4256 same vuse, we know they're not subsequent.
4257 We can value number 2 calls to the same function with the
4258 same vuse and the same operands that are not subsequent
4259 as the same, because there is no code in the program that can
4260 compare the 2 values... */
4261 || (gimple_vdef (call_stmt)
4262 /* ... unless the call returns a pointer which does
4263 not alias with anything else, in which case the
4264 information that the values are distinct is encoded
4265 in the IL. */
4266 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4267 /* Only perform the following when being called from PRE
4268 which embeds tail merging. */
4269 && default_vn_walk_kind == VN_WALK)))
4270 changed = visit_reference_op_call (lhs, call_stmt);
4271 else
4272 changed = defs_to_varying (call_stmt);
4274 else
4275 changed = defs_to_varying (stmt);
4276 done:
4277 return changed;
4280 /* Compare two operands by reverse postorder index */
4282 static int
4283 compare_ops (const void *pa, const void *pb)
4285 const tree opa = *((const tree *)pa);
4286 const tree opb = *((const tree *)pb);
4287 gimple *opstmta = SSA_NAME_DEF_STMT (opa);
4288 gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
4289 basic_block bba;
4290 basic_block bbb;
4292 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
4293 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4294 else if (gimple_nop_p (opstmta))
4295 return -1;
4296 else if (gimple_nop_p (opstmtb))
4297 return 1;
4299 bba = gimple_bb (opstmta);
4300 bbb = gimple_bb (opstmtb);
4302 if (!bba && !bbb)
4303 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4304 else if (!bba)
4305 return -1;
4306 else if (!bbb)
4307 return 1;
4309 if (bba == bbb)
4311 if (gimple_code (opstmta) == GIMPLE_PHI
4312 && gimple_code (opstmtb) == GIMPLE_PHI)
4313 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4314 else if (gimple_code (opstmta) == GIMPLE_PHI)
4315 return -1;
4316 else if (gimple_code (opstmtb) == GIMPLE_PHI)
4317 return 1;
4318 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
4319 return gimple_uid (opstmta) - gimple_uid (opstmtb);
4320 else
4321 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4323 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
4326 /* Sort an array containing members of a strongly connected component
4327 SCC so that the members are ordered by RPO number.
4328 This means that when the sort is complete, iterating through the
4329 array will give you the members in RPO order. */
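/* Presumably visiting members in RPO order tends to process defs before
   their uses inside the SCC, which helps the fixed-point iteration in
   process_scc converge in fewer passes.  */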
4331 static void
4332 sort_scc (vec<tree> scc)
4334 scc.qsort (compare_ops);
4337 /* Insert the no longer used nary ONARY to the hash INFO. */
4339 static void
4340 copy_nary (vn_nary_op_t onary, vn_tables_t info)
4342 size_t size = sizeof_vn_nary_op (onary->length);
4343 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
4344 &info->nary_obstack);
4345 memcpy (nary, onary, size);
4346 vn_nary_op_insert_into (nary, info->nary, false);
4349 /* Insert the no longer used phi OPHI to the hash INFO. */
4351 static void
4352 copy_phi (vn_phi_t ophi, vn_tables_t info)
4354 vn_phi_t phi = info->phis_pool->allocate ();
4355 vn_phi_s **slot;
4356 memcpy (phi, ophi, sizeof (*phi));
4357 ophi->phiargs.create (0);
4358 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
4359 gcc_assert (!*slot);
4360 *slot = phi;
4363 /* Insert the no longer used reference OREF to the hash INFO. */
4365 static void
4366 copy_reference (vn_reference_t oref, vn_tables_t info)
4368 vn_reference_t ref;
4369 vn_reference_s **slot;
4370 ref = info->references_pool->allocate ();
4371 memcpy (ref, oref, sizeof (*ref));
4372 oref->operands.create (0);
4373 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
4374 if (*slot)
4375 free_reference (*slot);
4376 *slot = ref;
4379 /* Process a strongly connected component in the SSA graph. */
4381 static void
4382 process_scc (vec<tree> scc)
4384 tree var;
4385 unsigned int i;
4386 unsigned int iterations = 0;
4387 bool changed = true;
4388 vn_nary_op_iterator_type hin;
4389 vn_phi_iterator_type hip;
4390 vn_reference_iterator_type hir;
4391 vn_nary_op_t nary;
4392 vn_phi_t phi;
4393 vn_reference_t ref;
4395 /* If the SCC has a single member, just visit it. */
4396 if (scc.length () == 1)
4398 tree use = scc[0];
4399 if (VN_INFO (use)->use_processed)
4400 return;
4401 /* We need to make sure it doesn't form a cycle itself, which can
4402 happen for self-referential PHI nodes. In that case we would
4403 end up inserting an expression with VN_TOP operands into the
4404 valid table which makes us derive bogus equivalences later.
4405 The cheapest way to check this is to assume it for all PHI nodes. */
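/* E.g. (illustrative) a degenerate loop PHI such as
     x_1 = PHI <x_1(latch), 0(preheader)>
   is a single-member SCC that references itself, so it still needs
   the optimistic iteration below.  */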
4406 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
4407 /* Fallthru to iteration. */ ;
4408 else
4410 visit_use (use);
4411 return;
4415 if (dump_file && (dump_flags & TDF_DETAILS))
4416 print_scc (dump_file, scc);
4418 /* Iterate over the SCC with the optimistic table until it stops
4419 changing. */
4420 current_info = optimistic_info;
4421 while (changed)
4423 changed = false;
4424 iterations++;
4425 if (dump_file && (dump_flags & TDF_DETAILS))
4426 fprintf (dump_file, "Starting iteration %d\n", iterations);
4427 /* As we are value-numbering optimistically we have to
4428 clear the expression tables and the simplified expressions
4429 in each iteration until we converge. */
4430 optimistic_info->nary->empty ();
4431 optimistic_info->phis->empty ();
4432 optimistic_info->references->empty ();
4433 obstack_free (&optimistic_info->nary_obstack, NULL);
4434 gcc_obstack_init (&optimistic_info->nary_obstack);
4435 optimistic_info->phis_pool->release ();
4436 optimistic_info->references_pool->release ();
4437 FOR_EACH_VEC_ELT (scc, i, var)
4438 gcc_assert (!VN_INFO (var)->needs_insertion
4439 && VN_INFO (var)->expr == NULL);
4440 FOR_EACH_VEC_ELT (scc, i, var)
4441 changed |= visit_use (var);
4444 if (dump_file && (dump_flags & TDF_DETAILS))
4445 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4446 statistics_histogram_event (cfun, "SCC iterations", iterations);
4448 /* Finally, copy the contents of the no longer used optimistic
4449 table to the valid table. */
4450 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
4451 copy_nary (nary, valid_info);
4452 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
4453 copy_phi (phi, valid_info);
4454 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
4455 ref, vn_reference_t, hir)
4456 copy_reference (ref, valid_info);
4458 current_info = valid_info;
4462 /* Pop the components of the found SCC for NAME off the SCC stack
4463 and process them. If the SCC exceeds the allowed size its members
4464 are not iterated but dropped to varying instead. */
4466 static void
4467 extract_and_process_scc_for_name (tree name)
4469 auto_vec<tree> scc;
4470 tree x;
4472 /* Found an SCC, pop the components off the SCC stack and
4473 process them. */
4476 x = sccstack.pop ();
4478 VN_INFO (x)->on_sccstack = false;
4479 scc.safe_push (x);
4480 } while (x != name);
4482 /* Drop all defs in the SCC to varying in case a SCC turns out to be
4483 incredibly large.
4484 ??? Just switch to a non-optimistic mode that avoids any iteration. */
4485 if (scc.length () > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4487 if (dump_file)
4489 print_scc (dump_file, scc);
4490 fprintf (dump_file, "WARNING: Giving up value-numbering SCC due to "
4491 "size %u exceeding %u\n", scc.length (),
4492 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4494 tree var;
4495 unsigned i;
4496 FOR_EACH_VEC_ELT (scc, i, var)
4498 gimple *def = SSA_NAME_DEF_STMT (var);
4499 mark_use_processed (var);
4500 if (SSA_NAME_IS_DEFAULT_DEF (var)
4501 || gimple_code (def) == GIMPLE_PHI)
4502 set_ssa_val_to (var, var);
4503 else
4504 defs_to_varying (def);
4506 return;
4509 if (scc.length () > 1)
4510 sort_scc (scc);
4512 process_scc (scc);
4515 /* Depth first search on NAME to discover and process SCC's in the SSA
4516 graph.
4517 Execution of this algorithm relies on the fact that the SCC's are
4518 popped off the stack in topological order.
4519 SCCs that exceed the resource limits are given up on in
4520 extract_and_process_scc_for_name. */
4522 static void
4523 DFS (tree name)
4525 auto_vec<ssa_op_iter> itervec;
4526 auto_vec<tree> namevec;
4527 use_operand_p usep = NULL;
4528 gimple *defstmt;
4529 tree use;
4530 ssa_op_iter iter;
4532 start_over:
4533 /* SCC info */
4534 VN_INFO (name)->dfsnum = next_dfs_num++;
4535 VN_INFO (name)->visited = true;
4536 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4538 sccstack.safe_push (name);
4539 VN_INFO (name)->on_sccstack = true;
4540 defstmt = SSA_NAME_DEF_STMT (name);
4542 /* Recursively DFS on our operands, looking for SCC's. */
4543 if (!gimple_nop_p (defstmt))
4545 /* Push a new iterator. */
4546 if (gphi *phi = dyn_cast <gphi *> (defstmt))
4547 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4548 else
4549 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4551 else
4552 clear_and_done_ssa_iter (&iter);
4554 while (1)
4556 /* If we are done processing uses of a name, go up the stack
4557 of iterators and process SCCs as we found them. */
4558 if (op_iter_done (&iter))
4560 /* See if we found an SCC. */
4561 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4562 extract_and_process_scc_for_name (name);
4564 /* Check if we are done. */
4565 if (namevec.is_empty ())
4566 return;
4568 /* Restore the last use walker and continue walking there. */
4569 use = name;
4570 name = namevec.pop ();
4571 memcpy (&iter, &itervec.last (),
4572 sizeof (ssa_op_iter));
4573 itervec.pop ();
4574 goto continue_walking;
4577 use = USE_FROM_PTR (usep);
4579 /* Since we handle phi nodes, we will sometimes get
4580 invariants in the use expression. */
4581 if (TREE_CODE (use) == SSA_NAME)
4583 if (! (VN_INFO (use)->visited))
4585 /* Recurse by pushing the current use walking state on
4586 the stack and starting over. */
4587 itervec.safe_push (iter);
4588 namevec.safe_push (name);
4589 name = use;
4590 goto start_over;
4592 continue_walking:
4593 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4594 VN_INFO (use)->low);
4596 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4597 && VN_INFO (use)->on_sccstack)
4599 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4600 VN_INFO (name)->low);
4604 usep = op_iter_next_use (&iter);
4608 /* Allocate a value number table. */
4610 static void
4611 allocate_vn_table (vn_tables_t table)
4613 table->phis = new vn_phi_table_type (23);
4614 table->nary = new vn_nary_op_table_type (23);
4615 table->references = new vn_reference_table_type (23);
4617 gcc_obstack_init (&table->nary_obstack);
4618 table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4619 table->references_pool = new object_allocator<vn_reference_s>
4620 ("VN references");
4623 /* Free a value number table. */
4625 static void
4626 free_vn_table (vn_tables_t table)
4628 delete table->phis;
4629 table->phis = NULL;
4630 delete table->nary;
4631 table->nary = NULL;
4632 delete table->references;
4633 table->references = NULL;
4634 obstack_free (&table->nary_obstack, NULL);
4635 delete table->phis_pool;
4636 delete table->references_pool;
4639 static void
4640 init_scc_vn (void)
4642 int j;
4643 int *rpo_numbers_temp;
4645 calculate_dominance_info (CDI_DOMINATORS);
4646 mark_dfs_back_edges ();
4648 sccstack.create (0);
4649 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4651 constant_value_ids = BITMAP_ALLOC (NULL);
4653 next_dfs_num = 1;
4654 next_value_id = 1;
4656 vn_ssa_aux_table.create (num_ssa_names + 1);
4657 /* VEC_alloc doesn't actually grow it to the right size; it just
4658 preallocates the space to do so. */
4659 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4660 gcc_obstack_init (&vn_ssa_aux_obstack);
4662 shared_lookup_phiargs.create (0);
4663 shared_lookup_references.create (0);
4664 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4665 rpo_numbers_temp =
4666 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4667 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4669 /* rpo_numbers_temp is an array in RPO order: rpo[i] = bb means that
4670 the i'th block in RPO order is bb. We want to map bb indices to RPO
4671 numbers, so we need to rearrange this array. */
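/* Illustrative example: if rpo_numbers_temp = {2, 4, 3}, block 2 comes
   first in RPO, so the loop below sets rpo_numbers[2] = 0,
   rpo_numbers[4] = 1 and rpo_numbers[3] = 2.  */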
4672 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4673 rpo_numbers[rpo_numbers_temp[j]] = j;
4675 XDELETE (rpo_numbers_temp);
4677 VN_TOP = build_decl (UNKNOWN_LOCATION, VAR_DECL,
4678 get_identifier ("VN_TOP"), error_mark_node);
4680 renumber_gimple_stmt_uids ();
4682 /* Create the valid and optimistic value numbering tables. */
4683 valid_info = XCNEW (struct vn_tables_s);
4684 allocate_vn_table (valid_info);
4685 optimistic_info = XCNEW (struct vn_tables_s);
4686 allocate_vn_table (optimistic_info);
4687 current_info = valid_info;
4689 /* Create the VN_INFO structures, and initialize value numbers to
4690 TOP or VARYING for parameters. */
4691 size_t i;
4692 tree name;
4694 FOR_EACH_SSA_NAME (i, name, cfun)
4696 VN_INFO_GET (name)->valnum = VN_TOP;
4697 VN_INFO (name)->needs_insertion = false;
4698 VN_INFO (name)->expr = NULL;
4699 VN_INFO (name)->value_id = 0;
4701 if (!SSA_NAME_IS_DEFAULT_DEF (name))
4702 continue;
4704 switch (TREE_CODE (SSA_NAME_VAR (name)))
4706 case VAR_DECL:
4707 /* All undefined vars are VARYING. */
4708 VN_INFO (name)->valnum = name;
4709 VN_INFO (name)->visited = true;
4710 break;
4712 case PARM_DECL:
4713 /* Parameters are VARYING but we can record a condition
4714 if we know it is a non-NULL pointer. */
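/* Illustrative: for a parameter declared with attribute nonnull this
   records "p_1(D) != 0 == true", so a later "if (p_1(D) != 0)" can be
   folded through the recorded-expression lookup.  */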
4715 VN_INFO (name)->visited = true;
4716 VN_INFO (name)->valnum = name;
4717 if (POINTER_TYPE_P (TREE_TYPE (name))
4718 && nonnull_arg_p (SSA_NAME_VAR (name)))
4720 tree ops[2];
4721 ops[0] = name;
4722 ops[1] = build_int_cst (TREE_TYPE (name), 0);
4723 vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4724 boolean_true_node, 0);
4725 if (dump_file && (dump_flags & TDF_DETAILS))
4727 fprintf (dump_file, "Recording ");
4728 print_generic_expr (dump_file, name, TDF_SLIM);
4729 fprintf (dump_file, " != 0\n");
4732 break;
4734 case RESULT_DECL:
4735 /* If the result is passed by invisible reference the default
4736 def is initialized, otherwise it's uninitialized. Either
4737 way it is treated as varying. */
4738 VN_INFO (name)->visited = true;
4739 VN_INFO (name)->valnum = name;
4740 break;
4742 default:
4743 gcc_unreachable ();
4748 /* Restore SSA info that has been reset on value leaders. */
4750 void
4751 scc_vn_restore_ssa_info (void)
4753 unsigned i;
4754 tree name;
4756 FOR_EACH_SSA_NAME (i, name, cfun)
4758 if (has_VN_INFO (name))
4760 if (VN_INFO (name)->needs_insertion)
4762 else if (POINTER_TYPE_P (TREE_TYPE (name))
4763 && VN_INFO (name)->info.ptr_info)
4764 SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4765 else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4766 && VN_INFO (name)->info.range_info)
4768 SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4769 SSA_NAME_ANTI_RANGE_P (name)
4770 = VN_INFO (name)->range_info_anti_range_p;
4776 void
4777 free_scc_vn (void)
4779 size_t i;
4780 tree name;
4782 delete constant_to_value_id;
4783 constant_to_value_id = NULL;
4784 BITMAP_FREE (constant_value_ids);
4785 shared_lookup_phiargs.release ();
4786 shared_lookup_references.release ();
4787 XDELETEVEC (rpo_numbers);
4789 FOR_EACH_SSA_NAME (i, name, cfun)
4791 if (has_VN_INFO (name)
4792 && VN_INFO (name)->needs_insertion)
4793 release_ssa_name (name);
4795 obstack_free (&vn_ssa_aux_obstack, NULL);
4796 vn_ssa_aux_table.release ();
4798 sccstack.release ();
4799 free_vn_table (valid_info);
4800 XDELETE (valid_info);
4801 free_vn_table (optimistic_info);
4802 XDELETE (optimistic_info);
4804 BITMAP_FREE (const_parms);
4807 /* Set *ID according to RESULT. */
4809 static void
4810 set_value_id_for_result (tree result, unsigned int *id)
4812 if (result && TREE_CODE (result) == SSA_NAME)
4813 *id = VN_INFO (result)->value_id;
4814 else if (result && is_gimple_min_invariant (result))
4815 *id = get_or_alloc_constant_value_id (result);
4816 else
4817 *id = get_next_value_id ();
4820 /* Set the value ids in the valid hash tables. */
4822 static void
4823 set_hashtable_value_ids (void)
4825 vn_nary_op_iterator_type hin;
4826 vn_phi_iterator_type hip;
4827 vn_reference_iterator_type hir;
4828 vn_nary_op_t vno;
4829 vn_reference_t vr;
4830 vn_phi_t vp;
4832 /* Now set the value ids of the things we had put in the hash
4833 table. */
4835 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4836 set_value_id_for_result (vno->result, &vno->value_id);
4838 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4839 set_value_id_for_result (vp->result, &vp->value_id);
4841 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4842 hir)
4843 set_value_id_for_result (vr->result, &vr->value_id);
4846 class sccvn_dom_walker : public dom_walker
4848 public:
4849 sccvn_dom_walker ()
4850 : dom_walker (CDI_DOMINATORS, REACHABLE_BLOCKS), cond_stack (0) {}
4852 virtual edge before_dom_children (basic_block);
4853 virtual void after_dom_children (basic_block);
4855 void record_cond (basic_block,
4856 enum tree_code code, tree lhs, tree rhs, bool value);
4857 void record_conds (basic_block,
4858 enum tree_code code, tree lhs, tree rhs, bool value);
4860 auto_vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4861 cond_stack;
4864 /* Record a temporary condition for the BB and its dominated blocks. */
4866 void
4867 sccvn_dom_walker::record_cond (basic_block bb,
4868 enum tree_code code, tree lhs, tree rhs,
4869 bool value)
4871 tree ops[2] = { lhs, rhs };
4872 vn_nary_op_t old = NULL;
4873 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4874 current_info->nary->remove_elt_with_hash (old, old->hashcode);
4875 vn_nary_op_t cond
4876 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4877 value
4878 ? boolean_true_node
4879 : boolean_false_node, 0);
4880 if (dump_file && (dump_flags & TDF_DETAILS))
4882 fprintf (dump_file, "Recording temporarily ");
4883 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4884 fprintf (dump_file, " %s ", get_tree_code_name (code));
4885 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4886 fprintf (dump_file, " == %s%s\n",
4887 value ? "true" : "false",
4888 old ? " (old entry saved)" : "");
4890 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4893 /* Record temporary conditions for the BB and its dominated blocks
4894 according to LHS CODE RHS == VALUE and the conditions it implies. */
4896 void
4897 sccvn_dom_walker::record_conds (basic_block bb,
4898 enum tree_code code, tree lhs, tree rhs,
4899 bool value)
4901 /* Record the original condition. */
4902 record_cond (bb, code, lhs, rhs, value);
4904 if (!value)
4905 return;
4907 /* Record dominated conditions if the condition is true. Note that
4908 the inversion is already recorded. */
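/* For example (illustrative): from "a_1 < b_2 == true" the cases below
   additionally record "a_1 <= b_2 == true", "a_1 != b_2 == true" and
   "a_1 == b_2 == false".  */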
4909 switch (code)
4911 case LT_EXPR:
4912 case GT_EXPR:
4913 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4914 record_cond (bb, NE_EXPR, lhs, rhs, true);
4915 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4916 break;
4918 case EQ_EXPR:
4919 record_cond (bb, LE_EXPR, lhs, rhs, true);
4920 record_cond (bb, GE_EXPR, lhs, rhs, true);
4921 record_cond (bb, LT_EXPR, lhs, rhs, false);
4922 record_cond (bb, GT_EXPR, lhs, rhs, false);
4923 break;
4925 default:
4926 break;
4930 /* Restore expressions and values derived from conditionals. */
4932 void
4933 sccvn_dom_walker::after_dom_children (basic_block bb)
4935 while (!cond_stack.is_empty ()
4936 && cond_stack.last ().first == bb)
4938 vn_nary_op_t cond = cond_stack.last ().second.first;
4939 vn_nary_op_t old = cond_stack.last ().second.second;
4940 current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4941 if (old)
4942 vn_nary_op_insert_into (old, current_info->nary, false);
4943 cond_stack.pop ();
4947 /* Value number all statements in BB. */
4949 edge
4950 sccvn_dom_walker::before_dom_children (basic_block bb)
4952 if (dump_file && (dump_flags & TDF_DETAILS))
4953 fprintf (dump_file, "Visiting BB %d\n", bb->index);
4955 /* If we have a single predecessor record the equivalence from a
4956 possible condition on the predecessor edge. */
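/* Say (illustrative) the lone predecessor ends in "if (x_1 == 0)" and we
   entered over its true edge; then "x_1 == 0 == true" (and the derived
   conditions) are recorded temporarily for this block and its dominated
   blocks.  */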
4957 edge pred_e = single_pred_edge_ignoring_loop_edges (bb, false);
4958 if (pred_e)
4960 /* Check if there are multiple executable successor edges in
4961 the source block. Otherwise there is no additional info
4962 to be recorded. */
4963 edge_iterator ei;
4964 edge e2;
4965 FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4966 if (e2 != pred_e
4967 && e2->flags & EDGE_EXECUTABLE)
4968 break;
4969 if (e2 && (e2->flags & EDGE_EXECUTABLE))
4971 gimple *stmt = last_stmt (pred_e->src);
4972 if (stmt
4973 && gimple_code (stmt) == GIMPLE_COND)
4975 enum tree_code code = gimple_cond_code (stmt);
4976 tree lhs = gimple_cond_lhs (stmt);
4977 tree rhs = gimple_cond_rhs (stmt);
4978 record_conds (bb, code, lhs, rhs,
4979 (pred_e->flags & EDGE_TRUE_VALUE) != 0);
4980 code = invert_tree_comparison (code, HONOR_NANS (lhs));
4981 if (code != ERROR_MARK)
4982 record_conds (bb, code, lhs, rhs,
4983 (pred_e->flags & EDGE_TRUE_VALUE) == 0);
4988 /* Value-number all defs in the basic-block. */
4989 for (gphi_iterator gsi = gsi_start_phis (bb);
4990 !gsi_end_p (gsi); gsi_next (&gsi))
4992 gphi *phi = gsi.phi ();
4993 tree res = PHI_RESULT (phi);
4994 if (!VN_INFO (res)->visited)
4995 DFS (res);
4997 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4998 !gsi_end_p (gsi); gsi_next (&gsi))
5000 ssa_op_iter i;
5001 tree op;
5002 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
5003 if (!VN_INFO (op)->visited)
5004 DFS (op);
5007 /* Finally look at the last stmt. */
5008 gimple *stmt = last_stmt (bb);
5009 if (!stmt)
5010 return NULL;
5012 enum gimple_code code = gimple_code (stmt);
5013 if (code != GIMPLE_COND
5014 && code != GIMPLE_SWITCH
5015 && code != GIMPLE_GOTO)
5016 return NULL;
5018 if (dump_file && (dump_flags & TDF_DETAILS))
5020 fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
5021 print_gimple_stmt (dump_file, stmt, 0);
5024 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
5025 if value-numbering can prove they are not reachable. Handling
5026 computed gotos is also possible. */
5027 tree val;
5028 switch (code)
5030 case GIMPLE_COND:
5032 tree lhs = vn_valueize (gimple_cond_lhs (stmt));
5033 tree rhs = vn_valueize (gimple_cond_rhs (stmt));
5034 val = gimple_simplify (gimple_cond_code (stmt),
5035 boolean_type_node, lhs, rhs,
5036 NULL, vn_valueize);
5037 /* If that didn't simplify to a constant see if we have recorded
5038 temporary expressions from taken edges. */
5039 if (!val || TREE_CODE (val) != INTEGER_CST)
5041 tree ops[2];
5042 ops[0] = lhs;
5043 ops[1] = rhs;
5044 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
5045 boolean_type_node, ops, NULL);
5047 break;
5049 case GIMPLE_SWITCH:
5050 val = gimple_switch_index (as_a <gswitch *> (stmt));
5051 break;
5052 case GIMPLE_GOTO:
5053 val = gimple_goto_dest (stmt);
5054 break;
5055 default:
5056 gcc_unreachable ();
5058 if (!val)
5059 return NULL;
5061 edge taken = find_taken_edge (bb, vn_valueize (val));
5062 if (!taken)
5063 return NULL;
5065 if (dump_file && (dump_flags & TDF_DETAILS))
5066 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
5067 "not executable\n", bb->index, bb->index, taken->dest->index);
5069 return taken;
5072 /* Do SCCVN. DEFAULT_VN_WALK_KIND_ specifies how we use the
5073 alias oracle walking during the VN process. */
5076 void
5077 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
5079 size_t i;
5081 default_vn_walk_kind = default_vn_walk_kind_;
5083 init_scc_vn ();
5085 /* Collect pointers we know point to readonly memory. */
5086 const_parms = BITMAP_ALLOC (NULL);
5087 tree fnspec = lookup_attribute ("fn spec",
5088 TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
5089 if (fnspec)
5091 fnspec = TREE_VALUE (TREE_VALUE (fnspec));
5092 i = 1;
5093 for (tree arg = DECL_ARGUMENTS (cfun->decl);
5094 arg; arg = DECL_CHAIN (arg), ++i)
5096 if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
5097 break;
5098 if (TREE_STRING_POINTER (fnspec)[i] == 'R'
5099 || TREE_STRING_POINTER (fnspec)[i] == 'r')
5101 tree name = ssa_default_def (cfun, arg);
5102 if (name)
5103 bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
5108 /* Walk all blocks in dominator order, value-numbering the stmts'
5109 SSA defs and deciding whether outgoing edges are not executable. */
5110 sccvn_dom_walker walker;
5111 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5113 /* Initialize the value ids and prune out remaining VN_TOPs
5114 from dead code. */
5115 tree name;
5116 FOR_EACH_SSA_NAME (i, name, cfun)
5118 vn_ssa_aux_t info = VN_INFO (name);
5119 if (!info->visited
5120 || info->valnum == VN_TOP)
5121 info->valnum = name;
5122 if (info->valnum == name)
5123 info->value_id = get_next_value_id ();
5124 else if (is_gimple_min_invariant (info->valnum))
5125 info->value_id = get_or_alloc_constant_value_id (info->valnum);
5128 /* Propagate. */
5129 FOR_EACH_SSA_NAME (i, name, cfun)
5131 vn_ssa_aux_t info = VN_INFO (name);
5132 if (TREE_CODE (info->valnum) == SSA_NAME
5133 && info->valnum != name
5134 && info->value_id != VN_INFO (info->valnum)->value_id)
5135 info->value_id = VN_INFO (info->valnum)->value_id;
5138 set_hashtable_value_ids ();
5140 if (dump_file && (dump_flags & TDF_DETAILS))
5142 fprintf (dump_file, "Value numbers:\n");
5143 FOR_EACH_SSA_NAME (i, name, cfun)
5145 if (VN_INFO (name)->visited
5146 && SSA_VAL (name) != name)
5148 print_generic_expr (dump_file, name);
5149 fprintf (dump_file, " = ");
5150 print_generic_expr (dump_file, SSA_VAL (name));
5151 fprintf (dump_file, "\n");
5157 /* Return the maximum value id we have ever seen. */
5159 unsigned int
5160 get_max_value_id (void)
5162 return next_value_id;
5165 /* Return the next unique value id. */
5167 unsigned int
5168 get_next_value_id (void)
5170 return next_value_id++;
5174 /* Compare two expressions E1 and E2 and return true if they are equal. */
5176 bool
5177 expressions_equal_p (tree e1, tree e2)
5179 /* The obvious case. */
5180 if (e1 == e2)
5181 return true;
5183 /* If either one is VN_TOP consider them equal. */
5184 if (e1 == VN_TOP || e2 == VN_TOP)
5185 return true;
5187 /* If only one of them is null, they cannot be equal. */
5188 if (!e1 || !e2)
5189 return false;
5191 /* Now perform the actual comparison. */
5192 if (TREE_CODE (e1) == TREE_CODE (e2)
5193 && operand_equal_p (e1, e2, OEP_PURE_SAME))
5194 return true;
5196 return false;
5200 /* Return true if the nary operation NARY may trap. This is a copy
5201 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
5203 bool
5204 vn_nary_may_trap (vn_nary_op_t nary)
5206 tree type;
5207 tree rhs2 = NULL_TREE;
5208 bool honor_nans = false;
5209 bool honor_snans = false;
5210 bool fp_operation = false;
5211 bool honor_trapv = false;
5212 bool handled, ret;
5213 unsigned i;
5215 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5216 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5217 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5219 type = nary->type;
5220 fp_operation = FLOAT_TYPE_P (type);
5221 if (fp_operation)
5223 honor_nans = flag_trapping_math && !flag_finite_math_only;
5224 honor_snans = flag_signaling_nans != 0;
5226 else if (INTEGRAL_TYPE_P (type)
5227 && TYPE_OVERFLOW_TRAPS (type))
5228 honor_trapv = true;
5230 if (nary->length >= 2)
5231 rhs2 = nary->op[1];
5232 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5233 honor_trapv,
5234 honor_nans, honor_snans, rhs2,
5235 &handled);
5236 if (handled
5237 && ret)
5238 return true;
5240 for (i = 0; i < nary->length; ++i)
5241 if (tree_could_trap_p (nary->op[i]))
5242 return true;
5244 return false;
5248 class eliminate_dom_walker : public dom_walker
5250 public:
5251 eliminate_dom_walker (cdi_direction, bitmap);
5252 ~eliminate_dom_walker ();
5254 virtual edge before_dom_children (basic_block);
5255 virtual void after_dom_children (basic_block);
5257 tree eliminate_avail (tree op);
5258 void eliminate_push_avail (tree op);
5259 tree eliminate_insert (gimple_stmt_iterator *gsi, tree val);
5261 bool do_pre;
5262 unsigned int el_todo;
5263 unsigned int eliminations;
5264 unsigned int insertions;
5266 /* SSA names that had their defs inserted by PRE if do_pre. */
5267 bitmap inserted_exprs;
5269 /* Blocks with statements that have had their EH properties changed. */
5270 bitmap need_eh_cleanup;
5272 /* Blocks with statements that have had their AB properties changed. */
5273 bitmap need_ab_cleanup;
5275 auto_vec<gimple *> to_remove;
5276 auto_vec<gimple *> to_fixup;
5277 auto_vec<tree> avail;
5278 auto_vec<tree> avail_stack;
5281 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
5282 bitmap inserted_exprs_)
5283 : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
5284 el_todo (0), eliminations (0), insertions (0),
5285 inserted_exprs (inserted_exprs_)
5287 need_eh_cleanup = BITMAP_ALLOC (NULL);
5288 need_ab_cleanup = BITMAP_ALLOC (NULL);
5291 eliminate_dom_walker::~eliminate_dom_walker ()
5293 BITMAP_FREE (need_eh_cleanup);
5294 BITMAP_FREE (need_ab_cleanup);
5297 /* Return a leader for OP that is available at the current point of the
5298 eliminate domwalk. */
5300 tree
5301 eliminate_dom_walker::eliminate_avail (tree op)
5303 tree valnum = VN_INFO (op)->valnum;
5304 if (TREE_CODE (valnum) == SSA_NAME)
5306 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5307 return valnum;
5308 if (avail.length () > SSA_NAME_VERSION (valnum))
5309 return avail[SSA_NAME_VERSION (valnum)];
5311 else if (is_gimple_min_invariant (valnum))
5312 return valnum;
5313 return NULL_TREE;
5316 /* At the current point of the eliminate domwalk make OP available. */
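/* A note on the scheme (summarizing, not in the original comment): AVAIL
   is indexed by the SSA version of the value number and holds the current
   leader; AVAIL_STACK remembers the leader being overwritten (or OP itself
   when there was none) so the domwalk can unwind availability when leaving
   a block.  */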
5318 void
5319 eliminate_dom_walker::eliminate_push_avail (tree op)
5321 tree valnum = VN_INFO (op)->valnum;
5322 if (TREE_CODE (valnum) == SSA_NAME)
5324 if (avail.length () <= SSA_NAME_VERSION (valnum))
5325 avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
5326 tree pushop = op;
5327 if (avail[SSA_NAME_VERSION (valnum)])
5328 pushop = avail[SSA_NAME_VERSION (valnum)];
5329 avail_stack.safe_push (pushop);
5330 avail[SSA_NAME_VERSION (valnum)] = op;
5334 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
5335 the leader for the expression if insertion was successful. */
5337 tree
5338 eliminate_dom_walker::eliminate_insert (gimple_stmt_iterator *gsi, tree val)
5340 /* We can insert a sequence with a single assignment only. */
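/* E.g. (an assumed case): SCCVN may have recorded "_7 = (int) x_2" as the
   expression for VAL; if a leader for x_2 is available here we rebuild
   that conversion and insert it before the use.  */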
5341 gimple_seq stmts = VN_INFO (val)->expr;
5342 if (!gimple_seq_singleton_p (stmts))
5343 return NULL_TREE;
5344 gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
5345 if (!stmt
5346 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
5347 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
5348 && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
5349 && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
5350 || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
5351 return NULL_TREE;
5353 tree op = gimple_assign_rhs1 (stmt);
5354 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
5355 || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5356 op = TREE_OPERAND (op, 0);
5357 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
5358 if (!leader)
5359 return NULL_TREE;
5361 tree res;
5362 stmts = NULL;
5363 if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5364 res = gimple_build (&stmts, BIT_FIELD_REF,
5365 TREE_TYPE (val), leader,
5366 TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
5367 TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
5368 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
5369 res = gimple_build (&stmts, BIT_AND_EXPR,
5370 TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
5371 else
5372 res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
5373 TREE_TYPE (val), leader);
5374 if (TREE_CODE (res) != SSA_NAME
5375 || SSA_NAME_IS_DEFAULT_DEF (res)
5376 || gimple_bb (SSA_NAME_DEF_STMT (res)))
5378 gimple_seq_discard (stmts);
5380 /* During propagation we have to treat SSA info conservatively
5381 and thus we can end up simplifying the inserted expression
5382 at elimination time to something not defined in stmts. */
5383 /* But then this is a redundancy we failed to detect, which means
5384 res now has two values. That doesn't play well with how
5385 we track availability here, so give up. */
5386 if (dump_file && (dump_flags & TDF_DETAILS))
5388 if (TREE_CODE (res) == SSA_NAME)
5389 res = eliminate_avail (res);
5390 if (res)
5392 fprintf (dump_file, "Failed to insert expression for value ");
5393 print_generic_expr (dump_file, val);
5394 fprintf (dump_file, " which is really fully redundant to ");
5395 print_generic_expr (dump_file, res);
5396 fprintf (dump_file, "\n");
5400 return NULL_TREE;
5402 else
5404 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
5405 VN_INFO_GET (res)->valnum = val;
5408 insertions++;
5409 if (dump_file && (dump_flags & TDF_DETAILS))
5411 fprintf (dump_file, "Inserted ");
5412 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
5415 return res;
5420 /* Perform elimination for the basic-block B during the domwalk. */
5422 edge
5423 eliminate_dom_walker::before_dom_children (basic_block b)
5425 /* Mark new bb. */
5426 avail_stack.safe_push (NULL_TREE);
5428 /* Skip unreachable blocks marked unreachable during the SCCVN domwalk. */
5429 edge_iterator ei;
5430 edge e;
5431 FOR_EACH_EDGE (e, ei, b->preds)
5432 if (e->flags & EDGE_EXECUTABLE)
5433 break;
5434 if (! e)
5435 return NULL;
5437 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
5439 gphi *phi = gsi.phi ();
5440 tree res = PHI_RESULT (phi);
5442 if (virtual_operand_p (res))
5444 gsi_next (&gsi);
5445 continue;
5448 tree sprime = eliminate_avail (res);
5449 if (sprime
5450 && sprime != res)
5452 if (dump_file && (dump_flags & TDF_DETAILS))
5454 fprintf (dump_file, "Replaced redundant PHI node defining ");
5455 print_generic_expr (dump_file, res);
5456 fprintf (dump_file, " with ");
5457 print_generic_expr (dump_file, sprime);
5458 fprintf (dump_file, "\n");
5461 /* If we inserted this PHI node ourself, it's not an elimination. */
5462 if (! inserted_exprs
5463 || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
5464 eliminations++;
5466 /* If we will propagate into all uses don't bother to do
5467 anything. */
5468 if (may_propagate_copy (res, sprime))
5470 /* Mark the PHI for removal. */
5471 to_remove.safe_push (phi);
5472 gsi_next (&gsi);
5473 continue;
5476 remove_phi_node (&gsi, false);
5478 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
5479 sprime = fold_convert (TREE_TYPE (res), sprime);
5480 gimple *stmt = gimple_build_assign (res, sprime);
5481 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
5482 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
5483 continue;
5486 eliminate_push_avail (res);
5487 gsi_next (&gsi);
5490 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
5491 !gsi_end_p (gsi);
5492 gsi_next (&gsi))
5494 tree sprime = NULL_TREE;
5495 gimple *stmt = gsi_stmt (gsi);
5496 tree lhs = gimple_get_lhs (stmt);
5497 if (lhs && TREE_CODE (lhs) == SSA_NAME
5498 && !gimple_has_volatile_ops (stmt)
5499 /* See PR43491. Do not replace a global register variable when
5500 it is the RHS of an assignment. Do replace local register
5501 variables since gcc does not guarantee a local variable will
5502 be allocated in a register.
5503 ??? The fix isn't effective here. This should instead
5504 be ensured by not value-numbering them the same but treating
5505 them like volatiles? */
5506 && !(gimple_assign_single_p (stmt)
5507 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
5508 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
5509 && is_global_var (gimple_assign_rhs1 (stmt)))))
5511 sprime = eliminate_avail (lhs);
5512 if (!sprime)
5514 /* If there is no existing usable leader but SCCVN thinks
5515 it has an expression it wants to use as replacement,
5516 insert that. */
5517 tree val = VN_INFO (lhs)->valnum;
5518 if (val != VN_TOP
5519 && TREE_CODE (val) == SSA_NAME
5520 && VN_INFO (val)->needs_insertion
5521 && VN_INFO (val)->expr != NULL
5522 && (sprime = eliminate_insert (&gsi, val)) != NULL_TREE)
5523 eliminate_push_avail (sprime);
5526 /* If this now constitutes a copy duplicate points-to
5527 and range info appropriately. This is especially
5528 important for inserted code. See tree-ssa-copy.c
5529 for similar code. */
5530 if (sprime
5531 && TREE_CODE (sprime) == SSA_NAME)
5533 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
5534 if (POINTER_TYPE_P (TREE_TYPE (lhs))
5535 && VN_INFO_PTR_INFO (lhs)
5536 && ! VN_INFO_PTR_INFO (sprime))
5538 duplicate_ssa_name_ptr_info (sprime,
5539 VN_INFO_PTR_INFO (lhs));
5540 if (b != sprime_b)
5541 mark_ptr_info_alignment_unknown
5542 (SSA_NAME_PTR_INFO (sprime));
5544 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5545 && VN_INFO_RANGE_INFO (lhs)
5546 && ! VN_INFO_RANGE_INFO (sprime)
5547 && b == sprime_b)
5548 duplicate_ssa_name_range_info (sprime,
5549 VN_INFO_RANGE_TYPE (lhs),
5550 VN_INFO_RANGE_INFO (lhs));
5553 /* Inhibit the use of an inserted PHI on a loop header when
5554 the address of the memory reference is a simple induction
5555 variable. In other cases the vectorizer won't do anything
5556 anyway (either it's loop invariant or a complicated
5557 expression). */
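/* Roughly: a PRE-inserted PHI on the loop header would make this load's
   value flow in from the previous iteration, adding a loop-carried
   dependence that can keep the vectorizer from vectorizing the loop,
   so in that case the leader is dropped below.  */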
5558 if (sprime
5559 && TREE_CODE (sprime) == SSA_NAME
5560 && do_pre
5561 && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
5562 && loop_outer (b->loop_father)
5563 && has_zero_uses (sprime)
5564 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
5565 && gimple_assign_load_p (stmt))
5567 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
5568 basic_block def_bb = gimple_bb (def_stmt);
5569 if (gimple_code (def_stmt) == GIMPLE_PHI
5570 && def_bb->loop_father->header == def_bb)
5572 loop_p loop = def_bb->loop_father;
5573 ssa_op_iter iter;
5574 tree op;
5575 bool found = false;
5576 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5578 affine_iv iv;
5579 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
5580 if (def_bb
5581 && flow_bb_inside_loop_p (loop, def_bb)
5582 && simple_iv (loop, loop, op, &iv, true))
5584 found = true;
5585 break;
5588 if (found)
5590 if (dump_file && (dump_flags & TDF_DETAILS))
5592 fprintf (dump_file, "Not replacing ");
5593 print_gimple_expr (dump_file, stmt, 0);
5594 fprintf (dump_file, " with ");
5595 print_generic_expr (dump_file, sprime);
5596 fprintf (dump_file, " which would add a loop"
5597 " carried dependence to loop %d\n",
5598 loop->num);
5600 /* Don't keep sprime available. */
5601 sprime = NULL_TREE;
5606 if (sprime)
5608 /* If we can propagate the value computed for LHS into
5609 all uses don't bother doing anything with this stmt. */
5610 if (may_propagate_copy (lhs, sprime))
5612 /* Mark it for removal. */
5613 to_remove.safe_push (stmt);
5615 /* ??? Don't count copy/constant propagations. */
5616 if (gimple_assign_single_p (stmt)
5617 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5618 || gimple_assign_rhs1 (stmt) == sprime))
5619 continue;
5621 if (dump_file && (dump_flags & TDF_DETAILS))
5623 fprintf (dump_file, "Replaced ");
5624 print_gimple_expr (dump_file, stmt, 0);
5625 fprintf (dump_file, " with ");
5626 print_generic_expr (dump_file, sprime);
5627 fprintf (dump_file, " in all uses of ");
5628 print_gimple_stmt (dump_file, stmt, 0);
5631 eliminations++;
5632 continue;
5635 /* If this is an assignment from our leader (which
5636 happens in the case the value-number is a constant)
5637 then there is nothing to do. */
5638 if (gimple_assign_single_p (stmt)
5639 && sprime == gimple_assign_rhs1 (stmt))
5640 continue;
5642 /* Else replace its RHS. */
5643 bool can_make_abnormal_goto
5644 = is_gimple_call (stmt)
5645 && stmt_can_make_abnormal_goto (stmt);
5647 if (dump_file && (dump_flags & TDF_DETAILS))
5649 fprintf (dump_file, "Replaced ");
5650 print_gimple_expr (dump_file, stmt, 0);
5651 fprintf (dump_file, " with ");
5652 print_generic_expr (dump_file, sprime);
5653 fprintf (dump_file, " in ");
5654 print_gimple_stmt (dump_file, stmt, 0);
5657 eliminations++;
5658 gimple *orig_stmt = stmt;
5659 if (!useless_type_conversion_p (TREE_TYPE (lhs),
5660 TREE_TYPE (sprime)))
5661 sprime = fold_convert (TREE_TYPE (lhs), sprime);
5662 tree vdef = gimple_vdef (stmt);
5663 tree vuse = gimple_vuse (stmt);
5664 propagate_tree_value_into_stmt (&gsi, sprime);
5665 stmt = gsi_stmt (gsi);
5666 update_stmt (stmt);
5667 if (vdef != gimple_vdef (stmt))
5668 VN_INFO (vdef)->valnum = vuse;
5670 /* If we removed EH side-effects from the statement, clean
5671 its EH information. */
5672 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
5674 bitmap_set_bit (need_eh_cleanup,
5675 gimple_bb (stmt)->index);
5676 if (dump_file && (dump_flags & TDF_DETAILS))
5677 fprintf (dump_file, " Removed EH side-effects.\n");
5680 /* Likewise for AB side-effects. */
5681 if (can_make_abnormal_goto
5682 && !stmt_can_make_abnormal_goto (stmt))
5684 bitmap_set_bit (need_ab_cleanup,
5685 gimple_bb (stmt)->index);
5686 if (dump_file && (dump_flags & TDF_DETAILS))
5687 fprintf (dump_file, " Removed AB side-effects.\n");
5690 continue;
5694 /* If the statement is a scalar store, see if the expression
5695 has the same value number as its rhs. If so, the store is
5696 dead. */
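/* Minimal illustration (assumed GIMPLE): for
     x_2 = *p_1;
     *p_1 = x_2;
   the store's rhs value numbers to what a lookup of *p_1 at the store's
   vuse already yields, so the store is redundant and is queued for
   removal below (modulo the alias-set check).  */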
5697 if (gimple_assign_single_p (stmt)
5698 && !gimple_has_volatile_ops (stmt)
5699 && !is_gimple_reg (gimple_assign_lhs (stmt))
5700 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5701 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
5703 tree val;
5704 tree rhs = gimple_assign_rhs1 (stmt);
5705 vn_reference_t vnresult;
5706 val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
5707 &vnresult, false);
5708 if (TREE_CODE (rhs) == SSA_NAME)
5709 rhs = VN_INFO (rhs)->valnum;
5710 if (val
5711 && operand_equal_p (val, rhs, 0))
5713 /* We can only remove the later store if the former aliases
5714 at least all accesses the later one does or if the store
5715 was to readonly memory storing the same value. */
5716 alias_set_type set = get_alias_set (lhs);
5717 if (! vnresult
5718 || vnresult->set == set
5719 || alias_set_subset_of (set, vnresult->set))
5721 if (dump_file && (dump_flags & TDF_DETAILS))
5723 fprintf (dump_file, "Deleted redundant store ");
5724 print_gimple_stmt (dump_file, stmt, 0);
5727 /* Queue stmt for removal. */
5728 to_remove.safe_push (stmt);
5729 continue;
5734 /* If this is a control statement for which value numbering left edges
5735 unexecuted, force the condition in a way consistent with
5736 that. */
5737 if (gcond *cond = dyn_cast <gcond *> (stmt))
5739 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
5740 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
5742 if (dump_file && (dump_flags & TDF_DETAILS))
5744 fprintf (dump_file, "Removing unexecutable edge from ");
5745 print_gimple_stmt (dump_file, stmt, 0);
5747 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
5748 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
5749 gimple_cond_make_true (cond);
5750 else
5751 gimple_cond_make_false (cond);
5752 update_stmt (cond);
5753 el_todo |= TODO_cleanup_cfg;
5754 continue;
5758 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
5759 bool was_noreturn = (is_gimple_call (stmt)
5760 && gimple_call_noreturn_p (stmt));
5761 tree vdef = gimple_vdef (stmt);
5762 tree vuse = gimple_vuse (stmt);
5764 /* If we didn't replace the whole stmt (or propagate the result
5765 into all uses), replace all uses on this stmt with their
5766 leaders. */
5767 bool modified = false;
5768 use_operand_p use_p;
5769 ssa_op_iter iter;
5770 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5772 tree use = USE_FROM_PTR (use_p);
5773 /* ??? The call code above leaves stmt operands un-updated. */
5774 if (TREE_CODE (use) != SSA_NAME)
5775 continue;
5776 tree sprime = eliminate_avail (use);
5777 if (sprime && sprime != use
5778 && may_propagate_copy (use, sprime)
5779 /* We substitute into debug stmts to avoid excessive
5780 debug temporaries created by removed stmts, but we need
5781 to avoid doing so for inserted sprimes as we never want
5782 to create debug temporaries for them. */
5783 && (!inserted_exprs
5784 || TREE_CODE (sprime) != SSA_NAME
5785 || !is_gimple_debug (stmt)
5786 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
5788 propagate_value (use_p, sprime);
5789 modified = true;
5793 /* Fold the stmt if modified; this canonicalizes MEM_REFs we propagated
5794 into, which is a requirement for the IPA devirt machinery. */
5795 gimple *old_stmt = stmt;
5796 if (modified)
5798 /* If a formerly non-invariant ADDR_EXPR was turned into an invariant
5799 one, it originated on a separate stmt, so recompute its invariant flag here. */
5800 if (gimple_assign_single_p (stmt)
5801 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
5802 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
5803 gimple_stmt_iterator prev = gsi;
5804 gsi_prev (&prev);
5805 if (fold_stmt (&gsi))
5807 /* fold_stmt may have created new stmts in between
5808 the previous stmt and the folded stmt. Mark
5809 all defs created there as varying to not confuse
5810 the SCCVN machinery as we're using that even during
5811 elimination. */
5812 if (gsi_end_p (prev))
5813 prev = gsi_start_bb (b);
5814 else
5815 gsi_next (&prev);
5816 if (gsi_stmt (prev) != gsi_stmt (gsi))
5817 do
5818 {
5819 tree def;
5820 ssa_op_iter dit;
5821 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
5822 dit, SSA_OP_ALL_DEFS)
5823 /* As existing DEFs may move between stmts
5824 we have to guard VN_INFO_GET. */
5825 if (! has_VN_INFO (def))
5826 VN_INFO_GET (def)->valnum = def;
5827 if (gsi_stmt (prev) == gsi_stmt (gsi))
5828 break;
5829 gsi_next (&prev);
5830 }
5831 while (1);
5833 stmt = gsi_stmt (gsi);
5834 /* In case we folded the stmt away, schedule the NOP for removal. */
5835 if (gimple_nop_p (stmt))
5836 to_remove.safe_push (stmt);
5839 /* Visit indirect calls and turn them into direct calls if
5840 possible using the devirtualization machinery. Do this before
5841 checking for required EH/abnormal/noreturn cleanup as devirt
5842 may expose more of those. */
5843 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5845 tree fn = gimple_call_fn (call_stmt);
5846 if (fn
5847 && flag_devirtualize
5848 && virtual_method_call_p (fn))
5850 tree otr_type = obj_type_ref_class (fn);
5851 unsigned HOST_WIDE_INT otr_tok
5852 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
5853 tree instance;
5854 ipa_polymorphic_call_context context (current_function_decl,
5855 fn, stmt, &instance);
5856 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
5857 otr_type, stmt);
5858 bool final;
5859 vec <cgraph_node *> targets
5860 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
5861 otr_tok, context, &final);
5862 if (dump_file)
5863 dump_possible_polymorphic_call_targets (dump_file,
5864 obj_type_ref_class (fn),
5865 otr_tok, context);
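/* With a final set of at most one possible target the indirect call can
   be rewritten: to a direct call to the single target, or to a call to
   __builtin_unreachable when the set is empty.  */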
5866 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5868 tree fn;
5869 if (targets.length () == 1)
5870 fn = targets[0]->decl;
5871 else
5872 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5873 if (dump_enabled_p ())
5875 location_t loc = gimple_location (stmt);
5876 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
5877 "converting indirect call to "
5878 "function %s\n",
5879 lang_hooks.decl_printable_name (fn, 2));
5881 gimple_call_set_fndecl (call_stmt, fn);
5882 /* If changing the call to __builtin_unreachable
5883 or similar noreturn function, adjust gimple_call_fntype
5884 too. */
5885 if (gimple_call_noreturn_p (call_stmt)
5886 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
5887 && TYPE_ARG_TYPES (TREE_TYPE (fn))
5888 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
5889 == void_type_node))
5890 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
5891 maybe_remove_unused_call_args (cfun, call_stmt);
5892 modified = true;
5897 if (modified)
5899 /* When changing a call into a noreturn call, cfg cleanup
5900 is needed to fix up the noreturn call. */
5901 if (!was_noreturn
5902 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
5903 to_fixup.safe_push (stmt);
5904 /* When changing a condition or switch into one for which we know
5905 which edge will be executed, schedule a cfg cleanup. */
5906 if ((gimple_code (stmt) == GIMPLE_COND
5907 && (gimple_cond_true_p (as_a <gcond *> (stmt))
5908 || gimple_cond_false_p (as_a <gcond *> (stmt))))
5909 || (gimple_code (stmt) == GIMPLE_SWITCH
5910 && TREE_CODE (gimple_switch_index
5911 (as_a <gswitch *> (stmt))) == INTEGER_CST))
5912 el_todo |= TODO_cleanup_cfg;
5913 /* If we removed EH side-effects from the statement, clean
5914 its EH information. */
5915 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
5917 bitmap_set_bit (need_eh_cleanup,
5918 gimple_bb (stmt)->index);
5919 if (dump_file && (dump_flags & TDF_DETAILS))
5920 fprintf (dump_file, " Removed EH side-effects.\n");
5922 /* Likewise for AB side-effects. */
5923 if (can_make_abnormal_goto
5924 && !stmt_can_make_abnormal_goto (stmt))
5926 bitmap_set_bit (need_ab_cleanup,
5927 gimple_bb (stmt)->index);
5928 if (dump_file && (dump_flags & TDF_DETAILS))
5929 fprintf (dump_file, " Removed AB side-effects.\n");
5931 update_stmt (stmt);
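/* If folding removed the statement's virtual definition, record the
   incoming VUSE as the value of the old VDEF.  */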
5932 if (vdef != gimple_vdef (stmt))
5933 VN_INFO (vdef)->valnum = vuse;
5936 /* Make new values available - for fully redundant LHS we
5937 continue with the next stmt above and skip this. */
5938 def_operand_p defp;
5939 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
5940 eliminate_push_avail (DEF_FROM_PTR (defp));
5943 /* Replace destination PHI arguments. */
5944 FOR_EACH_EDGE (e, ei, b->succs)
5945 if (e->flags & EDGE_EXECUTABLE)
5946 for (gphi_iterator gsi = gsi_start_phis (e->dest);
5947 !gsi_end_p (gsi);
5948 gsi_next (&gsi))
5950 gphi *phi = gsi.phi ();
5951 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
5952 tree arg = USE_FROM_PTR (use_p);
5953 if (TREE_CODE (arg) != SSA_NAME
5954 || virtual_operand_p (arg))
5955 continue;
5956 tree sprime = eliminate_avail (arg);
5957 if (sprime && may_propagate_copy (arg, sprime))
5958 propagate_value (use_p, sprime);
5960 return NULL;
5963 /* Pop the leaders registered in the block we just finished, making them no longer available. */
5965 void
5966 eliminate_dom_walker::after_dom_children (basic_block)
5968 tree entry;
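/* Pop until the NULL_TREE marker for this block.  Each popped entry
   either still is the current leader for its value, in which case the
   slot is cleared, or it is an earlier leader that a registration in
   this block displaced, in which case it is reinstated.  */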
5969 while ((entry = avail_stack.pop ()) != NULL_TREE)
5971 tree valnum = VN_INFO (entry)->valnum;
5972 tree old = avail[SSA_NAME_VERSION (valnum)];
5973 if (old == entry)
5974 avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
5975 else
5976 avail[SSA_NAME_VERSION (valnum)] = entry;
5980 /* Eliminate fully redundant computations. */
5982 unsigned int
5983 vn_eliminate (bitmap inserted_exprs)
5985 eliminate_dom_walker el (CDI_DOMINATORS, inserted_exprs);
5986 el.avail.reserve (num_ssa_names);
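/* The dominator walk performs the actual propagation and queues stmts
   for removal and noreturn fixup; those queues and the recorded EH and
   abnormal-edge cleanups are processed below, after the walk.  */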
5988 el.walk (cfun->cfg->x_entry_block_ptr);
5990 /* We cannot remove stmts during the BB walk, and especially cannot
5991 release SSA names there, as this confuses the VN machinery. The stmts ending
5992 up in to_remove are either stores or simple copies.
5993 Remove stmts in reverse order to make debug stmt creation possible. */
5994 while (!el.to_remove.is_empty ())
5996 gimple *stmt = el.to_remove.pop ();
5998 if (dump_file && (dump_flags & TDF_DETAILS))
6000 fprintf (dump_file, "Removing dead stmt ");
6001 print_gimple_stmt (dump_file, stmt, 0, 0);
6004 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
6005 if (gimple_code (stmt) == GIMPLE_PHI)
6006 remove_phi_node (&gsi, true);
6007 else
6009 basic_block bb = gimple_bb (stmt);
6010 unlink_stmt_vdef (stmt);
6011 if (gsi_remove (&gsi, true))
6012 bitmap_set_bit (el.need_eh_cleanup, bb->index);
6013 if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
6014 bitmap_set_bit (el.need_ab_cleanup, bb->index);
6015 release_defs (stmt);
6018 /* Removing a stmt may expose a forwarder block. */
6019 el.el_todo |= TODO_cleanup_cfg;
6022 /* Fix up stmts that became noreturn calls. This may require splitting
6023 blocks and thus isn't possible during the dominator walk. Do this
6024 in reverse order so we don't inadvertently remove a stmt we want to
6025 fix up by visiting a dominating, now-noreturn call first. */
6026 while (!el.to_fixup.is_empty ())
6028 gimple *stmt = el.to_fixup.pop ();
6030 if (dump_file && (dump_flags & TDF_DETAILS))
6032 fprintf (dump_file, "Fixing up noreturn call ");
6033 print_gimple_stmt (dump_file, stmt, 0);
6036 if (fixup_noreturn_call (stmt))
6037 el.el_todo |= TODO_cleanup_cfg;
6040 bool do_eh_cleanup = !bitmap_empty_p (el.need_eh_cleanup);
6041 bool do_ab_cleanup = !bitmap_empty_p (el.need_ab_cleanup);
6043 if (do_eh_cleanup)
6044 gimple_purge_all_dead_eh_edges (el.need_eh_cleanup);
6046 if (do_ab_cleanup)
6047 gimple_purge_all_dead_abnormal_call_edges (el.need_ab_cleanup);
6049 if (do_eh_cleanup || do_ab_cleanup)
6050 el.el_todo |= TODO_cleanup_cfg;
6052 statistics_counter_event (cfun, "Eliminated", el.eliminations);
6053 statistics_counter_event (cfun, "Insertions", el.insertions);
6055 return el.el_todo;
6059 namespace {
6061 const pass_data pass_data_fre =
6063 GIMPLE_PASS, /* type */
6064 "fre", /* name */
6065 OPTGROUP_NONE, /* optinfo_flags */
6066 TV_TREE_FRE, /* tv_id */
6067 ( PROP_cfg | PROP_ssa ), /* properties_required */
6068 0, /* properties_provided */
6069 0, /* properties_destroyed */
6070 0, /* todo_flags_start */
6071 0, /* todo_flags_finish */
6074 class pass_fre : public gimple_opt_pass
6076 public:
6077 pass_fre (gcc::context *ctxt)
6078 : gimple_opt_pass (pass_data_fre, ctxt)
6081 /* opt_pass methods: */
6082 opt_pass * clone () { return new pass_fre (m_ctxt); }
6083 virtual bool gate (function *) { return flag_tree_fre != 0; }
6084 virtual unsigned int execute (function *);
6086 }; // class pass_fre
6088 unsigned int
6089 pass_fre::execute (function *)
6091 unsigned int todo = 0;
6093 run_scc_vn (VN_WALKREWRITE);
6095 /* Remove all the redundant expressions. */
6096 todo |= vn_eliminate (NULL);
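/* Value numbering results are no longer needed; restore the SSA name
   info it recorded and release the VN tables.  */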
6098 scc_vn_restore_ssa_info ();
6099 free_scc_vn ();
6101 return todo;
6104 } // anon namespace
6106 gimple_opt_pass *
6107 make_pass_fre (gcc::context *ctxt)
6109 return new pass_fre (ctxt);