1 /* SCC value numbering for trees
2 Copyright (C) 2006-2018 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimplify.h"
45 #include "flags.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "dumpfile.h"
55 #include "cfgloop.h"
56 #include "params.h"
57 #include "tree-ssa-propagate.h"
58 #include "tree-cfg.h"
59 #include "domwalk.h"
60 #include "gimple-iterator.h"
61 #include "gimple-match.h"
62 #include "stringpool.h"
63 #include "attribs.h"
64 #include "tree-pass.h"
65 #include "statistics.h"
66 #include "langhooks.h"
67 #include "ipa-utils.h"
68 #include "dbgcnt.h"
69 #include "tree-cfgcleanup.h"
70 #include "tree-ssa-loop.h"
71 #include "tree-scalar-evolution.h"
72 #include "tree-ssa-sccvn.h"
74 /* This algorithm is based on the SCC algorithm presented by Keith
75 Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
76 (http://citeseer.ist.psu.edu/41805.html). In
77 straight-line code, it is equivalent to a regular hash-based value
78 numbering that is performed in reverse postorder.
80 For code with cycles, there are two alternatives, both of which
81 require keeping the hashtables separate from the actual list of
82 value numbers for SSA names.
84 1. Iterate value numbering in an RPO walk of the blocks, removing
85 all the entries from the hashtable after each iteration (but
86 keeping the SSA name->value number mapping between iterations).
87 Iterate until it does not change.
89 2. Perform value numbering as part of an SCC walk on the SSA graph,
90 iterating only the cycles in the SSA graph until they do not change
91 (using a separate, optimistic hashtable for value numbering the SCC
92 operands).
94 The second is not just faster in practice (because most SSA graph
95 cycles do not involve all the variables in the graph), it also has
96 some nice properties.
98 One of these nice properties is that when we pop an SCC off the
99 stack, we are guaranteed to have processed all the operands coming from
100 *outside of that SCC*, so we do not need to do anything special to
101 ensure they have value numbers.
103 Another nice property is that the SCC walk is done as part of a DFS
104 of the SSA graph, which makes it easy to perform combining and
105 simplifying operations at the same time.
107 The code below is deliberately written in a way that makes it easy
108 to separate the SCC walk from the other work it does.
110 In order to propagate constants through the code, we track which
111 expressions contain constants, and use those while folding. In
112 theory, we could also track expressions whose value numbers are
113 replaced, in case we end up folding based on expression
114 identities.
116 In order to value number memory, we assign value numbers to vuses.
117 This enables us to note that, for example, stores to the same
118 address of the same value from the same starting memory states are
119 equivalent.
120 TODO:
122 1. We can iterate only the changing portions of the SCCs, but
123 I have not seen an SCC big enough for this to be a win.
124 2. If you differentiate between phi nodes for loops and phi nodes
125 for if-then-else, you can properly consider phi nodes in different
126 blocks for equivalence.
127 3. We could value number vuses in more cases, particularly, whole
128 structure copies.
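/* As a rough, illustrative sketch of the SCC-driven scheme described
   above (pseudocode only, not the exact control flow of this file),
   with "optimistic" and "valid" naming the two kinds of hash tables:

     process one SCC:
       if the SCC has a single member
         value-number it against the valid table;
       else
         repeat
           clear the optimistic table;
           value-number every member against the optimistic table;
         until no member's value number changed;
         record the final results in the valid table;

   Memory is treated the same way, with VUSEs standing in for the
   memory state a reference is evaluated in.  */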
132 static tree *last_vuse_ptr;
133 static vn_lookup_kind vn_walk_kind;
134 static vn_lookup_kind default_vn_walk_kind;
135 bitmap const_parms;
137 /* vn_nary_op hashtable helpers. */
139 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
141 typedef vn_nary_op_s *compare_type;
142 static inline hashval_t hash (const vn_nary_op_s *);
143 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
146 /* Return the computed hashcode for nary operation P1. */
148 inline hashval_t
149 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
151 return vno1->hashcode;
154 /* Compare nary operations P1 and P2 and return true if they are
155 equivalent. */
157 inline bool
158 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
160 return vn_nary_op_eq (vno1, vno2);
163 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
164 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
167 /* vn_phi hashtable helpers. */
169 static int
170 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
172 struct vn_phi_hasher : pointer_hash <vn_phi_s>
174 static inline hashval_t hash (const vn_phi_s *);
175 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
176 static inline void remove (vn_phi_s *);
179 /* Return the computed hashcode for phi operation P1. */
181 inline hashval_t
182 vn_phi_hasher::hash (const vn_phi_s *vp1)
184 return vp1->hashcode;
187 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
189 inline bool
190 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
192 return vn_phi_eq (vp1, vp2);
195 /* Free a phi operation structure VP. */
197 inline void
198 vn_phi_hasher::remove (vn_phi_s *phi)
200 phi->phiargs.release ();
203 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
204 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
207 /* Compare two reference operands P1 and P2 for equality. Return true if
208 they are equal, and false otherwise. */
210 static int
211 vn_reference_op_eq (const void *p1, const void *p2)
213 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
214 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
216 return (vro1->opcode == vro2->opcode
217 /* We do not care about differences in type qualification. */
218 && (vro1->type == vro2->type
219 || (vro1->type && vro2->type
220 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
221 TYPE_MAIN_VARIANT (vro2->type))))
222 && expressions_equal_p (vro1->op0, vro2->op0)
223 && expressions_equal_p (vro1->op1, vro2->op1)
224 && expressions_equal_p (vro1->op2, vro2->op2));
227 /* Free a reference operation structure VP. */
229 static inline void
230 free_reference (vn_reference_s *vr)
232 vr->operands.release ();
236 /* vn_reference hashtable helpers. */
238 struct vn_reference_hasher : pointer_hash <vn_reference_s>
240 static inline hashval_t hash (const vn_reference_s *);
241 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
242 static inline void remove (vn_reference_s *);
245 /* Return the hashcode for a given reference operation P1. */
247 inline hashval_t
248 vn_reference_hasher::hash (const vn_reference_s *vr1)
250 return vr1->hashcode;
253 inline bool
254 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
256 return vn_reference_eq (v, c);
259 inline void
260 vn_reference_hasher::remove (vn_reference_s *v)
262 free_reference (v);
265 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
266 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
269 /* The set of hashtables and alloc_pool's for their items. */
271 typedef struct vn_tables_s
273 vn_nary_op_table_type *nary;
274 vn_phi_table_type *phis;
275 vn_reference_table_type *references;
276 struct obstack nary_obstack;
277 object_allocator<vn_phi_s> *phis_pool;
278 object_allocator<vn_reference_s> *references_pool;
279 } *vn_tables_t;
282 /* vn_constant hashtable helpers. */
284 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
286 static inline hashval_t hash (const vn_constant_s *);
287 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
290 /* Hash table hash function for vn_constant_t. */
292 inline hashval_t
293 vn_constant_hasher::hash (const vn_constant_s *vc1)
295 return vc1->hashcode;
298 /* Hash table equality function for vn_constant_t. */
300 inline bool
301 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
303 if (vc1->hashcode != vc2->hashcode)
304 return false;
306 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
309 static hash_table<vn_constant_hasher> *constant_to_value_id;
310 static bitmap constant_value_ids;
313 /* Valid hashtables storing information we have proven to be
314 correct. */
316 static vn_tables_t valid_info;
318 /* Optimistic hashtables storing information we are making assumptions about
319 during iterations. */
321 static vn_tables_t optimistic_info;
323 /* Pointer to the set of hashtables that is currently being used.
324 Should always point to either the optimistic_info, or the
325 valid_info. */
327 static vn_tables_t current_info;
330 /* Reverse post order index for each basic block. */
332 static int *rpo_numbers;
334 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
336 /* Return the SSA value of the VUSE x, supporting released VDEFs
337 during elimination which will value-number the VDEF to the
338 associated VUSE (but not substitute in the whole lattice). */
340 static inline tree
341 vuse_ssa_val (tree x)
343 if (!x)
344 return NULL_TREE;
348 tree tem = SSA_VAL (x);
349 /* stmt walking can walk over a backedge and reach code we didn't
350 value-number yet. */
351 if (tem == VN_TOP)
352 return x;
353 x = tem;
355 while (SSA_NAME_IN_FREE_LIST (x));
357 return x;
360 /* This represents the top of the VN lattice, which is the universal
361 value. */
363 tree VN_TOP;
365 /* Unique counter for our value ids. */
367 static unsigned int next_value_id;
369 /* Next DFS number and the stack for strongly connected component
370 detection. */
372 static unsigned int next_dfs_num;
373 static vec<tree> sccstack;
377 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
378 are allocated on an obstack for locality reasons, and to free them
379 without looping over the vec. */
381 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
382 static struct obstack vn_ssa_aux_obstack;
384 /* Return whether there is value numbering information for a given SSA name. */
386 bool
387 has_VN_INFO (tree name)
389 if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
390 return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
391 return false;
394 /* Return the value numbering information for a given SSA name. */
396 vn_ssa_aux_t
397 VN_INFO (tree name)
399 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
400 gcc_checking_assert (res);
401 return res;
404 /* Set the value numbering info for a given SSA name to a given
405 value. */
407 static inline void
408 VN_INFO_SET (tree name, vn_ssa_aux_t value)
410 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
413 /* Initialize the value numbering info for a given SSA name.
414 This should be called just once for every SSA name. */
416 vn_ssa_aux_t
417 VN_INFO_GET (tree name)
419 vn_ssa_aux_t newinfo;
421 gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
422 || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
423 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
424 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
425 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
426 vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
427 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
428 return newinfo;
432 /* Return the vn_kind the expression computed by the stmt should be
433 associated with. */
435 enum vn_kind
436 vn_get_stmt_kind (gimple *stmt)
438 switch (gimple_code (stmt))
440 case GIMPLE_CALL:
441 return VN_REFERENCE;
442 case GIMPLE_PHI:
443 return VN_PHI;
444 case GIMPLE_ASSIGN:
446 enum tree_code code = gimple_assign_rhs_code (stmt);
447 tree rhs1 = gimple_assign_rhs1 (stmt);
448 switch (get_gimple_rhs_class (code))
450 case GIMPLE_UNARY_RHS:
451 case GIMPLE_BINARY_RHS:
452 case GIMPLE_TERNARY_RHS:
453 return VN_NARY;
454 case GIMPLE_SINGLE_RHS:
455 switch (TREE_CODE_CLASS (code))
457 case tcc_reference:
458 /* VOP-less references can go through unary case. */
459 if ((code == REALPART_EXPR
460 || code == IMAGPART_EXPR
461 || code == VIEW_CONVERT_EXPR
462 || code == BIT_FIELD_REF)
463 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
464 return VN_NARY;
466 /* Fallthrough. */
467 case tcc_declaration:
468 return VN_REFERENCE;
470 case tcc_constant:
471 return VN_CONSTANT;
473 default:
474 if (code == ADDR_EXPR)
475 return (is_gimple_min_invariant (rhs1)
476 ? VN_CONSTANT : VN_REFERENCE);
477 else if (code == CONSTRUCTOR)
478 return VN_NARY;
479 return VN_NONE;
481 default:
482 return VN_NONE;
485 default:
486 return VN_NONE;
490 /* Lookup a value id for CONSTANT and return it. If it does not
491 exist returns 0. */
493 unsigned int
494 get_constant_value_id (tree constant)
496 vn_constant_s **slot;
497 struct vn_constant_s vc;
499 vc.hashcode = vn_hash_constant_with_type (constant);
500 vc.constant = constant;
501 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
502 if (slot)
503 return (*slot)->value_id;
504 return 0;
507 /* Lookup a value id for CONSTANT, and if it does not exist, create a
508 new one and return it. If it does exist, return it. */
510 unsigned int
511 get_or_alloc_constant_value_id (tree constant)
513 vn_constant_s **slot;
514 struct vn_constant_s vc;
515 vn_constant_t vcp;
517 vc.hashcode = vn_hash_constant_with_type (constant);
518 vc.constant = constant;
519 slot = constant_to_value_id->find_slot (&vc, INSERT);
520 if (*slot)
521 return (*slot)->value_id;
523 vcp = XNEW (struct vn_constant_s);
524 vcp->hashcode = vc.hashcode;
525 vcp->constant = constant;
526 vcp->value_id = get_next_value_id ();
527 *slot = vcp;
528 bitmap_set_bit (constant_value_ids, vcp->value_id);
529 return vcp->value_id;
532 /* Return true if V is a value id for a constant. */
534 bool
535 value_id_constant_p (unsigned int v)
537 return bitmap_bit_p (constant_value_ids, v);
540 /* Compute the hash for a reference operand VRO1. */
542 static void
543 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
545 hstate.add_int (vro1->opcode);
546 if (vro1->op0)
547 inchash::add_expr (vro1->op0, hstate);
548 if (vro1->op1)
549 inchash::add_expr (vro1->op1, hstate);
550 if (vro1->op2)
551 inchash::add_expr (vro1->op2, hstate);
554 /* Compute a hash for the reference operation VR1 and return it. */
556 static hashval_t
557 vn_reference_compute_hash (const vn_reference_t vr1)
559 inchash::hash hstate;
560 hashval_t result;
561 int i;
562 vn_reference_op_t vro;
563 poly_int64 off = -1;
564 bool deref = false;
566 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
568 if (vro->opcode == MEM_REF)
569 deref = true;
570 else if (vro->opcode != ADDR_EXPR)
571 deref = false;
572 if (maybe_ne (vro->off, -1))
574 if (known_eq (off, -1))
575 off = 0;
576 off += vro->off;
578 else
580 if (maybe_ne (off, -1)
581 && maybe_ne (off, 0))
582 hstate.add_poly_int (off);
583 off = -1;
584 if (deref
585 && vro->opcode == ADDR_EXPR)
587 if (vro->op0)
589 tree op = TREE_OPERAND (vro->op0, 0);
590 hstate.add_int (TREE_CODE (op));
591 inchash::add_expr (op, hstate);
594 else
595 vn_reference_op_compute_hash (vro, hstate);
598 result = hstate.end ();
599 /* ??? We would ICE later if we hash instead of adding that in. */
600 if (vr1->vuse)
601 result += SSA_NAME_VERSION (vr1->vuse);
603 return result;
606 /* Return true if reference operations VR1 and VR2 are equivalent. This
607 means they have the same set of operands and vuses. */
609 bool
610 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
612 unsigned i, j;
614 /* Early out if this is not a hash collision. */
615 if (vr1->hashcode != vr2->hashcode)
616 return false;
618 /* The VOP needs to be the same. */
619 if (vr1->vuse != vr2->vuse)
620 return false;
622 /* If the operands are the same we are done. */
623 if (vr1->operands == vr2->operands)
624 return true;
626 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
627 return false;
629 if (INTEGRAL_TYPE_P (vr1->type)
630 && INTEGRAL_TYPE_P (vr2->type))
632 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
633 return false;
635 else if (INTEGRAL_TYPE_P (vr1->type)
636 && (TYPE_PRECISION (vr1->type)
637 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
638 return false;
639 else if (INTEGRAL_TYPE_P (vr2->type)
640 && (TYPE_PRECISION (vr2->type)
641 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
642 return false;
644 i = 0;
645 j = 0;
648 poly_int64 off1 = 0, off2 = 0;
649 vn_reference_op_t vro1, vro2;
650 vn_reference_op_s tem1, tem2;
651 bool deref1 = false, deref2 = false;
652 for (; vr1->operands.iterate (i, &vro1); i++)
654 if (vro1->opcode == MEM_REF)
655 deref1 = true;
656 /* Do not look through a storage order barrier. */
657 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
658 return false;
659 if (known_eq (vro1->off, -1))
660 break;
661 off1 += vro1->off;
663 for (; vr2->operands.iterate (j, &vro2); j++)
665 if (vro2->opcode == MEM_REF)
666 deref2 = true;
667 /* Do not look through a storage order barrier. */
668 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
669 return false;
670 if (known_eq (vro2->off, -1))
671 break;
672 off2 += vro2->off;
674 if (maybe_ne (off1, off2))
675 return false;
676 if (deref1 && vro1->opcode == ADDR_EXPR)
678 memset (&tem1, 0, sizeof (tem1));
679 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
680 tem1.type = TREE_TYPE (tem1.op0);
681 tem1.opcode = TREE_CODE (tem1.op0);
682 vro1 = &tem1;
683 deref1 = false;
685 if (deref2 && vro2->opcode == ADDR_EXPR)
687 memset (&tem2, 0, sizeof (tem2));
688 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
689 tem2.type = TREE_TYPE (tem2.op0);
690 tem2.opcode = TREE_CODE (tem2.op0);
691 vro2 = &tem2;
692 deref2 = false;
694 if (deref1 != deref2)
695 return false;
696 if (!vn_reference_op_eq (vro1, vro2))
697 return false;
698 ++j;
699 ++i;
701 while (vr1->operands.length () != i
702 || vr2->operands.length () != j);
704 return true;
707 /* Copy the operations present in load/store REF into RESULT, a vector of
708 vn_reference_op_s's. */
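/* As an illustrative example (not taken from a dump), a load from
   s.f[i] with s a local VAR_DECL is decomposed into the operand
   vector

     { ARRAY_REF <index i, low bound, element size>,
       COMPONENT_REF <FIELD_DECL f>,
       MEM_REF <offset 0>,
       ADDR_EXPR <&s> }

   i.e. outermost reference first, with the decl canonicalized to
   MEM[&s] as done in the VAR_DECL case below.  */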
710 static void
711 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
713 if (TREE_CODE (ref) == TARGET_MEM_REF)
715 vn_reference_op_s temp;
717 result->reserve (3);
719 memset (&temp, 0, sizeof (temp));
720 temp.type = TREE_TYPE (ref);
721 temp.opcode = TREE_CODE (ref);
722 temp.op0 = TMR_INDEX (ref);
723 temp.op1 = TMR_STEP (ref);
724 temp.op2 = TMR_OFFSET (ref);
725 temp.off = -1;
726 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
727 temp.base = MR_DEPENDENCE_BASE (ref);
728 result->quick_push (temp);
730 memset (&temp, 0, sizeof (temp));
731 temp.type = NULL_TREE;
732 temp.opcode = ERROR_MARK;
733 temp.op0 = TMR_INDEX2 (ref);
734 temp.off = -1;
735 result->quick_push (temp);
737 memset (&temp, 0, sizeof (temp));
738 temp.type = NULL_TREE;
739 temp.opcode = TREE_CODE (TMR_BASE (ref));
740 temp.op0 = TMR_BASE (ref);
741 temp.off = -1;
742 result->quick_push (temp);
743 return;
746 /* For non-calls, store the information that makes up the address. */
747 tree orig = ref;
748 while (ref)
750 vn_reference_op_s temp;
752 memset (&temp, 0, sizeof (temp));
753 temp.type = TREE_TYPE (ref);
754 temp.opcode = TREE_CODE (ref);
755 temp.off = -1;
757 switch (temp.opcode)
759 case MODIFY_EXPR:
760 temp.op0 = TREE_OPERAND (ref, 1);
761 break;
762 case WITH_SIZE_EXPR:
763 temp.op0 = TREE_OPERAND (ref, 1);
764 temp.off = 0;
765 break;
766 case MEM_REF:
767 /* The base address gets its own vn_reference_op_s structure. */
768 temp.op0 = TREE_OPERAND (ref, 1);
769 if (!mem_ref_offset (ref).to_shwi (&temp.off))
770 temp.off = -1;
771 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
772 temp.base = MR_DEPENDENCE_BASE (ref);
773 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
774 break;
775 case BIT_FIELD_REF:
776 /* Record bits, position and storage order. */
777 temp.op0 = TREE_OPERAND (ref, 1);
778 temp.op1 = TREE_OPERAND (ref, 2);
779 if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
780 temp.off = -1;
781 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
782 break;
783 case COMPONENT_REF:
784 /* The field decl is enough to unambiguously specify the field,
785 a matching type is not necessary and a mismatching type
786 is always a spurious difference. */
787 temp.type = NULL_TREE;
788 temp.op0 = TREE_OPERAND (ref, 1);
789 temp.op1 = TREE_OPERAND (ref, 2);
791 tree this_offset = component_ref_field_offset (ref);
792 if (this_offset
793 && poly_int_tree_p (this_offset))
795 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
796 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
798 poly_offset_int off
799 = (wi::to_poly_offset (this_offset)
800 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
801 /* Prohibit value-numbering zero offset components
802 of addresses as equal before the pass folding
803 __builtin_object_size had a chance to run
804 (checking cfun->after_inlining does the
805 trick here). */
806 if (TREE_CODE (orig) != ADDR_EXPR
807 || maybe_ne (off, 0)
808 || cfun->after_inlining)
809 off.to_shwi (&temp.off);
813 break;
814 case ARRAY_RANGE_REF:
815 case ARRAY_REF:
817 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
818 /* Record index as operand. */
819 temp.op0 = TREE_OPERAND (ref, 1);
820 /* Always record lower bounds and element size. */
821 temp.op1 = array_ref_low_bound (ref);
822 /* But record element size in units of the type alignment. */
823 temp.op2 = TREE_OPERAND (ref, 3);
824 temp.align = eltype->type_common.align;
825 if (! temp.op2)
826 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
827 size_int (TYPE_ALIGN_UNIT (eltype)));
828 if (poly_int_tree_p (temp.op0)
829 && poly_int_tree_p (temp.op1)
830 && TREE_CODE (temp.op2) == INTEGER_CST)
832 poly_offset_int off = ((wi::to_poly_offset (temp.op0)
833 - wi::to_poly_offset (temp.op1))
834 * wi::to_offset (temp.op2)
835 * vn_ref_op_align_unit (&temp));
836 off.to_shwi (&temp.off);
839 break;
840 case VAR_DECL:
841 if (DECL_HARD_REGISTER (ref))
843 temp.op0 = ref;
844 break;
846 /* Fallthru. */
847 case PARM_DECL:
848 case CONST_DECL:
849 case RESULT_DECL:
850 /* Canonicalize decls to MEM[&decl] which is what we end up with
851 when valueizing MEM[ptr] with ptr = &decl. */
852 temp.opcode = MEM_REF;
853 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
854 temp.off = 0;
855 result->safe_push (temp);
856 temp.opcode = ADDR_EXPR;
857 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
858 temp.type = TREE_TYPE (temp.op0);
859 temp.off = -1;
860 break;
861 case STRING_CST:
862 case INTEGER_CST:
863 case COMPLEX_CST:
864 case VECTOR_CST:
865 case REAL_CST:
866 case FIXED_CST:
867 case CONSTRUCTOR:
868 case SSA_NAME:
869 temp.op0 = ref;
870 break;
871 case ADDR_EXPR:
872 if (is_gimple_min_invariant (ref))
874 temp.op0 = ref;
875 break;
877 break;
878 /* These are only interesting for their operands, their
879 existence, and their type. They will never be the last
880 ref in the chain of references (i.e. they require an
881 operand), so we don't have to put anything
882 for op* as it will be handled by the iteration. */
883 case REALPART_EXPR:
884 temp.off = 0;
885 break;
886 case VIEW_CONVERT_EXPR:
887 temp.off = 0;
888 temp.reverse = storage_order_barrier_p (ref);
889 break;
890 case IMAGPART_EXPR:
891 /* This is only interesting for its constant offset. */
892 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
893 break;
894 default:
895 gcc_unreachable ();
897 result->safe_push (temp);
899 if (REFERENCE_CLASS_P (ref)
900 || TREE_CODE (ref) == MODIFY_EXPR
901 || TREE_CODE (ref) == WITH_SIZE_EXPR
902 || (TREE_CODE (ref) == ADDR_EXPR
903 && !is_gimple_min_invariant (ref)))
904 ref = TREE_OPERAND (ref, 0);
905 else
906 ref = NULL_TREE;
910 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
911 operands in *OPS, the reference alias set SET and the reference type TYPE.
912 Return true if something useful was produced. */
914 bool
915 ao_ref_init_from_vn_reference (ao_ref *ref,
916 alias_set_type set, tree type,
917 vec<vn_reference_op_s> ops)
919 vn_reference_op_t op;
920 unsigned i;
921 tree base = NULL_TREE;
922 tree *op0_p = &base;
923 poly_offset_int offset = 0;
924 poly_offset_int max_size;
925 poly_offset_int size = -1;
926 tree size_tree = NULL_TREE;
927 alias_set_type base_alias_set = -1;
929 /* First get the final access size from just the outermost expression. */
930 op = &ops[0];
931 if (op->opcode == COMPONENT_REF)
932 size_tree = DECL_SIZE (op->op0);
933 else if (op->opcode == BIT_FIELD_REF)
934 size_tree = op->op0;
935 else
937 machine_mode mode = TYPE_MODE (type);
938 if (mode == BLKmode)
939 size_tree = TYPE_SIZE (type);
940 else
941 size = GET_MODE_BITSIZE (mode);
943 if (size_tree != NULL_TREE
944 && poly_int_tree_p (size_tree))
945 size = wi::to_poly_offset (size_tree);
947 /* Initially, maxsize is the same as the accessed element size.
948 In the following it will only grow (or become -1). */
949 max_size = size;
951 /* Compute cumulative bit-offset for nested component-refs and array-refs,
952 and find the ultimate containing object. */
953 FOR_EACH_VEC_ELT (ops, i, op)
955 switch (op->opcode)
957 /* These may be in the reference ops, but we cannot do anything
958 sensible with them here. */
959 case ADDR_EXPR:
960 /* Apart from ADDR_EXPR arguments to MEM_REF. */
961 if (base != NULL_TREE
962 && TREE_CODE (base) == MEM_REF
963 && op->op0
964 && DECL_P (TREE_OPERAND (op->op0, 0)))
966 vn_reference_op_t pop = &ops[i-1];
967 base = TREE_OPERAND (op->op0, 0);
968 if (known_eq (pop->off, -1))
970 max_size = -1;
971 offset = 0;
973 else
974 offset += pop->off * BITS_PER_UNIT;
975 op0_p = NULL;
976 break;
978 /* Fallthru. */
979 case CALL_EXPR:
980 return false;
982 /* Record the base objects. */
983 case MEM_REF:
984 base_alias_set = get_deref_alias_set (op->op0);
985 *op0_p = build2 (MEM_REF, op->type,
986 NULL_TREE, op->op0);
987 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
988 MR_DEPENDENCE_BASE (*op0_p) = op->base;
989 op0_p = &TREE_OPERAND (*op0_p, 0);
990 break;
992 case VAR_DECL:
993 case PARM_DECL:
994 case RESULT_DECL:
995 case SSA_NAME:
996 *op0_p = op->op0;
997 op0_p = NULL;
998 break;
1000 /* And now the usual component-reference style ops. */
1001 case BIT_FIELD_REF:
1002 offset += wi::to_offset (op->op1);
1003 break;
1005 case COMPONENT_REF:
1007 tree field = op->op0;
1008 /* We do not have a complete COMPONENT_REF tree here so we
1009 cannot use component_ref_field_offset. Do the interesting
1010 parts manually. */
1011 tree this_offset = DECL_FIELD_OFFSET (field);
1013 if (op->op1 || !poly_int_tree_p (this_offset))
1014 max_size = -1;
1015 else
1017 poly_offset_int woffset = (wi::to_poly_offset (this_offset)
1018 << LOG2_BITS_PER_UNIT);
1019 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1020 offset += woffset;
1022 break;
1025 case ARRAY_RANGE_REF:
1026 case ARRAY_REF:
1027 /* We recorded the lower bound and the element size. */
1028 if (!poly_int_tree_p (op->op0)
1029 || !poly_int_tree_p (op->op1)
1030 || TREE_CODE (op->op2) != INTEGER_CST)
1031 max_size = -1;
1032 else
1034 poly_offset_int woffset
1035 = wi::sext (wi::to_poly_offset (op->op0)
1036 - wi::to_poly_offset (op->op1),
1037 TYPE_PRECISION (TREE_TYPE (op->op0)));
1038 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1039 woffset <<= LOG2_BITS_PER_UNIT;
1040 offset += woffset;
1042 break;
1044 case REALPART_EXPR:
1045 break;
1047 case IMAGPART_EXPR:
1048 offset += size;
1049 break;
1051 case VIEW_CONVERT_EXPR:
1052 break;
1054 case STRING_CST:
1055 case INTEGER_CST:
1056 case COMPLEX_CST:
1057 case VECTOR_CST:
1058 case REAL_CST:
1059 case CONSTRUCTOR:
1060 case CONST_DECL:
1061 return false;
1063 default:
1064 return false;
1068 if (base == NULL_TREE)
1069 return false;
1071 ref->ref = NULL_TREE;
1072 ref->base = base;
1073 ref->ref_alias_set = set;
1074 if (base_alias_set != -1)
1075 ref->base_alias_set = base_alias_set;
1076 else
1077 ref->base_alias_set = get_alias_set (base);
1078 /* We discount volatiles from value-numbering elsewhere. */
1079 ref->volatile_p = false;
1081 if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1083 ref->offset = 0;
1084 ref->size = -1;
1085 ref->max_size = -1;
1086 return true;
1089 if (!offset.to_shwi (&ref->offset))
1091 ref->offset = 0;
1092 ref->max_size = -1;
1093 return true;
1096 if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1097 ref->max_size = -1;
1099 return true;
1102 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1103 vn_reference_op_s's. */
1105 static void
1106 copy_reference_ops_from_call (gcall *call,
1107 vec<vn_reference_op_s> *result)
1109 vn_reference_op_s temp;
1110 unsigned i;
1111 tree lhs = gimple_call_lhs (call);
1112 int lr;
1114 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1115 different. By adding the lhs here in the vector, we ensure that the
1116 hashcode is different, guaranteeing a different value number. */
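/* For example (illustrative), given

     a.x = foo ();
     b.y = foo ();

   the two calls must not receive the same VDEF value number even
   though the call expressions are identical; recording the non-SSA
   lhs as an extra operand keeps their hashcodes apart.  */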
1117 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1119 memset (&temp, 0, sizeof (temp));
1120 temp.opcode = MODIFY_EXPR;
1121 temp.type = TREE_TYPE (lhs);
1122 temp.op0 = lhs;
1123 temp.off = -1;
1124 result->safe_push (temp);
1127 /* Copy the type, opcode, function, static chain and EH region, if any. */
1128 memset (&temp, 0, sizeof (temp));
1129 temp.type = gimple_call_return_type (call);
1130 temp.opcode = CALL_EXPR;
1131 temp.op0 = gimple_call_fn (call);
1132 temp.op1 = gimple_call_chain (call);
1133 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1134 temp.op2 = size_int (lr);
1135 temp.off = -1;
1136 if (gimple_call_with_bounds_p (call))
1137 temp.with_bounds = 1;
1138 result->safe_push (temp);
1140 /* Copy the call arguments. As they can be references as well,
1141 just chain them together. */
1142 for (i = 0; i < gimple_call_num_args (call); ++i)
1144 tree callarg = gimple_call_arg (call, i);
1145 copy_reference_ops_from_ref (callarg, result);
1149 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1150 *I_P to point to the last element of the replacement. */
1151 static bool
1152 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1153 unsigned int *i_p)
1155 unsigned int i = *i_p;
1156 vn_reference_op_t op = &(*ops)[i];
1157 vn_reference_op_t mem_op = &(*ops)[i - 1];
1158 tree addr_base;
1159 poly_int64 addr_offset = 0;
1161 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1162 from .foo.bar to the preceding MEM_REF offset and replace the
1163 address with &OBJ. */
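/* For instance (illustrative), MEM[&s.a.b, 8] becomes MEM[&s, 8 + C]
   where C is the constant byte offset of the .a.b path within s.  */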
1164 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1165 &addr_offset);
1166 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1167 if (addr_base != TREE_OPERAND (op->op0, 0))
1169 poly_offset_int off
1170 = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1171 SIGNED)
1172 + addr_offset);
1173 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1174 op->op0 = build_fold_addr_expr (addr_base);
1175 if (tree_fits_shwi_p (mem_op->op0))
1176 mem_op->off = tree_to_shwi (mem_op->op0);
1177 else
1178 mem_op->off = -1;
1179 return true;
1181 return false;
1184 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1185 *I_P to point to the last element of the replacement. */
1186 static bool
1187 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1188 unsigned int *i_p)
1190 unsigned int i = *i_p;
1191 vn_reference_op_t op = &(*ops)[i];
1192 vn_reference_op_t mem_op = &(*ops)[i - 1];
1193 gimple *def_stmt;
1194 enum tree_code code;
1195 poly_offset_int off;
1197 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1198 if (!is_gimple_assign (def_stmt))
1199 return false;
1201 code = gimple_assign_rhs_code (def_stmt);
1202 if (code != ADDR_EXPR
1203 && code != POINTER_PLUS_EXPR)
1204 return false;
1206 off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
1208 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1209 from .foo.bar to the preceding MEM_REF offset and replace the
1210 address with &OBJ. */
1211 if (code == ADDR_EXPR)
1213 tree addr, addr_base;
1214 poly_int64 addr_offset;
1216 addr = gimple_assign_rhs1 (def_stmt);
1217 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1218 &addr_offset);
1219 /* If that didn't work because the address isn't invariant propagate
1220 the reference tree from the address operation in case the current
1221 dereference isn't offsetted. */
1222 if (!addr_base
1223 && *i_p == ops->length () - 1
1224 && known_eq (off, 0)
1225 /* This makes us disable this transform for PRE where the
1226 reference ops might be also used for code insertion which
1227 is invalid. */
1228 && default_vn_walk_kind == VN_WALKREWRITE)
1230 auto_vec<vn_reference_op_s, 32> tem;
1231 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1232 /* Make sure to preserve TBAA info. The only objects not
1233 wrapped in MEM_REFs that can have their address taken are
1234 STRING_CSTs. */
1235 if (tem.length () >= 2
1236 && tem[tem.length () - 2].opcode == MEM_REF)
1238 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1239 new_mem_op->op0
1240 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1241 wi::to_poly_wide (new_mem_op->op0));
1243 else
1244 gcc_assert (tem.last ().opcode == STRING_CST);
1245 ops->pop ();
1246 ops->pop ();
1247 ops->safe_splice (tem);
1248 --*i_p;
1249 return true;
1251 if (!addr_base
1252 || TREE_CODE (addr_base) != MEM_REF
1253 || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1254 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base, 0))))
1255 return false;
1257 off += addr_offset;
1258 off += mem_ref_offset (addr_base);
1259 op->op0 = TREE_OPERAND (addr_base, 0);
1261 else
1263 tree ptr, ptroff;
1264 ptr = gimple_assign_rhs1 (def_stmt);
1265 ptroff = gimple_assign_rhs2 (def_stmt);
1266 if (TREE_CODE (ptr) != SSA_NAME
1267 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1268 || TREE_CODE (ptroff) != INTEGER_CST)
1269 return false;
1271 off += wi::to_offset (ptroff);
1272 op->op0 = ptr;
1275 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1276 if (tree_fits_shwi_p (mem_op->op0))
1277 mem_op->off = tree_to_shwi (mem_op->op0);
1278 else
1279 mem_op->off = -1;
1280 if (TREE_CODE (op->op0) == SSA_NAME)
1281 op->op0 = SSA_VAL (op->op0);
1282 if (TREE_CODE (op->op0) != SSA_NAME)
1283 op->opcode = TREE_CODE (op->op0);
1285 /* And recurse. */
1286 if (TREE_CODE (op->op0) == SSA_NAME)
1287 vn_reference_maybe_forwprop_address (ops, i_p);
1288 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1289 vn_reference_fold_indirect (ops, i_p);
1290 return true;
1293 /* Optimize the reference REF to a constant if possible or return
1294 NULL_TREE if not. */
1296 tree
1297 fully_constant_vn_reference_p (vn_reference_t ref)
1299 vec<vn_reference_op_s> operands = ref->operands;
1300 vn_reference_op_t op;
1302 /* Try to simplify the translated expression if it is
1303 a call to a builtin function with at most two arguments. */
1304 op = &operands[0];
1305 if (op->opcode == CALL_EXPR
1306 && TREE_CODE (op->op0) == ADDR_EXPR
1307 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1308 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1309 && operands.length () >= 2
1310 && operands.length () <= 3)
1312 vn_reference_op_t arg0, arg1 = NULL;
1313 bool anyconst = false;
1314 arg0 = &operands[1];
1315 if (operands.length () > 2)
1316 arg1 = &operands[2];
1317 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1318 || (arg0->opcode == ADDR_EXPR
1319 && is_gimple_min_invariant (arg0->op0)))
1320 anyconst = true;
1321 if (arg1
1322 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1323 || (arg1->opcode == ADDR_EXPR
1324 && is_gimple_min_invariant (arg1->op0))))
1325 anyconst = true;
1326 if (anyconst)
1328 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1329 arg1 ? 2 : 1,
1330 arg0->op0,
1331 arg1 ? arg1->op0 : NULL);
1332 if (folded
1333 && TREE_CODE (folded) == NOP_EXPR)
1334 folded = TREE_OPERAND (folded, 0);
1335 if (folded
1336 && is_gimple_min_invariant (folded))
1337 return folded;
1341 /* Simplify reads from constants or constant initializers. */
1342 else if (BITS_PER_UNIT == 8
1343 && is_gimple_reg_type (ref->type)
1344 && (!INTEGRAL_TYPE_P (ref->type)
1345 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1347 poly_int64 off = 0;
1348 HOST_WIDE_INT size;
1349 if (INTEGRAL_TYPE_P (ref->type))
1350 size = TYPE_PRECISION (ref->type);
1351 else
1352 size = tree_to_shwi (TYPE_SIZE (ref->type));
1353 if (size % BITS_PER_UNIT != 0
1354 || size > MAX_BITSIZE_MODE_ANY_MODE)
1355 return NULL_TREE;
1356 size /= BITS_PER_UNIT;
1357 unsigned i;
1358 for (i = 0; i < operands.length (); ++i)
1360 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1362 ++i;
1363 break;
1365 if (known_eq (operands[i].off, -1))
1366 return NULL_TREE;
1367 off += operands[i].off;
1368 if (operands[i].opcode == MEM_REF)
1370 ++i;
1371 break;
1374 vn_reference_op_t base = &operands[--i];
1375 tree ctor = error_mark_node;
1376 tree decl = NULL_TREE;
1377 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1378 ctor = base->op0;
1379 else if (base->opcode == MEM_REF
1380 && base[1].opcode == ADDR_EXPR
1381 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1382 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1383 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1385 decl = TREE_OPERAND (base[1].op0, 0);
1386 if (TREE_CODE (decl) == STRING_CST)
1387 ctor = decl;
1388 else
1389 ctor = ctor_for_folding (decl);
1391 if (ctor == NULL_TREE)
1392 return build_zero_cst (ref->type);
1393 else if (ctor != error_mark_node)
1395 HOST_WIDE_INT const_off;
1396 if (decl)
1398 tree res = fold_ctor_reference (ref->type, ctor,
1399 off * BITS_PER_UNIT,
1400 size * BITS_PER_UNIT, decl);
1401 if (res)
1403 STRIP_USELESS_TYPE_CONVERSION (res);
1404 if (is_gimple_min_invariant (res))
1405 return res;
1408 else if (off.is_constant (&const_off))
1410 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1411 int len = native_encode_expr (ctor, buf, size, const_off);
1412 if (len > 0)
1413 return native_interpret_expr (ref->type, buf, len);
1418 return NULL_TREE;
1421 /* Return true if OPS contain a storage order barrier. */
1423 static bool
1424 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1426 vn_reference_op_t op;
1427 unsigned i;
1429 FOR_EACH_VEC_ELT (ops, i, op)
1430 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1431 return true;
1433 return false;
1436 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1437 structures into their value numbers. This is done in-place, and
1438 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1439 whether any operands were valueized. */
1441 static vec<vn_reference_op_s>
1442 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1444 vn_reference_op_t vro;
1445 unsigned int i;
1447 *valueized_anything = false;
1449 FOR_EACH_VEC_ELT (orig, i, vro)
1451 if (vro->opcode == SSA_NAME
1452 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1454 tree tem = SSA_VAL (vro->op0);
1455 if (tem != vro->op0)
1457 *valueized_anything = true;
1458 vro->op0 = tem;
1460 /* If it transforms from an SSA_NAME to a constant, update
1461 the opcode. */
1462 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1463 vro->opcode = TREE_CODE (vro->op0);
1465 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1467 tree tem = SSA_VAL (vro->op1);
1468 if (tem != vro->op1)
1470 *valueized_anything = true;
1471 vro->op1 = tem;
1474 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1476 tree tem = SSA_VAL (vro->op2);
1477 if (tem != vro->op2)
1479 *valueized_anything = true;
1480 vro->op2 = tem;
1483 /* If it transforms from an SSA_NAME to an address, fold with
1484 a preceding indirect reference. */
1485 if (i > 0
1486 && vro->op0
1487 && TREE_CODE (vro->op0) == ADDR_EXPR
1488 && orig[i - 1].opcode == MEM_REF)
1490 if (vn_reference_fold_indirect (&orig, &i))
1491 *valueized_anything = true;
1493 else if (i > 0
1494 && vro->opcode == SSA_NAME
1495 && orig[i - 1].opcode == MEM_REF)
1497 if (vn_reference_maybe_forwprop_address (&orig, &i))
1498 *valueized_anything = true;
1500 /* If it transforms a non-constant ARRAY_REF into a constant
1501 one, adjust the constant offset. */
1502 else if (vro->opcode == ARRAY_REF
1503 && known_eq (vro->off, -1)
1504 && poly_int_tree_p (vro->op0)
1505 && poly_int_tree_p (vro->op1)
1506 && TREE_CODE (vro->op2) == INTEGER_CST)
1508 poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1509 - wi::to_poly_offset (vro->op1))
1510 * wi::to_offset (vro->op2)
1511 * vn_ref_op_align_unit (vro));
1512 off.to_shwi (&vro->off);
1516 return orig;
1519 static vec<vn_reference_op_s>
1520 valueize_refs (vec<vn_reference_op_s> orig)
1522 bool tem;
1523 return valueize_refs_1 (orig, &tem);
1526 static vec<vn_reference_op_s> shared_lookup_references;
1528 /* Create a vector of vn_reference_op_s structures from REF, a
1529 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1530 this function. *VALUEIZED_ANYTHING will specify whether any
1531 operands were valueized. */
1533 static vec<vn_reference_op_s>
1534 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1536 if (!ref)
1537 return vNULL;
1538 shared_lookup_references.truncate (0);
1539 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1540 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1541 valueized_anything);
1542 return shared_lookup_references;
1545 /* Create a vector of vn_reference_op_s structures from CALL, a
1546 call statement. The vector is shared among all callers of
1547 this function. */
1549 static vec<vn_reference_op_s>
1550 valueize_shared_reference_ops_from_call (gcall *call)
1552 if (!call)
1553 return vNULL;
1554 shared_lookup_references.truncate (0);
1555 copy_reference_ops_from_call (call, &shared_lookup_references);
1556 shared_lookup_references = valueize_refs (shared_lookup_references);
1557 return shared_lookup_references;
1560 /* Lookup a SCCVN reference operation VR in the current hash table.
1561 Returns the resulting value number if it exists in the hash table,
1562 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1563 vn_reference_t stored in the hashtable if something is found. */
1565 static tree
1566 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1568 vn_reference_s **slot;
1569 hashval_t hash;
1571 hash = vr->hashcode;
1572 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1573 if (!slot && current_info == optimistic_info)
1574 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1575 if (slot)
1577 if (vnresult)
1578 *vnresult = (vn_reference_t)*slot;
1579 return ((vn_reference_t)*slot)->result;
1582 return NULL_TREE;
1585 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1586 with the current VUSE and performs the expression lookup. */
1588 static void *
1589 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1590 unsigned int cnt, void *vr_)
1592 vn_reference_t vr = (vn_reference_t)vr_;
1593 vn_reference_s **slot;
1594 hashval_t hash;
1596 /* This bounds the stmt walks we perform on reference lookups
1597 to O(1) instead of O(N) where N is the number of dominating
1598 stores. */
1599 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1600 return (void *)-1;
1602 if (last_vuse_ptr)
1603 *last_vuse_ptr = vuse;
1605 /* Fixup vuse and hash. */
1606 if (vr->vuse)
1607 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1608 vr->vuse = vuse_ssa_val (vuse);
1609 if (vr->vuse)
1610 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1612 hash = vr->hashcode;
1613 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1614 if (!slot && current_info == optimistic_info)
1615 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1616 if (slot)
1617 return *slot;
1619 return NULL;
1622 /* Lookup an existing or insert a new vn_reference entry into the
1623 value table for the VUSE, SET, TYPE, OPERANDS reference which
1624 has the value VALUE which is either a constant or an SSA name. */
1626 static vn_reference_t
1627 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1628 alias_set_type set,
1629 tree type,
1630 vec<vn_reference_op_s,
1631 va_heap> operands,
1632 tree value)
1634 vn_reference_s vr1;
1635 vn_reference_t result;
1636 unsigned value_id;
1637 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1638 vr1.operands = operands;
1639 vr1.type = type;
1640 vr1.set = set;
1641 vr1.hashcode = vn_reference_compute_hash (&vr1);
1642 if (vn_reference_lookup_1 (&vr1, &result))
1643 return result;
1644 if (TREE_CODE (value) == SSA_NAME)
1645 value_id = VN_INFO (value)->value_id;
1646 else
1647 value_id = get_or_alloc_constant_value_id (value);
1648 return vn_reference_insert_pieces (vuse, set, type,
1649 operands.copy (), value, value_id);
1652 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *stmt, tree result);
1653 static unsigned mprts_hook_cnt;
1655 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
1657 static tree
1658 vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops_)
1660 if (!rcode.is_tree_code ())
1661 return NULL_TREE;
1662 tree *ops = ops_;
1663 unsigned int length = TREE_CODE_LENGTH ((tree_code) rcode);
1664 if (rcode == CONSTRUCTOR
1665 /* ??? We're arriving here with SCCVN's view, a decomposed CONSTRUCTOR,
1666 while GIMPLE / match-and-simplify expects CONSTRUCTOR as a GENERIC tree. */
1667 && TREE_CODE (ops_[0]) == CONSTRUCTOR)
1669 length = CONSTRUCTOR_NELTS (ops_[0]);
1670 ops = XALLOCAVEC (tree, length);
1671 for (unsigned i = 0; i < length; ++i)
1672 ops[i] = CONSTRUCTOR_ELT (ops_[0], i)->value;
1674 vn_nary_op_t vnresult = NULL;
1675 tree res = vn_nary_op_lookup_pieces (length, (tree_code) rcode,
1676 type, ops, &vnresult);
1677 /* We can end up endlessly recursing simplifications if the lookup above
1678 presents us with a def-use chain that mirrors the original simplification.
1679 See PR80887 for an example. Limit successful lookup artificially
1680 to 10 times if we are called as mprts_hook. */
1681 if (res
1682 && mprts_hook
1683 && --mprts_hook_cnt == 0)
1685 if (dump_file && (dump_flags & TDF_DETAILS))
1686 fprintf (dump_file, "Resetting mprts_hook after too many "
1687 "invocations.\n");
1688 mprts_hook = NULL;
1690 return res;
1693 /* Return a value-number for RCODE OPS... either by looking up an existing
1694 value-number for the simplified result or by inserting the operation if
1695 INSERT is true. */
1697 static tree
1698 vn_nary_build_or_lookup_1 (code_helper rcode, tree type, tree *ops,
1699 bool insert)
1701 tree result = NULL_TREE;
1702 /* We will be creating a value number for
1703 RCODE (OPS...).
1704 So first simplify and lookup this expression to see if it
1705 is already available. */
1706 mprts_hook = vn_lookup_simplify_result;
1707 mprts_hook_cnt = 9;
1708 bool res = false;
1709 switch (TREE_CODE_LENGTH ((tree_code) rcode))
1711 case 1:
1712 res = gimple_resimplify1 (NULL, &rcode, type, ops, vn_valueize);
1713 break;
1714 case 2:
1715 res = gimple_resimplify2 (NULL, &rcode, type, ops, vn_valueize);
1716 break;
1717 case 3:
1718 res = gimple_resimplify3 (NULL, &rcode, type, ops, vn_valueize);
1719 break;
1721 mprts_hook = NULL;
1722 gimple *new_stmt = NULL;
1723 if (res
1724 && gimple_simplified_result_is_gimple_val (rcode, ops))
1725 /* The expression is already available. */
1726 result = ops[0];
1727 else
1729 tree val = vn_lookup_simplify_result (rcode, type, ops);
1730 if (!val && insert)
1732 gimple_seq stmts = NULL;
1733 result = maybe_push_res_to_seq (rcode, type, ops, &stmts);
1734 if (result)
1736 gcc_assert (gimple_seq_singleton_p (stmts));
1737 new_stmt = gimple_seq_first_stmt (stmts);
1740 else
1741 /* The expression is already available. */
1742 result = val;
1744 if (new_stmt)
1746 /* The expression is not yet available, value-number lhs to
1747 the new SSA_NAME we created. */
1748 /* Initialize value-number information properly. */
1749 VN_INFO_GET (result)->valnum = result;
1750 VN_INFO (result)->value_id = get_next_value_id ();
1751 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1752 new_stmt);
1753 VN_INFO (result)->needs_insertion = true;
1754 /* ??? PRE phi-translation inserts NARYs without corresponding
1755 SSA name result. Re-use those but set their result according
1756 to the stmt we just built. */
1757 vn_nary_op_t nary = NULL;
1758 vn_nary_op_lookup_stmt (new_stmt, &nary);
1759 if (nary)
1761 gcc_assert (nary->result == NULL_TREE);
1762 nary->result = gimple_assign_lhs (new_stmt);
1764 /* As all "inserted" statements are singleton SCCs, insert
1765 to the valid table. This is strictly needed to
1766 avoid re-generating new value SSA_NAMEs for the same
1767 expression during SCC iteration over and over (the
1768 optimistic table gets cleared after each iteration).
1769 We do not need to insert into the optimistic table, as
1770 lookups there will fall back to the valid table. */
1771 else if (current_info == optimistic_info)
1773 current_info = valid_info;
1774 vn_nary_op_insert_stmt (new_stmt, result);
1775 current_info = optimistic_info;
1777 else
1778 vn_nary_op_insert_stmt (new_stmt, result);
1779 if (dump_file && (dump_flags & TDF_DETAILS))
1781 fprintf (dump_file, "Inserting name ");
1782 print_generic_expr (dump_file, result);
1783 fprintf (dump_file, " for expression ");
1784 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1785 fprintf (dump_file, "\n");
1788 return result;
1791 /* Return a value-number for RCODE OPS... either by looking up an existing
1792 value-number for the simplified result or by inserting the operation. */
1794 static tree
1795 vn_nary_build_or_lookup (code_helper rcode, tree type, tree *ops)
1797 return vn_nary_build_or_lookup_1 (rcode, type, ops, true);
1800 /* Try to simplify the n-ary expression NARY and return its
1801 simplified value if present. */
1803 tree
1804 vn_nary_simplify (vn_nary_op_t nary)
1806 if (nary->length > 3)
1807 return NULL_TREE;
1808 tree ops[3];
1809 memcpy (ops, nary->op, sizeof (tree) * nary->length);
1810 return vn_nary_build_or_lookup_1 (nary->opcode, nary->type, ops, false);
1814 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1815 from the statement defining VUSE and if not successful tries to
1816 translate *REFP and VR_ through an aggregate copy at the definition
1817 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1818 of *REF and *VR. If only disambiguation was performed then
1819 *DISAMBIGUATE_ONLY is set to true. */
1821 static void *
1822 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1823 bool *disambiguate_only)
1825 vn_reference_t vr = (vn_reference_t)vr_;
1826 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1827 tree base = ao_ref_base (ref);
1828 HOST_WIDE_INT offseti, maxsizei;
1829 static vec<vn_reference_op_s> lhs_ops;
1830 ao_ref lhs_ref;
1831 bool lhs_ref_ok = false;
1832 poly_int64 copy_size;
1834 /* If the reference is based on a parameter that was determined as
1835 pointing to readonly memory it doesn't change. */
1836 if (TREE_CODE (base) == MEM_REF
1837 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1838 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1839 && bitmap_bit_p (const_parms,
1840 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1842 *disambiguate_only = true;
1843 return NULL;
1846 /* First try to disambiguate after value-replacing in the definitions LHS. */
1847 if (is_gimple_assign (def_stmt))
1849 tree lhs = gimple_assign_lhs (def_stmt);
1850 bool valueized_anything = false;
1851 /* Avoid re-allocation overhead. */
1852 lhs_ops.truncate (0);
1853 copy_reference_ops_from_ref (lhs, &lhs_ops);
1854 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1855 if (valueized_anything)
1857 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1858 get_alias_set (lhs),
1859 TREE_TYPE (lhs), lhs_ops);
1860 if (lhs_ref_ok
1861 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1863 *disambiguate_only = true;
1864 return NULL;
1867 else
1869 ao_ref_init (&lhs_ref, lhs);
1870 lhs_ref_ok = true;
1873 /* If we reach a clobbering statement try to skip it and see if
1874 we find a VN result with exactly the same value as the
1875 possible clobber. In this case we can ignore the clobber
1876 and return the found value.
1877 Note that we don't need to worry about partial overlapping
1878 accesses as we then can use TBAA to disambiguate against the
1879 clobbering statement when looking up a load (thus the
1880 VN_WALKREWRITE guard). */
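/* Illustrative example:

     x_1 = *p_2;
     *p_2 = x_1;
     y_3 = *p_2;

   The store may-clobbers the second load, but looking the load up
   with the VUSE of the store finds x_1, which is exactly the stored
   value, so the clobber can be skipped and y_3 gets x_1's value.  */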
1881 if (vn_walk_kind == VN_WALKREWRITE
1882 && is_gimple_reg_type (TREE_TYPE (lhs))
1883 && types_compatible_p (TREE_TYPE (lhs), vr->type))
1885 tree *saved_last_vuse_ptr = last_vuse_ptr;
1886 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
1887 last_vuse_ptr = NULL;
1888 tree saved_vuse = vr->vuse;
1889 hashval_t saved_hashcode = vr->hashcode;
1890 void *res = vn_reference_lookup_2 (ref,
1891 gimple_vuse (def_stmt), 0, vr);
1892 /* Need to restore vr->vuse and vr->hashcode. */
1893 vr->vuse = saved_vuse;
1894 vr->hashcode = saved_hashcode;
1895 last_vuse_ptr = saved_last_vuse_ptr;
1896 if (res && res != (void *)-1)
1898 vn_reference_t vnresult = (vn_reference_t) res;
1899 if (vnresult->result
1900 && operand_equal_p (vnresult->result,
1901 gimple_assign_rhs1 (def_stmt), 0))
1902 return res;
1906 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1907 && gimple_call_num_args (def_stmt) <= 4)
1909 /* For builtin calls valueize its arguments and call the
1910 alias oracle again. Valueization may improve points-to
1911 info of pointers and constify size and position arguments.
1912 Originally this was motivated by PR61034 which has
1913 conditional calls to free falsely clobbering ref because
1914 of imprecise points-to info of the argument. */
1915 tree oldargs[4];
1916 bool valueized_anything = false;
1917 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1919 oldargs[i] = gimple_call_arg (def_stmt, i);
1920 tree val = vn_valueize (oldargs[i]);
1921 if (val != oldargs[i])
1923 gimple_call_set_arg (def_stmt, i, val);
1924 valueized_anything = true;
1927 if (valueized_anything)
1929 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1930 ref);
1931 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1932 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1933 if (!res)
1935 *disambiguate_only = true;
1936 return NULL;
1941 if (*disambiguate_only)
1942 return (void *)-1;
1944 /* If we cannot constrain the size of the reference we cannot
1945 test if anything kills it. */
1946 if (!ref->max_size_known_p ())
1947 return (void *)-1;
1949 poly_int64 offset = ref->offset;
1950 poly_int64 maxsize = ref->max_size;
1952 /* We can't deduce anything useful from clobbers. */
1953 if (gimple_clobber_p (def_stmt))
1954 return (void *)-1;
1956 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1957 from that definition.
1958 1) Memset. */
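/* Illustrative example: after

     memset (p_1, 0, 16);

   a load of a register-type value that is fully covered by the 16
   bytes written, e.g. a 4-byte load at offset 4 from p_1, can be
   value-numbered to zero by the code below.  */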
1959 if (is_gimple_reg_type (vr->type)
1960 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1961 && (integer_zerop (gimple_call_arg (def_stmt, 1))
1962 || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
1963 && poly_int_tree_p (gimple_call_arg (def_stmt, 2))
1964 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
1965 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
1967 tree base2;
1968 poly_int64 offset2, size2, maxsize2;
1969 bool reverse;
1970 tree ref2 = gimple_call_arg (def_stmt, 0);
1971 if (TREE_CODE (ref2) == SSA_NAME)
1973 ref2 = SSA_VAL (ref2);
1974 if (TREE_CODE (ref2) == SSA_NAME
1975 && (TREE_CODE (base) != MEM_REF
1976 || TREE_OPERAND (base, 0) != ref2))
1978 gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
1979 if (gimple_assign_single_p (def_stmt)
1980 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
1981 ref2 = gimple_assign_rhs1 (def_stmt);
1984 if (TREE_CODE (ref2) == ADDR_EXPR)
1986 ref2 = TREE_OPERAND (ref2, 0);
1987 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1988 &reverse);
1989 if (!known_size_p (maxsize2)
1990 || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
1991 return (void *)-1;
1993 else if (TREE_CODE (ref2) == SSA_NAME)
1995 poly_int64 soff;
1996 if (TREE_CODE (base) != MEM_REF
1997 || !(mem_ref_offset (base) << LOG2_BITS_PER_UNIT).to_shwi (&soff))
1998 return (void *)-1;
1999 offset += soff;
2000 offset2 = 0;
2001 if (TREE_OPERAND (base, 0) != ref2)
2003 gimple *def = SSA_NAME_DEF_STMT (ref2);
2004 if (is_gimple_assign (def)
2005 && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
2006 && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
2007 && poly_int_tree_p (gimple_assign_rhs2 (def))
2008 && (wi::to_poly_offset (gimple_assign_rhs2 (def))
2009 << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
2011 ref2 = gimple_assign_rhs1 (def);
2012 if (TREE_CODE (ref2) == SSA_NAME)
2013 ref2 = SSA_VAL (ref2);
2015 else
2016 return (void *)-1;
2019 else
2020 return (void *)-1;
2021 tree len = gimple_call_arg (def_stmt, 2);
2022 if (known_subrange_p (offset, maxsize, offset2,
2023 wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
2025 tree val;
2026 if (integer_zerop (gimple_call_arg (def_stmt, 1)))
2027 val = build_zero_cst (vr->type);
2028 else
2029 val = fold_convert (vr->type, gimple_call_arg (def_stmt, 1));
2030 return vn_reference_lookup_or_insert_for_pieces
2031 (vuse, vr->set, vr->type, vr->operands, val);
2035 /* 2) Assignment from an empty CONSTRUCTOR. */
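/* For example (a sketch):  a = {};  followed by  x_1 = a.f;  gives x_1
   a zero of the type of the load, since the empty CONSTRUCTOR clears
   the whole object.  */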
2036 else if (is_gimple_reg_type (vr->type)
2037 && gimple_assign_single_p (def_stmt)
2038 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
2039 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
2041 tree base2;
2042 poly_int64 offset2, size2, maxsize2;
2043 bool reverse;
2044 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2045 &offset2, &size2, &maxsize2, &reverse);
2046 if (known_size_p (maxsize2)
2047 && operand_equal_p (base, base2, 0)
2048 && known_subrange_p (offset, maxsize, offset2, size2))
2050 tree val = build_zero_cst (vr->type);
2051 return vn_reference_lookup_or_insert_for_pieces
2052 (vuse, vr->set, vr->type, vr->operands, val);
2056 /* 3) Assignment from a constant. We can use fold's native encode/interpret
2057 routines to extract the assigned bits. */
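/* For example (a sketch):  u.i = 0x11223344;  followed by a load of a
   byte overlapping u.i: the constant is encoded into a byte buffer and
   the bytes covered by the load are interpreted back in the type of
   the load.  */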
2058 else if (known_eq (ref->size, maxsize)
2059 && is_gimple_reg_type (vr->type)
2060 && !contains_storage_order_barrier_p (vr->operands)
2061 && gimple_assign_single_p (def_stmt)
2062 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2063 /* native_encode and native_decode operate on arrays of bytes
2064 and so fundamentally need a compile-time size and offset. */
2065 && maxsize.is_constant (&maxsizei)
2066 && maxsizei % BITS_PER_UNIT == 0
2067 && offset.is_constant (&offseti)
2068 && offseti % BITS_PER_UNIT == 0
2069 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2070 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2071 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2073 tree base2;
2074 HOST_WIDE_INT offset2, size2;
2075 bool reverse;
2076 base2 = get_ref_base_and_extent_hwi (gimple_assign_lhs (def_stmt),
2077 &offset2, &size2, &reverse);
2078 if (base2
2079 && !reverse
2080 && size2 % BITS_PER_UNIT == 0
2081 && offset2 % BITS_PER_UNIT == 0
2082 && operand_equal_p (base, base2, 0)
2083 && known_subrange_p (offseti, maxsizei, offset2, size2))
2085 /* We support up to 512-bit values (for V8DFmode). */
2086 unsigned char buffer[64];
2087 int len;
2089 tree rhs = gimple_assign_rhs1 (def_stmt);
2090 if (TREE_CODE (rhs) == SSA_NAME)
2091 rhs = SSA_VAL (rhs);
2092 len = native_encode_expr (rhs,
2093 buffer, sizeof (buffer),
2094 (offseti - offset2) / BITS_PER_UNIT);
2095 if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
2097 tree type = vr->type;
2098 /* Make sure to interpret in a type that has a range
2099 covering the whole access size. */
2100 if (INTEGRAL_TYPE_P (vr->type)
2101 && maxsizei != TYPE_PRECISION (vr->type))
2102 type = build_nonstandard_integer_type (maxsizei,
2103 TYPE_UNSIGNED (type));
2104 tree val = native_interpret_expr (type, buffer,
2105 maxsizei / BITS_PER_UNIT);
2106 /* If we chop off bits because the type's precision doesn't
2107 match the memory access size, this is ok when optimizing
2108 reads but not when called from the DSE code during
2109 elimination. */
2110 if (val
2111 && type != vr->type)
2113 if (! int_fits_type_p (val, vr->type))
2114 val = NULL_TREE;
2115 else
2116 val = fold_convert (vr->type, val);
2119 if (val)
2120 return vn_reference_lookup_or_insert_for_pieces
2121 (vuse, vr->set, vr->type, vr->operands, val);
2126 /* 4) Assignment from an SSA name whose definition we may be able
2127 to access pieces from. */
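/* For example (a sketch):  a = v_1;  followed by  x_2 = a.f;  values
   x_2 as BIT_FIELD_REF <v_1, size-of-f, offset-of-f>, built or looked
   up via vn_nary_build_or_lookup.  */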
2128 else if (known_eq (ref->size, maxsize)
2129 && is_gimple_reg_type (vr->type)
2130 && !contains_storage_order_barrier_p (vr->operands)
2131 && gimple_assign_single_p (def_stmt)
2132 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2134 tree base2;
2135 poly_int64 offset2, size2, maxsize2;
2136 bool reverse;
2137 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2138 &offset2, &size2, &maxsize2,
2139 &reverse);
2140 if (!reverse
2141 && known_size_p (maxsize2)
2142 && known_eq (maxsize2, size2)
2143 && operand_equal_p (base, base2, 0)
2144 && known_subrange_p (offset, maxsize, offset2, size2)
2145 /* ??? We can't handle bitfield precision extracts without
2146 either using an alternate type for the BIT_FIELD_REF and
2147 then doing a conversion or possibly adjusting the offset
2148 according to endianness. */
2149 && (! INTEGRAL_TYPE_P (vr->type)
2150 || known_eq (ref->size, TYPE_PRECISION (vr->type)))
2151 && multiple_p (ref->size, BITS_PER_UNIT))
2153 code_helper rcode = BIT_FIELD_REF;
2154 tree ops[3];
2155 ops[0] = SSA_VAL (gimple_assign_rhs1 (def_stmt));
2156 ops[1] = bitsize_int (ref->size);
2157 ops[2] = bitsize_int (offset - offset2);
2158 tree val = vn_nary_build_or_lookup (rcode, vr->type, ops);
2159 if (val
2160 && (TREE_CODE (val) != SSA_NAME
2161 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2163 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2164 (vuse, vr->set, vr->type, vr->operands, val);
2165 return res;
2170 /* 5) For aggregate copies translate the reference through them if
2171 the copy kills ref. */
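/* For example (a sketch):  a = b;  followed by  x_1 = a.f;  rewrites
   the lookup to b.f and keeps walking, so an earlier store to b.f (or
   a constant value for it) can be reused.  */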
2172 else if (vn_walk_kind == VN_WALKREWRITE
2173 && gimple_assign_single_p (def_stmt)
2174 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2175 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2176 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2178 tree base2;
2179 int i, j, k;
2180 auto_vec<vn_reference_op_s> rhs;
2181 vn_reference_op_t vro;
2182 ao_ref r;
2184 if (!lhs_ref_ok)
2185 return (void *)-1;
2187 /* See if the assignment kills REF. */
2188 base2 = ao_ref_base (&lhs_ref);
2189 if (!lhs_ref.max_size_known_p ()
2190 || (base != base2
2191 && (TREE_CODE (base) != MEM_REF
2192 || TREE_CODE (base2) != MEM_REF
2193 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2194 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2195 TREE_OPERAND (base2, 1))))
2196 || !stmt_kills_ref_p (def_stmt, ref))
2197 return (void *)-1;
2199 /* Find the common base of ref and the lhs. lhs_ops already
2200 contains valueized operands for the lhs. */
2201 i = vr->operands.length () - 1;
2202 j = lhs_ops.length () - 1;
2203 while (j >= 0 && i >= 0
2204 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2206 i--;
2207 j--;
2210 /* ??? The innermost op should always be a MEM_REF and we already
2211 checked that the assignment to the lhs kills vr. Thus for
2212 aggregate copies using char[] types the vn_reference_op_eq
2213 may fail when comparing types for compatibility. But we really
2214 don't care here - further lookups with the rewritten operands
2215 will simply fail if we messed up types too badly. */
2216 poly_int64 extra_off = 0;
2217 if (j == 0 && i >= 0
2218 && lhs_ops[0].opcode == MEM_REF
2219 && maybe_ne (lhs_ops[0].off, -1))
2221 if (known_eq (lhs_ops[0].off, vr->operands[i].off))
2222 i--, j--;
2223 else if (vr->operands[i].opcode == MEM_REF
2224 && maybe_ne (vr->operands[i].off, -1))
2226 extra_off = vr->operands[i].off - lhs_ops[0].off;
2227 i--, j--;
2231 /* i now points to the first additional op.
2232 ??? LHS may not be completely contained in VR, one or more
2233 VIEW_CONVERT_EXPRs could be in its way. We could at least
2234 try handling outermost VIEW_CONVERT_EXPRs. */
2235 if (j != -1)
2236 return (void *)-1;
2238 /* Punt if the additional ops contain a storage order barrier. */
2239 for (k = i; k >= 0; k--)
2241 vro = &vr->operands[k];
2242 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2243 return (void *)-1;
2246 /* Now re-write REF to be based on the rhs of the assignment. */
2247 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2249 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2250 if (maybe_ne (extra_off, 0))
2252 if (rhs.length () < 2
2253 || rhs[0].opcode != MEM_REF
2254 || known_eq (rhs[0].off, -1))
2255 return (void *)-1;
2256 rhs[0].off += extra_off;
2257 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
2258 build_int_cst (TREE_TYPE (rhs[0].op0),
2259 extra_off));
2262 /* We need to pre-pend vr->operands[0..i] to rhs. */
2263 vec<vn_reference_op_s> old = vr->operands;
2264 if (i + 1 + rhs.length () > vr->operands.length ())
2265 vr->operands.safe_grow (i + 1 + rhs.length ());
2266 else
2267 vr->operands.truncate (i + 1 + rhs.length ());
2268 FOR_EACH_VEC_ELT (rhs, j, vro)
2269 vr->operands[i + 1 + j] = *vro;
2270 vr->operands = valueize_refs (vr->operands);
2271 if (old == shared_lookup_references)
2272 shared_lookup_references = vr->operands;
2273 vr->hashcode = vn_reference_compute_hash (vr);
2275 /* Try folding the new reference to a constant. */
2276 tree val = fully_constant_vn_reference_p (vr);
2277 if (val)
2278 return vn_reference_lookup_or_insert_for_pieces
2279 (vuse, vr->set, vr->type, vr->operands, val);
2281 /* Adjust *ref from the new operands. */
2282 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2283 return (void *)-1;
2284 /* This can happen with bitfields. */
2285 if (maybe_ne (ref->size, r.size))
2286 return (void *)-1;
2287 *ref = r;
2289 /* Do not update last seen VUSE after translating. */
2290 last_vuse_ptr = NULL;
2292 /* Keep looking for the adjusted *REF / VR pair. */
2293 return NULL;
2296 /* 6) For memcpy copies translate the reference through them if
2297 the copy kills ref. */
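/* For example (a sketch):  memcpy (&a, &b, 16);  followed by a load
   from within the first 16 bytes of 'a' is rewritten to a MEM_REF
   based on &b at the corresponding offset, and the walk continues
   looking for a dominating definition of that memory.  */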
2298 else if (vn_walk_kind == VN_WALKREWRITE
2299 && is_gimple_reg_type (vr->type)
2300 /* ??? Handle BCOPY as well. */
2301 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2302 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2303 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2304 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2305 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2306 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2307 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2308 && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size))
2310 tree lhs, rhs;
2311 ao_ref r;
2312 poly_int64 rhs_offset, lhs_offset;
2313 vn_reference_op_s op;
2314 poly_uint64 mem_offset;
2315 poly_int64 at, byte_maxsize;
2317 /* Only handle non-variable, addressable refs. */
2318 if (maybe_ne (ref->size, maxsize)
2319 || !multiple_p (offset, BITS_PER_UNIT, &at)
2320 || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
2321 return (void *)-1;
2323 /* Extract a pointer base and an offset for the destination. */
2324 lhs = gimple_call_arg (def_stmt, 0);
2325 lhs_offset = 0;
2326 if (TREE_CODE (lhs) == SSA_NAME)
2328 lhs = SSA_VAL (lhs);
2329 if (TREE_CODE (lhs) == SSA_NAME)
2331 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2332 if (gimple_assign_single_p (def_stmt)
2333 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2334 lhs = gimple_assign_rhs1 (def_stmt);
2337 if (TREE_CODE (lhs) == ADDR_EXPR)
2339 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2340 &lhs_offset);
2341 if (!tem)
2342 return (void *)-1;
2343 if (TREE_CODE (tem) == MEM_REF
2344 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2346 lhs = TREE_OPERAND (tem, 0);
2347 if (TREE_CODE (lhs) == SSA_NAME)
2348 lhs = SSA_VAL (lhs);
2349 lhs_offset += mem_offset;
2351 else if (DECL_P (tem))
2352 lhs = build_fold_addr_expr (tem);
2353 else
2354 return (void *)-1;
2356 if (TREE_CODE (lhs) != SSA_NAME
2357 && TREE_CODE (lhs) != ADDR_EXPR)
2358 return (void *)-1;
2360 /* Extract a pointer base and an offset for the source. */
2361 rhs = gimple_call_arg (def_stmt, 1);
2362 rhs_offset = 0;
2363 if (TREE_CODE (rhs) == SSA_NAME)
2364 rhs = SSA_VAL (rhs);
2365 if (TREE_CODE (rhs) == ADDR_EXPR)
2367 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2368 &rhs_offset);
2369 if (!tem)
2370 return (void *)-1;
2371 if (TREE_CODE (tem) == MEM_REF
2372 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2374 rhs = TREE_OPERAND (tem, 0);
2375 rhs_offset += mem_offset;
2377 else if (DECL_P (tem)
2378 || TREE_CODE (tem) == STRING_CST)
2379 rhs = build_fold_addr_expr (tem);
2380 else
2381 return (void *)-1;
2383 if (TREE_CODE (rhs) != SSA_NAME
2384 && TREE_CODE (rhs) != ADDR_EXPR)
2385 return (void *)-1;
2387 /* The bases of the destination and the references have to agree. */
2388 if (TREE_CODE (base) == MEM_REF)
2390 if (TREE_OPERAND (base, 0) != lhs
2391 || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
2392 return (void *) -1;
2393 at += mem_offset;
2395 else if (!DECL_P (base)
2396 || TREE_CODE (lhs) != ADDR_EXPR
2397 || TREE_OPERAND (lhs, 0) != base)
2398 return (void *)-1;
2400 /* If the access is completely outside of the memcpy destination
2401 area there is no aliasing. */
2402 if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
2403 return NULL;
2404 /* And the access has to be contained within the memcpy destination. */
2405 if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
2406 return (void *)-1;
2408 /* Make room for 2 operands in the new reference. */
2409 if (vr->operands.length () < 2)
2411 vec<vn_reference_op_s> old = vr->operands;
2412 vr->operands.safe_grow_cleared (2);
2413 if (old == shared_lookup_references)
2414 shared_lookup_references = vr->operands;
2416 else
2417 vr->operands.truncate (2);
2419 /* The looked-through reference is a simple MEM_REF. */
2420 memset (&op, 0, sizeof (op));
2421 op.type = vr->type;
2422 op.opcode = MEM_REF;
2423 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2424 op.off = at - lhs_offset + rhs_offset;
2425 vr->operands[0] = op;
2426 op.type = TREE_TYPE (rhs);
2427 op.opcode = TREE_CODE (rhs);
2428 op.op0 = rhs;
2429 op.off = -1;
2430 vr->operands[1] = op;
2431 vr->hashcode = vn_reference_compute_hash (vr);
2433 /* Try folding the new reference to a constant. */
2434 tree val = fully_constant_vn_reference_p (vr);
2435 if (val)
2436 return vn_reference_lookup_or_insert_for_pieces
2437 (vuse, vr->set, vr->type, vr->operands, val);
2439 /* Adjust *ref from the new operands. */
2440 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2441 return (void *)-1;
2442 /* This can happen with bitfields. */
2443 if (maybe_ne (ref->size, r.size))
2444 return (void *)-1;
2445 *ref = r;
2447 /* Do not update last seen VUSE after translating. */
2448 last_vuse_ptr = NULL;
2450 /* Keep looking for the adjusted *REF / VR pair. */
2451 return NULL;
2454 /* Bail out and stop walking. */
2455 return (void *)-1;
2458 /* Return a reference op vector from OP that can be used for
2459 vn_reference_lookup_pieces. The caller is responsible for releasing
2460 the vector. */
2462 vec<vn_reference_op_s>
2463 vn_reference_operands_for_lookup (tree op)
2465 bool valueized;
2466 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2469 /* Lookup a reference operation by its parts, in the current hash table.
2470 Returns the resulting value number if it exists in the hash table,
2471 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2472 vn_reference_t stored in the hashtable if something is found. */
2474 tree
2475 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2476 vec<vn_reference_op_s> operands,
2477 vn_reference_t *vnresult, vn_lookup_kind kind)
2479 struct vn_reference_s vr1;
2480 vn_reference_t tmp;
2481 tree cst;
2483 if (!vnresult)
2484 vnresult = &tmp;
2485 *vnresult = NULL;
2487 vr1.vuse = vuse_ssa_val (vuse);
2488 shared_lookup_references.truncate (0);
2489 shared_lookup_references.safe_grow (operands.length ());
2490 memcpy (shared_lookup_references.address (),
2491 operands.address (),
2492 sizeof (vn_reference_op_s)
2493 * operands.length ());
2494 vr1.operands = operands = shared_lookup_references
2495 = valueize_refs (shared_lookup_references);
2496 vr1.type = type;
2497 vr1.set = set;
2498 vr1.hashcode = vn_reference_compute_hash (&vr1);
2499 if ((cst = fully_constant_vn_reference_p (&vr1)))
2500 return cst;
2502 vn_reference_lookup_1 (&vr1, vnresult);
2503 if (!*vnresult
2504 && kind != VN_NOWALK
2505 && vr1.vuse)
2507 ao_ref r;
2508 vn_walk_kind = kind;
2509 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2510 *vnresult =
2511 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2512 vn_reference_lookup_2,
2513 vn_reference_lookup_3,
2514 vuse_ssa_val, &vr1);
2515 gcc_checking_assert (vr1.operands == shared_lookup_references);
2518 if (*vnresult)
2519 return (*vnresult)->result;
2521 return NULL_TREE;
2524 /* Lookup OP in the current hash table, and return the resulting value
2525 number if it exists in the hash table. Return NULL_TREE if it does
2526 not exist in the hash table or if the result field of the structure
2527 was NULL. VNRESULT will be filled in with the vn_reference_t
2528 stored in the hashtable if one exists. When TBAA_P is false assume
2529 we are looking up a store and treat it as having alias-set zero. */
2531 tree
2532 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2533 vn_reference_t *vnresult, bool tbaa_p)
2535 vec<vn_reference_op_s> operands;
2536 struct vn_reference_s vr1;
2537 tree cst;
2538 bool valueized_anything;
2540 if (vnresult)
2541 *vnresult = NULL;
2543 vr1.vuse = vuse_ssa_val (vuse);
2544 vr1.operands = operands
2545 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2546 vr1.type = TREE_TYPE (op);
2547 vr1.set = tbaa_p ? get_alias_set (op) : 0;
2548 vr1.hashcode = vn_reference_compute_hash (&vr1);
2549 if ((cst = fully_constant_vn_reference_p (&vr1)))
2550 return cst;
2552 if (kind != VN_NOWALK
2553 && vr1.vuse)
2555 vn_reference_t wvnresult;
2556 ao_ref r;
2557 /* Make sure to use a valueized reference if we valueized anything.
2558 Otherwise preserve the full reference for advanced TBAA. */
2559 if (!valueized_anything
2560 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2561 vr1.operands))
2562 ao_ref_init (&r, op);
2563 if (! tbaa_p)
2564 r.ref_alias_set = r.base_alias_set = 0;
2565 vn_walk_kind = kind;
2566 wvnresult =
2567 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2568 vn_reference_lookup_2,
2569 vn_reference_lookup_3,
2570 vuse_ssa_val, &vr1);
2571 gcc_checking_assert (vr1.operands == shared_lookup_references);
2572 if (wvnresult)
2574 if (vnresult)
2575 *vnresult = wvnresult;
2576 return wvnresult->result;
2579 return NULL_TREE;
2582 return vn_reference_lookup_1 (&vr1, vnresult);
2585 /* Lookup CALL in the current hash table and return the entry in
2586 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2588 void
2589 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2590 vn_reference_t vr)
2592 if (vnresult)
2593 *vnresult = NULL;
2595 tree vuse = gimple_vuse (call);
2597 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2598 vr->operands = valueize_shared_reference_ops_from_call (call);
2599 vr->type = gimple_expr_type (call);
2600 vr->set = 0;
2601 vr->hashcode = vn_reference_compute_hash (vr);
2602 vn_reference_lookup_1 (vr, vnresult);
2605 /* Insert OP into the current hash table with a value number of
2606 RESULT, and return the resulting reference structure we created. */
2608 static vn_reference_t
2609 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2611 vn_reference_s **slot;
2612 vn_reference_t vr1;
2613 bool tem;
2615 vr1 = current_info->references_pool->allocate ();
2616 if (TREE_CODE (result) == SSA_NAME)
2617 vr1->value_id = VN_INFO (result)->value_id;
2618 else
2619 vr1->value_id = get_or_alloc_constant_value_id (result);
2620 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2621 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2622 vr1->type = TREE_TYPE (op);
2623 vr1->set = get_alias_set (op);
2624 vr1->hashcode = vn_reference_compute_hash (vr1);
2625 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2626 vr1->result_vdef = vdef;
2628 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2629 INSERT);
2631 /* Because we lookup stores using vuses, and value number failures
2632 using the vdefs (see visit_reference_op_store for how and why),
2633 it's possible that on failure we may try to insert an already
2634 inserted store. This is not wrong; there is no SSA name for a
2635 store that we could use as a differentiator anyway. Thus, unlike
2636 the other lookup functions, you cannot gcc_assert (!*slot)
2637 here. */
2639 /* But free the old slot in case of a collision. */
2640 if (*slot)
2641 free_reference (*slot);
2643 *slot = vr1;
2644 return vr1;
2647 /* Insert a reference by it's pieces into the current hash table with
2648 a value number of RESULT. Return the resulting reference
2649 structure we created. */
2651 vn_reference_t
2652 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2653 vec<vn_reference_op_s> operands,
2654 tree result, unsigned int value_id)
2657 vn_reference_s **slot;
2658 vn_reference_t vr1;
2660 vr1 = current_info->references_pool->allocate ();
2661 vr1->value_id = value_id;
2662 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2663 vr1->operands = valueize_refs (operands);
2664 vr1->type = type;
2665 vr1->set = set;
2666 vr1->hashcode = vn_reference_compute_hash (vr1);
2667 if (result && TREE_CODE (result) == SSA_NAME)
2668 result = SSA_VAL (result);
2669 vr1->result = result;
2671 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2672 INSERT);
2674 /* At this point we should have all the things inserted that we have
2675 seen before, and we should never try inserting something that
2676 already exists. */
2677 gcc_assert (!*slot);
2678 if (*slot)
2679 free_reference (*slot);
2681 *slot = vr1;
2682 return vr1;
2685 /* Compute and return the hash value for nary operation VBO1. */
2687 static hashval_t
2688 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2690 inchash::hash hstate;
2691 unsigned i;
2693 for (i = 0; i < vno1->length; ++i)
2694 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2695 vno1->op[i] = SSA_VAL (vno1->op[i]);
2697 if (((vno1->length == 2
2698 && commutative_tree_code (vno1->opcode))
2699 || (vno1->length == 3
2700 && commutative_ternary_tree_code (vno1->opcode)))
2701 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2702 std::swap (vno1->op[0], vno1->op[1]);
2703 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2704 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2706 std::swap (vno1->op[0], vno1->op[1]);
2707 vno1->opcode = swap_tree_comparison (vno1->opcode);
2710 hstate.add_int (vno1->opcode);
2711 for (i = 0; i < vno1->length; ++i)
2712 inchash::add_expr (vno1->op[i], hstate);
2714 return hstate.end ();
2717 /* Compare nary operations VNO1 and VNO2 and return true if they are
2718 equivalent. */
2720 bool
2721 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2723 unsigned i;
2725 if (vno1->hashcode != vno2->hashcode)
2726 return false;
2728 if (vno1->length != vno2->length)
2729 return false;
2731 if (vno1->opcode != vno2->opcode
2732 || !types_compatible_p (vno1->type, vno2->type))
2733 return false;
2735 for (i = 0; i < vno1->length; ++i)
2736 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2737 return false;
2739 /* BIT_INSERT_EXPR has an implicit operand as the type precision
2740 of op1. Need to check to make sure they are the same. */
2741 if (vno1->opcode == BIT_INSERT_EXPR
2742 && TREE_CODE (vno1->op[1]) == INTEGER_CST
2743 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
2744 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
2745 return false;
2747 return true;
2750 /* Initialize VNO from the pieces provided. */
2752 static void
2753 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2754 enum tree_code code, tree type, tree *ops)
2756 vno->opcode = code;
2757 vno->length = length;
2758 vno->type = type;
2759 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2762 /* Initialize VNO from OP. */
2764 static void
2765 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2767 unsigned i;
2769 vno->opcode = TREE_CODE (op);
2770 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2771 vno->type = TREE_TYPE (op);
2772 for (i = 0; i < vno->length; ++i)
2773 vno->op[i] = TREE_OPERAND (op, i);
2776 /* Return the number of operands for a vn_nary ops structure from STMT. */
2778 static unsigned int
2779 vn_nary_length_from_stmt (gimple *stmt)
2781 switch (gimple_assign_rhs_code (stmt))
2783 case REALPART_EXPR:
2784 case IMAGPART_EXPR:
2785 case VIEW_CONVERT_EXPR:
2786 return 1;
2788 case BIT_FIELD_REF:
2789 return 3;
2791 case CONSTRUCTOR:
2792 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2794 default:
2795 return gimple_num_ops (stmt) - 1;
2799 /* Initialize VNO from STMT. */
2801 static void
2802 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2804 unsigned i;
2806 vno->opcode = gimple_assign_rhs_code (stmt);
2807 vno->type = gimple_expr_type (stmt);
2808 switch (vno->opcode)
2810 case REALPART_EXPR:
2811 case IMAGPART_EXPR:
2812 case VIEW_CONVERT_EXPR:
2813 vno->length = 1;
2814 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2815 break;
2817 case BIT_FIELD_REF:
2818 vno->length = 3;
2819 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2820 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2821 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2822 break;
2824 case CONSTRUCTOR:
2825 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2826 for (i = 0; i < vno->length; ++i)
2827 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2828 break;
2830 default:
2831 gcc_checking_assert (!gimple_assign_single_p (stmt));
2832 vno->length = gimple_num_ops (stmt) - 1;
2833 for (i = 0; i < vno->length; ++i)
2834 vno->op[i] = gimple_op (stmt, i + 1);
2838 /* Compute the hashcode for VNO and look for it in the hash table;
2839 return the resulting value number if it exists in the hash table.
2840 Return NULL_TREE if it does not exist in the hash table or if the
2841 result field of the operation is NULL. VNRESULT will contain the
2842 vn_nary_op_t from the hashtable if it exists. */
2844 static tree
2845 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2847 vn_nary_op_s **slot;
2849 if (vnresult)
2850 *vnresult = NULL;
2852 vno->hashcode = vn_nary_op_compute_hash (vno);
2853 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2854 NO_INSERT);
2855 if (!slot && current_info == optimistic_info)
2856 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2857 NO_INSERT);
2858 if (!slot)
2859 return NULL_TREE;
2860 if (vnresult)
2861 *vnresult = *slot;
2862 return (*slot)->result;
2865 /* Lookup a n-ary operation by its pieces and return the resulting value
2866 number if it exists in the hash table. Return NULL_TREE if it does
2867 not exist in the hash table or if the result field of the operation
2868 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2869 if it exists. */
2871 tree
2872 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2873 tree type, tree *ops, vn_nary_op_t *vnresult)
2875 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2876 sizeof_vn_nary_op (length));
2877 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2878 return vn_nary_op_lookup_1 (vno1, vnresult);
2881 /* Lookup OP in the current hash table, and return the resulting value
2882 number if it exists in the hash table. Return NULL_TREE if it does
2883 not exist in the hash table or if the result field of the operation
2884 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2885 if it exists. */
2887 tree
2888 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2890 vn_nary_op_t vno1
2891 = XALLOCAVAR (struct vn_nary_op_s,
2892 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2893 init_vn_nary_op_from_op (vno1, op);
2894 return vn_nary_op_lookup_1 (vno1, vnresult);
2897 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2898 value number if it exists in the hash table. Return NULL_TREE if
2899 it does not exist in the hash table. VNRESULT will contain the
2900 vn_nary_op_t from the hashtable if it exists. */
2902 tree
2903 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2905 vn_nary_op_t vno1
2906 = XALLOCAVAR (struct vn_nary_op_s,
2907 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2908 init_vn_nary_op_from_stmt (vno1, stmt);
2909 return vn_nary_op_lookup_1 (vno1, vnresult);
2912 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2914 static vn_nary_op_t
2915 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2917 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2920 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2921 obstack. */
2923 static vn_nary_op_t
2924 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2926 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2927 &current_info->nary_obstack);
2929 vno1->value_id = value_id;
2930 vno1->length = length;
2931 vno1->result = result;
2933 return vno1;
2936 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2937 VNO->HASHCODE first. */
2939 static vn_nary_op_t
2940 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2941 bool compute_hash)
2943 vn_nary_op_s **slot;
2945 if (compute_hash)
2946 vno->hashcode = vn_nary_op_compute_hash (vno);
2948 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2949 /* While we do not want to insert things twice it's awkward to
2950 avoid it in the case where visit_nary_op pattern-matches stuff
2951 and ends up simplifying the replacement to itself. We then
2952 get two inserts, one from visit_nary_op and one from
2953 vn_nary_build_or_lookup.
2954 So allow inserts with the same value number. */
2955 if (*slot && (*slot)->result == vno->result)
2956 return *slot;
2958 gcc_assert (!*slot);
2960 *slot = vno;
2961 return vno;
2964 /* Insert an n-ary operation into the current hash table using its
2965 pieces. Return the vn_nary_op_t structure we created and put in
2966 the hashtable. */
2968 vn_nary_op_t
2969 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2970 tree type, tree *ops,
2971 tree result, unsigned int value_id)
2973 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2974 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2975 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2978 /* Insert OP into the current hash table with a value number of
2979 RESULT. Return the vn_nary_op_t structure we created and put in
2980 the hashtable. */
2982 vn_nary_op_t
2983 vn_nary_op_insert (tree op, tree result)
2985 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2986 vn_nary_op_t vno1;
2988 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2989 init_vn_nary_op_from_op (vno1, op);
2990 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2993 /* Insert the rhs of STMT into the current hash table with a value number of
2994 RESULT. */
2996 static vn_nary_op_t
2997 vn_nary_op_insert_stmt (gimple *stmt, tree result)
2999 vn_nary_op_t vno1
3000 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
3001 result, VN_INFO (result)->value_id);
3002 init_vn_nary_op_from_stmt (vno1, stmt);
3003 return vn_nary_op_insert_into (vno1, current_info->nary, true);
3006 /* Compute a hashcode for PHI operation VP1 and return it. */
3008 static inline hashval_t
3009 vn_phi_compute_hash (vn_phi_t vp1)
3011 inchash::hash hstate (vp1->phiargs.length () > 2
3012 ? vp1->block->index : vp1->phiargs.length ());
3013 tree phi1op;
3014 tree type;
3015 edge e;
3016 edge_iterator ei;
3018 /* If all PHI arguments are constants we need to distinguish
3019 the PHI node via its type. */
3020 type = vp1->type;
3021 hstate.merge_hash (vn_hash_type (type));
3023 FOR_EACH_EDGE (e, ei, vp1->block->preds)
3025 /* Don't hash backedge values; they need to be handled as VN_TOP
3026 for optimistic value-numbering. */
3027 if (e->flags & EDGE_DFS_BACK)
3028 continue;
3030 phi1op = vp1->phiargs[e->dest_idx];
3031 if (phi1op == VN_TOP)
3032 continue;
3033 inchash::add_expr (phi1op, hstate);
3036 return hstate.end ();
3040 /* Return true if COND1 and COND2 represent the same condition, set
3041 *INVERTED_P if one needs to be inverted to make it the same as
3042 the other. */
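/* For example (a sketch):  a_1 < b_2  and  b_2 > a_1  represent the
   same condition, while  a_1 < b_2  and  a_1 >= b_2  are inverses of
   each other and make *INVERTED_P true.  */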
3044 static bool
3045 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
3046 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
3048 enum tree_code code1 = gimple_cond_code (cond1);
3049 enum tree_code code2 = gimple_cond_code (cond2);
3051 *inverted_p = false;
3052 if (code1 == code2)
3054 else if (code1 == swap_tree_comparison (code2))
3055 std::swap (lhs2, rhs2);
3056 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
3057 *inverted_p = true;
3058 else if (code1 == invert_tree_comparison
3059 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
3061 std::swap (lhs2, rhs2);
3062 *inverted_p = true;
3064 else
3065 return false;
3067 return ((expressions_equal_p (lhs1, lhs2)
3068 && expressions_equal_p (rhs1, rhs2))
3069 || (commutative_tree_code (code1)
3070 && expressions_equal_p (lhs1, rhs2)
3071 && expressions_equal_p (rhs1, lhs2)));
3074 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
3076 static int
3077 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
3079 if (vp1->hashcode != vp2->hashcode)
3080 return false;
3082 if (vp1->block != vp2->block)
3084 if (vp1->phiargs.length () != vp2->phiargs.length ())
3085 return false;
3087 switch (vp1->phiargs.length ())
3089 case 1:
3090 /* Single-arg PHIs are just copies. */
3091 break;
3093 case 2:
3095 /* Rule out backedges into the PHI. */
3096 if (vp1->block->loop_father->header == vp1->block
3097 || vp2->block->loop_father->header == vp2->block)
3098 return false;
3100 /* If the PHI nodes do not have compatible types
3101 they are not the same. */
3102 if (!types_compatible_p (vp1->type, vp2->type))
3103 return false;
3105 basic_block idom1
3106 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3107 basic_block idom2
3108 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
3109 /* If the immediate dominators end in switch stmts, multiple
3110 values may end up in the same PHI arg via intermediate
3111 CFG merges. */
3112 if (EDGE_COUNT (idom1->succs) != 2
3113 || EDGE_COUNT (idom2->succs) != 2)
3114 return false;
3116 /* Verify the controlling stmt is the same. */
3117 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3118 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3119 if (! last1 || ! last2)
3120 return false;
3121 bool inverted_p;
3122 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3123 last2, vp2->cclhs, vp2->ccrhs,
3124 &inverted_p))
3125 return false;
3127 /* Get at true/false controlled edges into the PHI. */
3128 edge te1, te2, fe1, fe2;
3129 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3130 &te1, &fe1)
3131 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3132 &te2, &fe2))
3133 return false;
3135 /* Swap edges if the second condition is the inverted of the
3136 first. */
3137 if (inverted_p)
3138 std::swap (te2, fe2);
3140 /* ??? Handle VN_TOP specially. */
3141 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3142 vp2->phiargs[te2->dest_idx])
3143 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3144 vp2->phiargs[fe2->dest_idx]))
3145 return false;
3147 return true;
3150 default:
3151 return false;
3155 /* If the PHI nodes do not have compatible types
3156 they are not the same. */
3157 if (!types_compatible_p (vp1->type, vp2->type))
3158 return false;
3160 /* Any phi in the same block will have its arguments in the
3161 same edge order, because of how we store phi nodes. */
3162 int i;
3163 tree phi1op;
3164 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
3166 tree phi2op = vp2->phiargs[i];
3167 if (phi1op == VN_TOP || phi2op == VN_TOP)
3168 continue;
3169 if (!expressions_equal_p (phi1op, phi2op))
3170 return false;
3173 return true;
3176 static vec<tree> shared_lookup_phiargs;
3178 /* Lookup PHI in the current hash table, and return the resulting
3179 value number if it exists in the hash table. Return NULL_TREE if
3180 it does not exist in the hash table. */
3182 static tree
3183 vn_phi_lookup (gimple *phi)
3185 vn_phi_s **slot;
3186 struct vn_phi_s vp1;
3187 edge e;
3188 edge_iterator ei;
3190 shared_lookup_phiargs.truncate (0);
3191 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
3193 /* Canonicalize the SSA_NAME's to their value number. */
3194 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3196 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3197 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3198 shared_lookup_phiargs[e->dest_idx] = def;
3200 vp1.type = TREE_TYPE (gimple_phi_result (phi));
3201 vp1.phiargs = shared_lookup_phiargs;
3202 vp1.block = gimple_bb (phi);
3203 /* Extract values of the controlling condition. */
3204 vp1.cclhs = NULL_TREE;
3205 vp1.ccrhs = NULL_TREE;
3206 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1.block);
3207 if (EDGE_COUNT (idom1->succs) == 2)
3208 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3210 vp1.cclhs = vn_valueize (gimple_cond_lhs (last1));
3211 vp1.ccrhs = vn_valueize (gimple_cond_rhs (last1));
3213 vp1.hashcode = vn_phi_compute_hash (&vp1);
3214 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3215 NO_INSERT);
3216 if (!slot && current_info == optimistic_info)
3217 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3218 NO_INSERT);
3219 if (!slot)
3220 return NULL_TREE;
3221 return (*slot)->result;
3224 /* Insert PHI into the current hash table with a value number of
3225 RESULT. */
3227 static vn_phi_t
3228 vn_phi_insert (gimple *phi, tree result)
3230 vn_phi_s **slot;
3231 vn_phi_t vp1 = current_info->phis_pool->allocate ();
3232 vec<tree> args = vNULL;
3233 edge e;
3234 edge_iterator ei;
3236 args.safe_grow (gimple_phi_num_args (phi));
3238 /* Canonicalize the SSA_NAME's to their value number. */
3239 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3241 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3242 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3243 args[e->dest_idx] = def;
3245 vp1->value_id = VN_INFO (result)->value_id;
3246 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3247 vp1->phiargs = args;
3248 vp1->block = gimple_bb (phi);
3249 /* Extract values of the controlling condition. */
3250 vp1->cclhs = NULL_TREE;
3251 vp1->ccrhs = NULL_TREE;
3252 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3253 if (EDGE_COUNT (idom1->succs) == 2)
3254 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3256 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3257 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3259 vp1->result = result;
3260 vp1->hashcode = vn_phi_compute_hash (vp1);
3262 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3264 /* Because we iterate over phi operations more than once, it's
3265 possible the slot might already exist here, hence no assert. */
3266 *slot = vp1;
3267 return vp1;
3271 /* Print set of components in strongly connected component SCC to OUT. */
3273 static void
3274 print_scc (FILE *out, vec<tree> scc)
3276 tree var;
3277 unsigned int i;
3279 fprintf (out, "SCC consists of %u:", scc.length ());
3280 FOR_EACH_VEC_ELT (scc, i, var)
3282 fprintf (out, " ");
3283 print_generic_expr (out, var);
3285 fprintf (out, "\n");
3288 /* Return true if BB1 is dominated by BB2 taking into account edges
3289 that are not executable. */
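/* For example (a sketch): if BB1 has several predecessors but only one
   of the incoming edges is executable, the dominance query is redone
   from that single executable predecessor, which BB2 may dominate even
   though plain dominance of BB1 by BB2 does not hold.  */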
3291 static bool
3292 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3294 edge_iterator ei;
3295 edge e;
3297 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3298 return true;
3300 /* Before iterating we'd like to know if there exists an
3301 (executable) path from bb2 to bb1 at all; if not we can
3302 directly return false. For now simply iterate once. */
3304 /* Iterate to the single executable bb1 predecessor. */
3305 if (EDGE_COUNT (bb1->preds) > 1)
3307 edge prede = NULL;
3308 FOR_EACH_EDGE (e, ei, bb1->preds)
3309 if (e->flags & EDGE_EXECUTABLE)
3311 if (prede)
3313 prede = NULL;
3314 break;
3316 prede = e;
3318 if (prede)
3320 bb1 = prede->src;
3322 /* Re-do the dominance check with changed bb1. */
3323 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3324 return true;
3328 /* Iterate to the single executable bb2 successor. */
3329 edge succe = NULL;
3330 FOR_EACH_EDGE (e, ei, bb2->succs)
3331 if (e->flags & EDGE_EXECUTABLE)
3333 if (succe)
3335 succe = NULL;
3336 break;
3338 succe = e;
3340 if (succe)
3342 /* Verify the reached block is only reached through succe.
3343 If there is only one edge we can spare us the dominator
3344 check and iterate directly. */
3345 if (EDGE_COUNT (succe->dest->preds) > 1)
3347 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3348 if (e != succe
3349 && (e->flags & EDGE_EXECUTABLE))
3351 succe = NULL;
3352 break;
3355 if (succe)
3357 bb2 = succe->dest;
3359 /* Re-do the dominance check with changed bb2. */
3360 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3361 return true;
3365 /* We could now iterate updating bb1 / bb2. */
3366 return false;
3369 /* Set the value number of FROM to TO, return true if it has changed
3370 as a result. */
3372 static inline bool
3373 set_ssa_val_to (tree from, tree to)
3375 tree currval = SSA_VAL (from);
3376 poly_int64 toff, coff;
3378 /* The only things we allow as value numbers are ssa_names
3379 and invariants. So assert that here. We don't allow VN_TOP
3380 as visiting a stmt should produce a value-number other than
3381 that.
3382 ??? Still VN_TOP can happen for unreachable code, so force
3383 it to varying in that case. Not all code is prepared to
3384 get VN_TOP on valueization. */
3385 if (to == VN_TOP)
3387 if (dump_file && (dump_flags & TDF_DETAILS))
3388 fprintf (dump_file, "Forcing value number to varying on "
3389 "receiving VN_TOP\n");
3390 to = from;
3393 gcc_assert (to != NULL_TREE
3394 && ((TREE_CODE (to) == SSA_NAME
3395 && (to == from || SSA_VAL (to) == to))
3396 || is_gimple_min_invariant (to)));
3398 if (from != to)
3400 if (currval == from)
3402 if (dump_file && (dump_flags & TDF_DETAILS))
3404 fprintf (dump_file, "Not changing value number of ");
3405 print_generic_expr (dump_file, from);
3406 fprintf (dump_file, " from VARYING to ");
3407 print_generic_expr (dump_file, to);
3408 fprintf (dump_file, "\n");
3410 return false;
3412 else if (currval != VN_TOP
3413 && ! is_gimple_min_invariant (currval)
3414 && is_gimple_min_invariant (to))
3416 if (dump_file && (dump_flags & TDF_DETAILS))
3418 fprintf (dump_file, "Forcing VARYING instead of changing "
3419 "value number of ");
3420 print_generic_expr (dump_file, from);
3421 fprintf (dump_file, " from ");
3422 print_generic_expr (dump_file, currval);
3423 fprintf (dump_file, " (non-constant) to ");
3424 print_generic_expr (dump_file, to);
3425 fprintf (dump_file, " (constant)\n");
3427 to = from;
3429 else if (TREE_CODE (to) == SSA_NAME
3430 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3431 to = from;
3434 if (dump_file && (dump_flags & TDF_DETAILS))
3436 fprintf (dump_file, "Setting value number of ");
3437 print_generic_expr (dump_file, from);
3438 fprintf (dump_file, " to ");
3439 print_generic_expr (dump_file, to);
3442 if (currval != to
3443 && !operand_equal_p (currval, to, 0)
3444 /* Different undefined SSA names are not actually different. See
3445 PR82320 for a testcase where we'd otherwise not terminate iteration. */
3446 && !(TREE_CODE (currval) == SSA_NAME
3447 && TREE_CODE (to) == SSA_NAME
3448 && ssa_undefined_value_p (currval, false)
3449 && ssa_undefined_value_p (to, false))
3450 /* ??? For addresses involving volatile objects or types operand_equal_p
3451 does not reliably detect ADDR_EXPRs as equal. We know we are only
3452 getting invariant gimple addresses here, so can use
3453 get_addr_base_and_unit_offset to do this comparison. */
3454 && !(TREE_CODE (currval) == ADDR_EXPR
3455 && TREE_CODE (to) == ADDR_EXPR
3456 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3457 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3458 && known_eq (coff, toff)))
3460 if (dump_file && (dump_flags & TDF_DETAILS))
3461 fprintf (dump_file, " (changed)\n");
3463 /* If we equate two SSA names we have to make the side-band info
3464 of the leader conservative (and remember whatever original value
3465 was present). */
3466 if (TREE_CODE (to) == SSA_NAME)
3468 if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3469 && SSA_NAME_RANGE_INFO (to))
3471 if (SSA_NAME_IS_DEFAULT_DEF (to)
3472 || dominated_by_p_w_unex
3473 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3474 gimple_bb (SSA_NAME_DEF_STMT (to))))
3475 /* Keep the info from the dominator. */
3477 else
3479 /* Save old info. */
3480 if (! VN_INFO (to)->info.range_info)
3482 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3483 VN_INFO (to)->range_info_anti_range_p
3484 = SSA_NAME_ANTI_RANGE_P (to);
3486 /* Rather than allocating memory and unioning the info
3487 just clear it. */
3488 if (dump_file && (dump_flags & TDF_DETAILS))
3490 fprintf (dump_file, "clearing range info of ");
3491 print_generic_expr (dump_file, to);
3492 fprintf (dump_file, "\n");
3494 SSA_NAME_RANGE_INFO (to) = NULL;
3497 else if (POINTER_TYPE_P (TREE_TYPE (to))
3498 && SSA_NAME_PTR_INFO (to))
3500 if (SSA_NAME_IS_DEFAULT_DEF (to)
3501 || dominated_by_p_w_unex
3502 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3503 gimple_bb (SSA_NAME_DEF_STMT (to))))
3504 /* Keep the info from the dominator. */
3506 else if (! SSA_NAME_PTR_INFO (from)
3507 /* Handle the case of trivially equivalent info. */
3508 || memcmp (SSA_NAME_PTR_INFO (to),
3509 SSA_NAME_PTR_INFO (from),
3510 sizeof (ptr_info_def)) != 0)
3512 /* Save old info. */
3513 if (! VN_INFO (to)->info.ptr_info)
3514 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3515 /* Rather than allocating memory and unioning the info
3516 just clear it. */
3517 if (dump_file && (dump_flags & TDF_DETAILS))
3519 fprintf (dump_file, "clearing points-to info of ");
3520 print_generic_expr (dump_file, to);
3521 fprintf (dump_file, "\n");
3523 SSA_NAME_PTR_INFO (to) = NULL;
3528 VN_INFO (from)->valnum = to;
3529 return true;
3531 if (dump_file && (dump_flags & TDF_DETAILS))
3532 fprintf (dump_file, "\n");
3533 return false;
3536 /* Mark as processed all the definitions in the defining stmt of USE, or
3537 the USE itself. */
3539 static void
3540 mark_use_processed (tree use)
3542 ssa_op_iter iter;
3543 def_operand_p defp;
3544 gimple *stmt = SSA_NAME_DEF_STMT (use);
3546 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3548 VN_INFO (use)->use_processed = true;
3549 return;
3552 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3554 tree def = DEF_FROM_PTR (defp);
3556 VN_INFO (def)->use_processed = true;
3560 /* Value number all definitions in STMT to themselves.
3561 Return true if a value number changed. */
3563 static bool
3564 defs_to_varying (gimple *stmt)
3566 bool changed = false;
3567 ssa_op_iter iter;
3568 def_operand_p defp;
3570 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3572 tree def = DEF_FROM_PTR (defp);
3573 changed |= set_ssa_val_to (def, def);
3575 return changed;
3578 /* Visit a copy between LHS and RHS, return true if the value number
3579 changed. */
3581 static bool
3582 visit_copy (tree lhs, tree rhs)
3584 /* Valueize. */
3585 rhs = SSA_VAL (rhs);
3587 return set_ssa_val_to (lhs, rhs);
3590 /* Lookup a value for OP in type WIDE_TYPE, i.e. a WIDE_TYPE value
3591 that is equal to OP when converted back to the type of OP. */
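/* For example (a sketch): with OP of type unsigned short and WIDE_TYPE
   int this returns an int value equal to the widened OP: an existing
   (int) OP conversion, the int value OP was truncated from, or, for
   constant OP, the constant extended to int.  */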
3593 static tree
3594 valueized_wider_op (tree wide_type, tree op)
3596 if (TREE_CODE (op) == SSA_NAME)
3597 op = SSA_VAL (op);
3599 /* Either we have the op widened available. */
3600 tree ops[3] = {};
3601 ops[0] = op;
3602 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
3603 wide_type, ops, NULL);
3604 if (tem)
3605 return tem;
3607 /* Or the op is truncated from some existing value. */
3608 if (TREE_CODE (op) == SSA_NAME)
3610 gimple *def = SSA_NAME_DEF_STMT (op);
3611 if (is_gimple_assign (def)
3612 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3614 tem = gimple_assign_rhs1 (def);
3615 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
3617 if (TREE_CODE (tem) == SSA_NAME)
3618 tem = SSA_VAL (tem);
3619 return tem;
3624 /* For constants simply extend it. */
3625 if (TREE_CODE (op) == INTEGER_CST)
3626 return wide_int_to_tree (wide_type, wi::to_wide (op));
3628 return NULL_TREE;
3631 /* Visit a nary operator RHS, value number it, and return true if the
3632 value number of LHS has changed as a result. */
3634 static bool
3635 visit_nary_op (tree lhs, gassign *stmt)
3637 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3638 if (result)
3639 return set_ssa_val_to (lhs, result);
3641 /* Do some special pattern matching for redundancies of operations
3642 in different types. */
3643 enum tree_code code = gimple_assign_rhs_code (stmt);
3644 tree type = TREE_TYPE (lhs);
3645 tree rhs1 = gimple_assign_rhs1 (stmt);
3646 switch (code)
3648 CASE_CONVERT:
3649 /* Match arithmetic done in a different type where we can easily
3650 substitute the result from some earlier sign-changed or widened
3651 operation. */
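/* For example (a sketch, with hypothetical SSA names):
     tem_1 = b_2 + c_3;        (done in unsigned short)
     x_4 = (int) tem_1;
   if an int addition of the widened b_2 and c_3 is already value
   numbered, x_4 can be valued as that result masked with 0xffff to
   model the zero-extension.  */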
3652 if (INTEGRAL_TYPE_P (type)
3653 && TREE_CODE (rhs1) == SSA_NAME
3654 /* We only handle sign-changes or zero-extension -> & mask. */
3655 && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
3656 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
3657 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
3659 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
3660 if (def
3661 && (gimple_assign_rhs_code (def) == PLUS_EXPR
3662 || gimple_assign_rhs_code (def) == MINUS_EXPR
3663 || gimple_assign_rhs_code (def) == MULT_EXPR))
3665 tree ops[3] = {};
3666 /* Either we have the op widened available. */
3667 ops[0] = valueized_wider_op (type,
3668 gimple_assign_rhs1 (def));
3669 if (ops[0])
3670 ops[1] = valueized_wider_op (type,
3671 gimple_assign_rhs2 (def));
3672 if (ops[0] && ops[1])
3674 ops[0] = vn_nary_op_lookup_pieces
3675 (2, gimple_assign_rhs_code (def), type, ops, NULL);
3676 /* We have wider operation available. */
3677 if (ops[0])
3679 unsigned lhs_prec = TYPE_PRECISION (type);
3680 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
3681 if (lhs_prec == rhs_prec)
3683 ops[1] = NULL_TREE;
3684 result = vn_nary_build_or_lookup (NOP_EXPR,
3685 type, ops);
3686 if (result)
3688 bool changed = set_ssa_val_to (lhs, result);
3689 vn_nary_op_insert_stmt (stmt, result);
3690 return changed;
3693 else
3695 ops[1] = wide_int_to_tree (type,
3696 wi::mask (rhs_prec, false,
3697 lhs_prec));
3698 result = vn_nary_build_or_lookup (BIT_AND_EXPR,
3699 TREE_TYPE (lhs),
3700 ops);
3701 if (result)
3703 bool changed = set_ssa_val_to (lhs, result);
3704 vn_nary_op_insert_stmt (stmt, result);
3705 return changed;
3712 default:;
3715 bool changed = set_ssa_val_to (lhs, lhs);
3716 vn_nary_op_insert_stmt (stmt, lhs);
3717 return changed;
3720 /* Visit a call STMT storing into LHS. Return true if the value number
3721 of the LHS has changed as a result. */
3723 static bool
3724 visit_reference_op_call (tree lhs, gcall *stmt)
3726 bool changed = false;
3727 struct vn_reference_s vr1;
3728 vn_reference_t vnresult = NULL;
3729 tree vdef = gimple_vdef (stmt);
3731 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3732 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3733 lhs = NULL_TREE;
3735 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3736 if (vnresult)
3738 if (vnresult->result_vdef && vdef)
3739 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3740 else if (vdef)
3741 /* If the call was discovered to be pure or const reflect
3742 that as far as possible. */
3743 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
3745 if (!vnresult->result && lhs)
3746 vnresult->result = lhs;
3748 if (vnresult->result && lhs)
3749 changed |= set_ssa_val_to (lhs, vnresult->result);
3751 else
3753 vn_reference_t vr2;
3754 vn_reference_s **slot;
3755 tree vdef_val = vdef;
3756 if (vdef)
3758 /* If we value numbered an indirect call's function to
3759 one not clobbering memory, value number its VDEF to its
3760 VUSE. */
3761 tree fn = gimple_call_fn (stmt);
3762 if (fn && TREE_CODE (fn) == SSA_NAME)
3764 fn = SSA_VAL (fn);
3765 if (TREE_CODE (fn) == ADDR_EXPR
3766 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3767 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
3768 & (ECF_CONST | ECF_PURE)))
3769 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
3771 changed |= set_ssa_val_to (vdef, vdef_val);
3773 if (lhs)
3774 changed |= set_ssa_val_to (lhs, lhs);
3775 vr2 = current_info->references_pool->allocate ();
3776 vr2->vuse = vr1.vuse;
3777 /* As we are not walking the virtual operand chain we know the
3778 shared_lookup_references are still original so we can re-use
3779 them here. */
3780 vr2->operands = vr1.operands.copy ();
3781 vr2->type = vr1.type;
3782 vr2->set = vr1.set;
3783 vr2->hashcode = vr1.hashcode;
3784 vr2->result = lhs;
3785 vr2->result_vdef = vdef_val;
3786 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3787 INSERT);
3788 gcc_assert (!*slot);
3789 *slot = vr2;
3792 return changed;
3795 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3796 and return true if the value number of the LHS has changed as a result. */
3798 static bool
3799 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3801 bool changed = false;
3802 tree last_vuse;
3803 tree result;
3805 last_vuse = gimple_vuse (stmt);
3806 last_vuse_ptr = &last_vuse;
3807 result = vn_reference_lookup (op, gimple_vuse (stmt),
3808 default_vn_walk_kind, NULL, true);
3809 last_vuse_ptr = NULL;
3811 /* We handle type-punning through unions by value-numbering based
3812 on offset and size of the access. Be prepared to handle a
3813 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
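/* For example (a sketch): with  union { int i; float f; } u;  after
   u.i = x_1;  a load  tem_2 = u.f;  gets the value
   VIEW_CONVERT_EXPR <float> (x_1), or its simplification.  */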
3814 if (result
3815 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3817 /* We will be setting the value number of lhs to the value number
3818 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3819 So first simplify and lookup this expression to see if it
3820 is already available. */
3821 code_helper rcode = VIEW_CONVERT_EXPR;
3822 tree ops[3] = { result };
3823 result = vn_nary_build_or_lookup (rcode, TREE_TYPE (op), ops);
3826 if (result)
3827 changed = set_ssa_val_to (lhs, result);
3828 else
3830 changed = set_ssa_val_to (lhs, lhs);
3831 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3834 return changed;
3838 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3839 and return true if the value number of the LHS has changed as a result. */
3841 static bool
3842 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3844 bool changed = false;
3845 vn_reference_t vnresult = NULL;
3846 tree assign;
3847 bool resultsame = false;
3848 tree vuse = gimple_vuse (stmt);
3849 tree vdef = gimple_vdef (stmt);
3851 if (TREE_CODE (op) == SSA_NAME)
3852 op = SSA_VAL (op);
3854 /* First we want to look up using the *vuses* from the store and see
3855 whether the last store to this location with the same address
3856 had the same value.
3858 The vuses represent the memory state before the store. If the
3859 memory state, address, and value of the store are the same as the
3860 last store to this location, then this store will produce the
3861 same memory state as that store.
3863 In this case the vdef versions for this store are value numbered to those
3864 vuse versions, since they represent the same memory state after
3865 this store.
3867 Otherwise, the vdefs for the store are used when inserting into
3868 the table, since the store generates a new memory state. */
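/* An illustrative (made-up) example: for *p_1 = x_2; ...; *p_1 = x_2;
   where the second store finds the first one with the same address and
   value (and nothing clobbered *p_1 in between), the second store's
   VDEF is value numbered to its VUSE since the memory state does not
   change.  */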
3870 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
3871 if (vnresult
3872 && vnresult->result)
3874 tree result = vnresult->result;
3875 if (TREE_CODE (result) == SSA_NAME)
3876 result = SSA_VAL (result);
3877 resultsame = expressions_equal_p (result, op);
3878 if (resultsame)
3880 /* If the TBAA state isn't compatible for downstream reads
3881 we cannot value-number the VDEFs the same. */
3882 alias_set_type set = get_alias_set (lhs);
3883 if (vnresult->set != set
3884 && ! alias_set_subset_of (set, vnresult->set))
3885 resultsame = false;
3889 if (!resultsame)
3891 /* Only perform the following when being called from PRE
3892 which embeds tail merging. */
3893 if (default_vn_walk_kind == VN_WALK)
3895 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3896 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3897 if (vnresult)
3899 VN_INFO (vdef)->use_processed = true;
3900 return set_ssa_val_to (vdef, vnresult->result_vdef);
3904 if (dump_file && (dump_flags & TDF_DETAILS))
3906 fprintf (dump_file, "No store match\n");
3907 fprintf (dump_file, "Value numbering store ");
3908 print_generic_expr (dump_file, lhs);
3909 fprintf (dump_file, " to ");
3910 print_generic_expr (dump_file, op);
3911 fprintf (dump_file, "\n");
3913 /* Have to set value numbers before insert, since insert is
3914 going to valueize the references in-place. */
3915 if (vdef)
3916 changed |= set_ssa_val_to (vdef, vdef);
3918 /* Do not insert structure copies into the tables. */
3919 if (is_gimple_min_invariant (op)
3920 || is_gimple_reg (op))
3921 vn_reference_insert (lhs, op, vdef, NULL);
3923 /* Only perform the following when being called from PRE
3924 which embeds tail merging. */
3925 if (default_vn_walk_kind == VN_WALK)
3927 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3928 vn_reference_insert (assign, lhs, vuse, vdef);
3931 else
3933 /* We had a match, so value number the vdef to have the value
3934 number of the vuse it came from. */
3936 if (dump_file && (dump_flags & TDF_DETAILS))
3937 fprintf (dump_file, "Store matched earlier value, "
3938 "value numbering store vdefs to matching vuses.\n");
3940 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3943 return changed;
3946 /* Visit and value number PHI, return true if the value number
3947 changed. */
3949 static bool
3950 visit_phi (gimple *phi)
3952 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
3953 unsigned n_executable = 0;
3954 bool allsame = true;
3955 edge_iterator ei;
3956 edge e;
3958 /* TODO: We could check for this in init_sccvn, and replace this
3959 with a gcc_assert. */
3960 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3961 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3963 /* See if all non-TOP arguments have the same value. TOP is
3964 equivalent to everything, so we can ignore it. */
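/* An illustrative (made-up) example: x_4 = PHI <y_1(2), y_1(3)> with
   both incoming edges executable yields sameval == y_1 and the PHI
   result is value numbered to y_1 below.  */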
3965 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3966 if (e->flags & EDGE_EXECUTABLE)
3968 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3970 ++n_executable;
3971 if (TREE_CODE (def) == SSA_NAME)
3972 def = SSA_VAL (def);
3973 if (def == VN_TOP)
3975 /* Ignore undefined defs for sameval but record one. */
3976 else if (TREE_CODE (def) == SSA_NAME
3977 && ssa_undefined_value_p (def, false))
3978 seen_undef = def;
3979 else if (sameval == VN_TOP)
3980 sameval = def;
3981 else if (!expressions_equal_p (def, sameval))
3983 allsame = false;
3984 break;
3989 /* If none of the edges was executable keep the value-number at VN_TOP;
3990 if only a single edge is executable use its value. */
3991 if (n_executable <= 1)
3992 result = seen_undef ? seen_undef : sameval;
3993 /* If we saw only undefined values and VN_TOP use one of the
3994 undefined values. */
3995 else if (sameval == VN_TOP)
3996 result = seen_undef ? seen_undef : sameval;
3997 /* First see if it is equivalent to a phi node in this block. We prefer
3998 this as it allows IV elimination - see PRs 66502 and 67167. */
3999 else if ((result = vn_phi_lookup (phi)))
4001 /* If all values are the same use that, unless we've seen undefined
4002 values as well and the value isn't constant.
4003 CCP/copyprop have the same restriction to not remove uninit warnings. */
4004 else if (allsame
4005 && (! seen_undef || is_gimple_min_invariant (sameval)))
4006 result = sameval;
4007 else
4009 result = PHI_RESULT (phi);
4010 /* Only insert PHIs that are varying; for constant value numbers
4011 we would mess up equivalences otherwise, as we are only comparing
4012 the immediate controlling predicates. */
4013 vn_phi_insert (phi, result);
4016 return set_ssa_val_to (PHI_RESULT (phi), result);
4019 /* Try to simplify RHS using equivalences and constant folding. */
4021 static tree
4022 try_to_simplify (gassign *stmt)
4024 enum tree_code code = gimple_assign_rhs_code (stmt);
4025 tree tem;
4027 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
4028 in this case; there is no point in doing extra work. */
4029 if (code == SSA_NAME)
4030 return NULL_TREE;
4032 /* First try constant folding based on our current lattice. */
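/* An illustrative (made-up) example: if a_2 currently has the value 3
   and b_3 the value 4, then c_5 = a_2 + b_3 folds to the constant 7
   here.  */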
4033 mprts_hook = vn_lookup_simplify_result;
4034 mprts_hook_cnt = 9;
4035 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
4036 mprts_hook = NULL;
4037 if (tem
4038 && (TREE_CODE (tem) == SSA_NAME
4039 || is_gimple_min_invariant (tem)))
4040 return tem;
4042 return NULL_TREE;
4045 /* Visit and value number USE, return true if the value number
4046 changed. */
4048 static bool
4049 visit_use (tree use)
4051 bool changed = false;
4052 gimple *stmt = SSA_NAME_DEF_STMT (use);
4054 mark_use_processed (use);
4056 gcc_assert (!SSA_NAME_IN_FREE_LIST (use)
4057 && !SSA_NAME_IS_DEFAULT_DEF (use));
4059 if (dump_file && (dump_flags & TDF_DETAILS))
4061 fprintf (dump_file, "Value numbering ");
4062 print_generic_expr (dump_file, use);
4063 fprintf (dump_file, " stmt = ");
4064 print_gimple_stmt (dump_file, stmt, 0);
4067 if (gimple_code (stmt) == GIMPLE_PHI)
4068 changed = visit_phi (stmt);
4069 else if (gimple_has_volatile_ops (stmt))
4070 changed = defs_to_varying (stmt);
4071 else if (gassign *ass = dyn_cast <gassign *> (stmt))
4073 enum tree_code code = gimple_assign_rhs_code (ass);
4074 tree lhs = gimple_assign_lhs (ass);
4075 tree rhs1 = gimple_assign_rhs1 (ass);
4076 tree simplified;
4078 /* Shortcut for copies. Simplifying copies is pointless,
4079 since we copy the expression and value they represent. */
4080 if (code == SSA_NAME
4081 && TREE_CODE (lhs) == SSA_NAME)
4083 changed = visit_copy (lhs, rhs1);
4084 goto done;
4086 simplified = try_to_simplify (ass);
4087 if (simplified)
4089 if (dump_file && (dump_flags & TDF_DETAILS))
4091 fprintf (dump_file, "RHS ");
4092 print_gimple_expr (dump_file, ass, 0);
4093 fprintf (dump_file, " simplified to ");
4094 print_generic_expr (dump_file, simplified);
4095 fprintf (dump_file, "\n");
4098 /* Setting value numbers to constants will occasionally
4099 screw up phi congruence because constants are not
4100 uniquely associated with a single ssa name that can be
4101 looked up. */
4102 if (simplified
4103 && is_gimple_min_invariant (simplified)
4104 && TREE_CODE (lhs) == SSA_NAME)
4106 changed = set_ssa_val_to (lhs, simplified);
4107 goto done;
4109 else if (simplified
4110 && TREE_CODE (simplified) == SSA_NAME
4111 && TREE_CODE (lhs) == SSA_NAME)
4113 changed = visit_copy (lhs, simplified);
4114 goto done;
4117 if ((TREE_CODE (lhs) == SSA_NAME
4118 /* We can substitute SSA_NAMEs that are live over
4119 abnormal edges with their constant value. */
4120 && !(gimple_assign_copy_p (ass)
4121 && is_gimple_min_invariant (rhs1))
4122 && !(simplified
4123 && is_gimple_min_invariant (simplified))
4124 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4125 /* Stores or copies from SSA_NAMEs that are live over
4126 abnormal edges are a problem. */
4127 || (code == SSA_NAME
4128 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4129 changed = defs_to_varying (ass);
4130 else if (REFERENCE_CLASS_P (lhs)
4131 || DECL_P (lhs))
4132 changed = visit_reference_op_store (lhs, rhs1, ass);
4133 else if (TREE_CODE (lhs) == SSA_NAME)
4135 if ((gimple_assign_copy_p (ass)
4136 && is_gimple_min_invariant (rhs1))
4137 || (simplified
4138 && is_gimple_min_invariant (simplified)))
4140 if (simplified)
4141 changed = set_ssa_val_to (lhs, simplified);
4142 else
4143 changed = set_ssa_val_to (lhs, rhs1);
4145 else
4147 /* Visit the original statement. */
4148 switch (vn_get_stmt_kind (ass))
4150 case VN_NARY:
4151 changed = visit_nary_op (lhs, ass);
4152 break;
4153 case VN_REFERENCE:
4154 changed = visit_reference_op_load (lhs, rhs1, ass);
4155 break;
4156 default:
4157 changed = defs_to_varying (ass);
4158 break;
4162 else
4163 changed = defs_to_varying (ass);
4165 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4167 tree lhs = gimple_call_lhs (call_stmt);
4168 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4170 /* Try constant folding based on our current lattice. */
4171 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4172 vn_valueize);
4173 if (simplified)
4175 if (dump_file && (dump_flags & TDF_DETAILS))
4177 fprintf (dump_file, "call ");
4178 print_gimple_expr (dump_file, call_stmt, 0);
4179 fprintf (dump_file, " simplified to ");
4180 print_generic_expr (dump_file, simplified);
4181 fprintf (dump_file, "\n");
4184 /* Setting value numbers to constants will occasionally
4185 screw up phi congruence because constants are not
4186 uniquely associated with a single ssa name that can be
4187 looked up. */
4188 if (simplified
4189 && is_gimple_min_invariant (simplified))
4191 changed = set_ssa_val_to (lhs, simplified);
4192 if (gimple_vdef (call_stmt))
4193 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4194 SSA_VAL (gimple_vuse (call_stmt)));
4195 goto done;
4197 else if (simplified
4198 && TREE_CODE (simplified) == SSA_NAME)
4200 changed = visit_copy (lhs, simplified);
4201 if (gimple_vdef (call_stmt))
4202 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4203 SSA_VAL (gimple_vuse (call_stmt)));
4204 goto done;
4206 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4208 changed = defs_to_varying (call_stmt);
4209 goto done;
4213 /* Pick up flags from a devirtualization target. */
4214 tree fn = gimple_call_fn (stmt);
4215 int extra_fnflags = 0;
4216 if (fn && TREE_CODE (fn) == SSA_NAME)
4218 fn = SSA_VAL (fn);
4219 if (TREE_CODE (fn) == ADDR_EXPR
4220 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4221 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4223 if (!gimple_call_internal_p (call_stmt)
4224 && (/* Calls to the same function with the same vuse
4225 and the same operands do not necessarily return the same
4226 value, unless they're pure or const. */
4227 ((gimple_call_flags (call_stmt) | extra_fnflags)
4228 & (ECF_PURE | ECF_CONST))
4229 /* If calls have a vdef, subsequent calls won't have
4230 the same incoming vuse. So, if 2 calls with vdef have the
4231 same vuse, we know they're not subsequent.
4232 We can value number 2 calls to the same function with the
4233 same vuse and the same operands, which are not subsequent,
4234 the same, because there is no code in the program that can
4235 compare the 2 values... */
4236 || (gimple_vdef (call_stmt)
4237 /* ... unless the call returns a pointer which does
4238 not alias with anything else, in which case the
4239 information that the values are distinct is encoded
4240 in the IL. */
4241 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4242 /* Only perform the following when being called from PRE
4243 which embeds tail merging. */
4244 && default_vn_walk_kind == VN_WALK)))
4245 changed = visit_reference_op_call (lhs, call_stmt);
4246 else
4247 changed = defs_to_varying (call_stmt);
4249 else
4250 changed = defs_to_varying (stmt);
4251 done:
4252 return changed;
4255 /* Compare two operands by reverse postorder index. */
4257 static int
4258 compare_ops (const void *pa, const void *pb)
4260 const tree opa = *((const tree *)pa);
4261 const tree opb = *((const tree *)pb);
4262 gimple *opstmta = SSA_NAME_DEF_STMT (opa);
4263 gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
4264 basic_block bba;
4265 basic_block bbb;
4267 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
4268 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4269 else if (gimple_nop_p (opstmta))
4270 return -1;
4271 else if (gimple_nop_p (opstmtb))
4272 return 1;
4274 bba = gimple_bb (opstmta);
4275 bbb = gimple_bb (opstmtb);
4277 if (!bba && !bbb)
4278 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4279 else if (!bba)
4280 return -1;
4281 else if (!bbb)
4282 return 1;
4284 if (bba == bbb)
4286 if (gimple_code (opstmta) == GIMPLE_PHI
4287 && gimple_code (opstmtb) == GIMPLE_PHI)
4288 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4289 else if (gimple_code (opstmta) == GIMPLE_PHI)
4290 return -1;
4291 else if (gimple_code (opstmtb) == GIMPLE_PHI)
4292 return 1;
4293 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
4294 return gimple_uid (opstmta) - gimple_uid (opstmtb);
4295 else
4296 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4298 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
4301 /* Sort an array containing members of a strongly connected component
4302 SCC so that the members are ordered by RPO number.
4303 This means that when the sort is complete, iterating through the
4304 array will give you the members in RPO order. */
4306 static void
4307 sort_scc (vec<tree> scc)
4309 scc.qsort (compare_ops);
4312 /* Insert the no longer used nary ONARY into the hash tables INFO. */
4314 static void
4315 copy_nary (vn_nary_op_t onary, vn_tables_t info)
4317 size_t size = sizeof_vn_nary_op (onary->length);
4318 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
4319 &info->nary_obstack);
4320 memcpy (nary, onary, size);
4321 vn_nary_op_insert_into (nary, info->nary, false);
4324 /* Insert the no longer used phi OPHI into the hash tables INFO. */
4326 static void
4327 copy_phi (vn_phi_t ophi, vn_tables_t info)
4329 vn_phi_t phi = info->phis_pool->allocate ();
4330 vn_phi_s **slot;
4331 memcpy (phi, ophi, sizeof (*phi));
4332 ophi->phiargs.create (0);
4333 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
4334 gcc_assert (!*slot);
4335 *slot = phi;
4338 /* Insert the no longer used reference OREF into the hash tables INFO. */
4340 static void
4341 copy_reference (vn_reference_t oref, vn_tables_t info)
4343 vn_reference_t ref;
4344 vn_reference_s **slot;
4345 ref = info->references_pool->allocate ();
4346 memcpy (ref, oref, sizeof (*ref));
4347 oref->operands.create (0);
4348 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
4349 if (*slot)
4350 free_reference (*slot);
4351 *slot = ref;
4354 /* Process a strongly connected component in the SSA graph. */
4356 static void
4357 process_scc (vec<tree> scc)
4359 tree var;
4360 unsigned int i;
4361 unsigned int iterations = 0;
4362 bool changed = true;
4363 vn_nary_op_iterator_type hin;
4364 vn_phi_iterator_type hip;
4365 vn_reference_iterator_type hir;
4366 vn_nary_op_t nary;
4367 vn_phi_t phi;
4368 vn_reference_t ref;
4370 /* If the SCC has a single member, just visit it. */
4371 if (scc.length () == 1)
4373 tree use = scc[0];
4374 if (VN_INFO (use)->use_processed)
4375 return;
4376 /* We need to make sure it doesn't form a cycle itself, which can
4377 happen for self-referential PHI nodes. In that case we would
4378 end up inserting an expression with VN_TOP operands into the
4379 valid table which makes us derive bogus equivalences later.
4380 The cheapest way to check this is to assume it for all PHI nodes. */
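/* An illustrative (made-up) example of such a single-node cycle:
   i_3 = PHI <0(2), i_3(4)> uses its own result as an argument.  */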
4381 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
4382 /* Fallthru to iteration. */ ;
4383 else
4385 visit_use (use);
4386 return;
4390 if (dump_file && (dump_flags & TDF_DETAILS))
4391 print_scc (dump_file, scc);
4393 /* Iterate over the SCC with the optimistic table until it stops
4394 changing. */
4395 current_info = optimistic_info;
4396 while (changed)
4398 changed = false;
4399 iterations++;
4400 if (dump_file && (dump_flags & TDF_DETAILS))
4401 fprintf (dump_file, "Starting iteration %d\n", iterations);
4402 /* As we are value-numbering optimistically we have to
4403 clear the expression tables and the simplified expressions
4404 in each iteration until we converge. */
4405 optimistic_info->nary->empty ();
4406 optimistic_info->phis->empty ();
4407 optimistic_info->references->empty ();
4408 obstack_free (&optimistic_info->nary_obstack, NULL);
4409 gcc_obstack_init (&optimistic_info->nary_obstack);
4410 optimistic_info->phis_pool->release ();
4411 optimistic_info->references_pool->release ();
4412 FOR_EACH_VEC_ELT (scc, i, var)
4413 gcc_assert (!VN_INFO (var)->needs_insertion
4414 && VN_INFO (var)->expr == NULL);
4415 FOR_EACH_VEC_ELT (scc, i, var)
4416 changed |= visit_use (var);
4419 if (dump_file && (dump_flags & TDF_DETAILS))
4420 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4421 statistics_histogram_event (cfun, "SCC iterations", iterations);
4423 /* Finally, copy the contents of the no longer used optimistic
4424 table to the valid table. */
4425 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
4426 copy_nary (nary, valid_info);
4427 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
4428 copy_phi (phi, valid_info);
4429 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
4430 ref, vn_reference_t, hir)
4431 copy_reference (ref, valid_info);
4433 current_info = valid_info;
4437 /* Pop the components of the found SCC for NAME off the SCC stack
4438 and process them. If the SCC is larger than PARAM_SCCVN_MAX_SCC_SIZE
4439 its members are dropped to varying instead of being iterated. */
4441 static void
4442 extract_and_process_scc_for_name (tree name)
4444 auto_vec<tree> scc;
4445 tree x;
4447 /* Found an SCC, pop the components off the SCC stack and
4448 process them. */
4451 x = sccstack.pop ();
4453 VN_INFO (x)->on_sccstack = false;
4454 scc.safe_push (x);
4455 } while (x != name);
4457 /* Drop all defs in the SCC to varying in case a SCC turns out to be
4458 incredibly large.
4459 ??? Just switch to a non-optimistic mode that avoids any iteration. */
4460 if (scc.length () > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4462 if (dump_file)
4464 print_scc (dump_file, scc);
4465 fprintf (dump_file, "WARNING: Giving up value-numbering SCC due to "
4466 "size %u exceeding %u\n", scc.length (),
4467 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4469 tree var;
4470 unsigned i;
4471 FOR_EACH_VEC_ELT (scc, i, var)
4473 gimple *def = SSA_NAME_DEF_STMT (var);
4474 mark_use_processed (var);
4475 if (SSA_NAME_IS_DEFAULT_DEF (var)
4476 || gimple_code (def) == GIMPLE_PHI)
4477 set_ssa_val_to (var, var);
4478 else
4479 defs_to_varying (def);
4481 return;
4484 if (scc.length () > 1)
4485 sort_scc (scc);
4487 process_scc (scc);
4490 /* Depth first search on NAME to discover and process SCC's in the SSA
4491 graph.
4492 Execution of this algorithm relies on the fact that the SCC's are
4493 popped off the stack in topological order. */
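/* The walk below is essentially Tarjan's SCC algorithm, done iteratively
   with explicit name and iterator stacks instead of recursion: ->dfsnum
   is the DFS preorder number, ->low the smallest dfsnum reachable from
   the name, and a name whose low equals its dfsnum once all its uses are
   processed is the root of an SCC which is then popped off sccstack.  */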
4497 static void
4498 DFS (tree name)
4500 auto_vec<ssa_op_iter> itervec;
4501 auto_vec<tree> namevec;
4502 use_operand_p usep = NULL;
4503 gimple *defstmt;
4504 tree use;
4505 ssa_op_iter iter;
4507 start_over:
4508 /* SCC info */
4509 VN_INFO (name)->dfsnum = next_dfs_num++;
4510 VN_INFO (name)->visited = true;
4511 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4513 sccstack.safe_push (name);
4514 VN_INFO (name)->on_sccstack = true;
4515 defstmt = SSA_NAME_DEF_STMT (name);
4517 /* Recursively DFS on our operands, looking for SCC's. */
4518 if (!gimple_nop_p (defstmt))
4520 /* Push a new iterator. */
4521 if (gphi *phi = dyn_cast <gphi *> (defstmt))
4522 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4523 else
4524 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4526 else
4527 clear_and_done_ssa_iter (&iter);
4529 while (1)
4531 /* If we are done processing uses of a name, go up the stack
4532 of iterators and process SCCs as we found them. */
4533 if (op_iter_done (&iter))
4535 /* See if we found an SCC. */
4536 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4537 extract_and_process_scc_for_name (name);
4539 /* Check if we are done. */
4540 if (namevec.is_empty ())
4541 return;
4543 /* Restore the last use walker and continue walking there. */
4544 use = name;
4545 name = namevec.pop ();
4546 memcpy (&iter, &itervec.last (),
4547 sizeof (ssa_op_iter));
4548 itervec.pop ();
4549 goto continue_walking;
4552 use = USE_FROM_PTR (usep);
4554 /* Since we handle phi nodes, we will sometimes get
4555 invariants in the use expression. */
4556 if (TREE_CODE (use) == SSA_NAME)
4558 if (! (VN_INFO (use)->visited))
4560 /* Recurse by pushing the current use walking state on
4561 the stack and starting over. */
4562 itervec.safe_push (iter);
4563 namevec.safe_push (name);
4564 name = use;
4565 goto start_over;
4567 continue_walking:
4568 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4569 VN_INFO (use)->low);
4571 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4572 && VN_INFO (use)->on_sccstack)
4574 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4575 VN_INFO (name)->low);
4579 usep = op_iter_next_use (&iter);
4583 /* Allocate a value number table. */
4585 static void
4586 allocate_vn_table (vn_tables_t table)
4588 table->phis = new vn_phi_table_type (23);
4589 table->nary = new vn_nary_op_table_type (23);
4590 table->references = new vn_reference_table_type (23);
4592 gcc_obstack_init (&table->nary_obstack);
4593 table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4594 table->references_pool = new object_allocator<vn_reference_s>
4595 ("VN references");
4598 /* Free a value number table. */
4600 static void
4601 free_vn_table (vn_tables_t table)
4603 delete table->phis;
4604 table->phis = NULL;
4605 delete table->nary;
4606 table->nary = NULL;
4607 delete table->references;
4608 table->references = NULL;
4609 obstack_free (&table->nary_obstack, NULL);
4610 delete table->phis_pool;
4611 delete table->references_pool;
4614 static void
4615 init_scc_vn (void)
4617 int j;
4618 int *rpo_numbers_temp;
4620 calculate_dominance_info (CDI_DOMINATORS);
4621 mark_dfs_back_edges ();
4623 sccstack.create (0);
4624 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4626 constant_value_ids = BITMAP_ALLOC (NULL);
4628 next_dfs_num = 1;
4629 next_value_id = 1;
4631 vn_ssa_aux_table.create (num_ssa_names + 1);
4632 /* The create call doesn't actually grow the vector to the right size;
4633 it just preallocates the space to do so. */
4634 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4635 gcc_obstack_init (&vn_ssa_aux_obstack);
4637 shared_lookup_phiargs.create (0);
4638 shared_lookup_references.create (0);
4639 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4640 rpo_numbers_temp =
4641 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4642 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4644 /* rpo_numbers_temp is an array in RPO order: rpo_numbers_temp[i] = bb
4645 means that the i'th block in RPO order is bb. We want to map bb
4646 indices to RPO numbers, so we need to invert this array. */
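/* An illustrative (made-up) example: if rpo_numbers_temp is {2, 5, 3}
   then block 2 gets RPO number 0, block 5 gets 1 and block 3 gets 2.  */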
4647 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4648 rpo_numbers[rpo_numbers_temp[j]] = j;
4650 XDELETE (rpo_numbers_temp);
4652 VN_TOP = build_decl (UNKNOWN_LOCATION, VAR_DECL,
4653 get_identifier ("VN_TOP"), error_mark_node);
4655 renumber_gimple_stmt_uids ();
4657 /* Create the valid and optimistic value numbering tables. */
4658 valid_info = XCNEW (struct vn_tables_s);
4659 allocate_vn_table (valid_info);
4660 optimistic_info = XCNEW (struct vn_tables_s);
4661 allocate_vn_table (optimistic_info);
4662 current_info = valid_info;
4664 /* Create the VN_INFO structures, and initialize value numbers to
4665 TOP or VARYING for parameters. */
4666 size_t i;
4667 tree name;
4669 FOR_EACH_SSA_NAME (i, name, cfun)
4671 VN_INFO_GET (name)->valnum = VN_TOP;
4672 VN_INFO (name)->needs_insertion = false;
4673 VN_INFO (name)->expr = NULL;
4674 VN_INFO (name)->value_id = 0;
4676 if (!SSA_NAME_IS_DEFAULT_DEF (name))
4677 continue;
4679 switch (TREE_CODE (SSA_NAME_VAR (name)))
4681 case VAR_DECL:
4682 /* All undefined vars are VARYING. */
4683 VN_INFO (name)->valnum = name;
4684 VN_INFO (name)->visited = true;
4685 break;
4687 case PARM_DECL:
4688 /* Parameters are VARYING but we can record a condition
4689 if we know it is a non-NULL pointer. */
4690 VN_INFO (name)->visited = true;
4691 VN_INFO (name)->valnum = name;
4692 if (POINTER_TYPE_P (TREE_TYPE (name))
4693 && nonnull_arg_p (SSA_NAME_VAR (name)))
4695 tree ops[2];
4696 ops[0] = name;
4697 ops[1] = build_int_cst (TREE_TYPE (name), 0);
4698 vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4699 boolean_true_node, 0);
4700 if (dump_file && (dump_flags & TDF_DETAILS))
4702 fprintf (dump_file, "Recording ");
4703 print_generic_expr (dump_file, name, TDF_SLIM);
4704 fprintf (dump_file, " != 0\n");
4707 break;
4709 case RESULT_DECL:
4710 /* If the result is passed by invisible reference the default
4711 def is initialized, otherwise it's uninitialized. Either
4712 way it is treated as varying here. */
4713 VN_INFO (name)->visited = true;
4714 VN_INFO (name)->valnum = name;
4715 break;
4717 default:
4718 gcc_unreachable ();
4723 /* Restore SSA info that has been reset on value leaders. */
4725 void
4726 scc_vn_restore_ssa_info (void)
4728 unsigned i;
4729 tree name;
4731 FOR_EACH_SSA_NAME (i, name, cfun)
4733 if (has_VN_INFO (name))
4735 if (VN_INFO (name)->needs_insertion)
4737 else if (POINTER_TYPE_P (TREE_TYPE (name))
4738 && VN_INFO (name)->info.ptr_info)
4739 SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4740 else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4741 && VN_INFO (name)->info.range_info)
4743 SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4744 SSA_NAME_ANTI_RANGE_P (name)
4745 = VN_INFO (name)->range_info_anti_range_p;
4751 void
4752 free_scc_vn (void)
4754 size_t i;
4755 tree name;
4757 delete constant_to_value_id;
4758 constant_to_value_id = NULL;
4759 BITMAP_FREE (constant_value_ids);
4760 shared_lookup_phiargs.release ();
4761 shared_lookup_references.release ();
4762 XDELETEVEC (rpo_numbers);
4764 FOR_EACH_SSA_NAME (i, name, cfun)
4766 if (has_VN_INFO (name)
4767 && VN_INFO (name)->needs_insertion)
4768 release_ssa_name (name);
4770 obstack_free (&vn_ssa_aux_obstack, NULL);
4771 vn_ssa_aux_table.release ();
4773 sccstack.release ();
4774 free_vn_table (valid_info);
4775 XDELETE (valid_info);
4776 free_vn_table (optimistic_info);
4777 XDELETE (optimistic_info);
4779 BITMAP_FREE (const_parms);
4782 /* Set *ID according to RESULT. */
4784 static void
4785 set_value_id_for_result (tree result, unsigned int *id)
4787 if (result && TREE_CODE (result) == SSA_NAME)
4788 *id = VN_INFO (result)->value_id;
4789 else if (result && is_gimple_min_invariant (result))
4790 *id = get_or_alloc_constant_value_id (result);
4791 else
4792 *id = get_next_value_id ();
4795 /* Set the value ids in the valid hash tables. */
4797 static void
4798 set_hashtable_value_ids (void)
4800 vn_nary_op_iterator_type hin;
4801 vn_phi_iterator_type hip;
4802 vn_reference_iterator_type hir;
4803 vn_nary_op_t vno;
4804 vn_reference_t vr;
4805 vn_phi_t vp;
4807 /* Now set the value ids of the things we had put in the hash
4808 table. */
4810 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4811 set_value_id_for_result (vno->result, &vno->value_id);
4813 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4814 set_value_id_for_result (vp->result, &vp->value_id);
4816 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4817 hir)
4818 set_value_id_for_result (vr->result, &vr->value_id);
4821 class sccvn_dom_walker : public dom_walker
4823 public:
4824 sccvn_dom_walker ()
4825 : dom_walker (CDI_DOMINATORS, REACHABLE_BLOCKS), cond_stack (0) {}
4827 virtual edge before_dom_children (basic_block);
4828 virtual void after_dom_children (basic_block);
4830 void record_cond (basic_block,
4831 enum tree_code code, tree lhs, tree rhs, bool value);
4832 void record_conds (basic_block,
4833 enum tree_code code, tree lhs, tree rhs, bool value);
4835 auto_vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4836 cond_stack;
4839 /* Record a temporary condition for the BB and its dominated blocks. */
4841 void
4842 sccvn_dom_walker::record_cond (basic_block bb,
4843 enum tree_code code, tree lhs, tree rhs,
4844 bool value)
4846 tree ops[2] = { lhs, rhs };
4847 vn_nary_op_t old = NULL;
4848 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4849 current_info->nary->remove_elt_with_hash (old, old->hashcode);
4850 vn_nary_op_t cond
4851 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4852 value
4853 ? boolean_true_node
4854 : boolean_false_node, 0);
4855 if (dump_file && (dump_flags & TDF_DETAILS))
4857 fprintf (dump_file, "Recording temporarily ");
4858 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4859 fprintf (dump_file, " %s ", get_tree_code_name (code));
4860 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4861 fprintf (dump_file, " == %s%s\n",
4862 value ? "true" : "false",
4863 old ? " (old entry saved)" : "");
4865 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4868 /* Record temporary conditions for the BB and its dominated blocks
4869 according to LHS CODE RHS == VALUE and its dominated conditions. */
4871 void
4872 sccvn_dom_walker::record_conds (basic_block bb,
4873 enum tree_code code, tree lhs, tree rhs,
4874 bool value)
4876 /* Record the original condition. */
4877 record_cond (bb, code, lhs, rhs, value);
4879 if (!value)
4880 return;
4882 /* Record dominated conditions if the condition is true. Note that
4883 the inversion is already recorded. */
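/* For example (with made-up SSA names), from a_1 < b_2 being true we
   additionally record a_1 <= b_2 == true, a_1 != b_2 == true and
   a_1 == b_2 == false.  */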
4884 switch (code)
4886 case LT_EXPR:
4887 case GT_EXPR:
4888 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4889 record_cond (bb, NE_EXPR, lhs, rhs, true);
4890 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4891 break;
4893 case EQ_EXPR:
4894 record_cond (bb, LE_EXPR, lhs, rhs, true);
4895 record_cond (bb, GE_EXPR, lhs, rhs, true);
4896 record_cond (bb, LT_EXPR, lhs, rhs, false);
4897 record_cond (bb, GT_EXPR, lhs, rhs, false);
4898 break;
4900 default:
4901 break;
4905 /* Restore expressions and values derived from conditionals. */
4907 void
4908 sccvn_dom_walker::after_dom_children (basic_block bb)
4910 while (!cond_stack.is_empty ()
4911 && cond_stack.last ().first == bb)
4913 vn_nary_op_t cond = cond_stack.last ().second.first;
4914 vn_nary_op_t old = cond_stack.last ().second.second;
4915 current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4916 if (old)
4917 vn_nary_op_insert_into (old, current_info->nary, false);
4918 cond_stack.pop ();
4922 /* Value number all statements in BB. */
4924 edge
4925 sccvn_dom_walker::before_dom_children (basic_block bb)
4927 if (dump_file && (dump_flags & TDF_DETAILS))
4928 fprintf (dump_file, "Visiting BB %d\n", bb->index);
4930 /* If we have a single predecessor record the equivalence from a
4931 possible condition on the predecessor edge. */
4932 edge pred_e = single_pred_edge_ignoring_loop_edges (bb, false);
4933 if (pred_e)
4935 /* Check if there are multiple executable successor edges in
4936 the source block. Otherwise there is no additional info
4937 to be recorded. */
4938 edge_iterator ei;
4939 edge e2;
4940 FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4941 if (e2 != pred_e
4942 && e2->flags & EDGE_EXECUTABLE)
4943 break;
4944 if (e2 && (e2->flags & EDGE_EXECUTABLE))
4946 gimple *stmt = last_stmt (pred_e->src);
4947 if (stmt
4948 && gimple_code (stmt) == GIMPLE_COND)
4950 enum tree_code code = gimple_cond_code (stmt);
4951 tree lhs = gimple_cond_lhs (stmt);
4952 tree rhs = gimple_cond_rhs (stmt);
4953 record_conds (bb, code, lhs, rhs,
4954 (pred_e->flags & EDGE_TRUE_VALUE) != 0);
4955 code = invert_tree_comparison (code, HONOR_NANS (lhs));
4956 if (code != ERROR_MARK)
4957 record_conds (bb, code, lhs, rhs,
4958 (pred_e->flags & EDGE_TRUE_VALUE) == 0);
4963 /* Value-number all defs in the basic-block. */
4964 for (gphi_iterator gsi = gsi_start_phis (bb);
4965 !gsi_end_p (gsi); gsi_next (&gsi))
4967 gphi *phi = gsi.phi ();
4968 tree res = PHI_RESULT (phi);
4969 if (!VN_INFO (res)->visited)
4970 DFS (res);
4972 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4973 !gsi_end_p (gsi); gsi_next (&gsi))
4975 ssa_op_iter i;
4976 tree op;
4977 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4978 if (!VN_INFO (op)->visited)
4979 DFS (op);
4982 /* Finally look at the last stmt. */
4983 gimple *stmt = last_stmt (bb);
4984 if (!stmt)
4985 return NULL;
4987 enum gimple_code code = gimple_code (stmt);
4988 if (code != GIMPLE_COND
4989 && code != GIMPLE_SWITCH
4990 && code != GIMPLE_GOTO)
4991 return NULL;
4993 if (dump_file && (dump_flags & TDF_DETAILS))
4995 fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
4996 print_gimple_stmt (dump_file, stmt, 0);
4999 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
5000 if value-numbering can prove they are not reachable. Handling
5001 computed gotos is also possible. */
5002 tree val;
5003 switch (code)
5005 case GIMPLE_COND:
5007 tree lhs = vn_valueize (gimple_cond_lhs (stmt));
5008 tree rhs = vn_valueize (gimple_cond_rhs (stmt));
5009 val = gimple_simplify (gimple_cond_code (stmt),
5010 boolean_type_node, lhs, rhs,
5011 NULL, vn_valueize);
5012 /* If that didn't simplify to a constant see if we have recorded
5013 temporary expressions from taken edges. */
5014 if (!val || TREE_CODE (val) != INTEGER_CST)
5016 tree ops[2];
5017 ops[0] = lhs;
5018 ops[1] = rhs;
5019 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
5020 boolean_type_node, ops, NULL);
5022 break;
5024 case GIMPLE_SWITCH:
5025 val = gimple_switch_index (as_a <gswitch *> (stmt));
5026 break;
5027 case GIMPLE_GOTO:
5028 val = gimple_goto_dest (stmt);
5029 break;
5030 default:
5031 gcc_unreachable ();
5033 if (!val)
5034 return NULL;
5036 edge taken = find_taken_edge (bb, vn_valueize (val));
5037 if (!taken)
5038 return NULL;
5040 if (dump_file && (dump_flags & TDF_DETAILS))
5041 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
5042 "not executable\n", bb->index, bb->index, taken->dest->index);
5044 return taken;
5047 /* Do SCCVN. DEFAULT_VN_WALK_KIND_ specifies how we use the alias
5048 oracle walking during the VN process. */
5051 void
5052 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
5054 size_t i;
5056 default_vn_walk_kind = default_vn_walk_kind_;
5058 init_scc_vn ();
5060 /* Collect pointers we know point to readonly memory. */
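/* Roughly: the "fn spec" attribute string describes the return value in
   its first character and the arguments in the following ones; an 'R' or
   'r' for a pointer argument means the function only reads through it,
   so its default def is recorded in const_parms below.  */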
5061 const_parms = BITMAP_ALLOC (NULL);
5062 tree fnspec = lookup_attribute ("fn spec",
5063 TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
5064 if (fnspec)
5066 fnspec = TREE_VALUE (TREE_VALUE (fnspec));
5067 i = 1;
5068 for (tree arg = DECL_ARGUMENTS (cfun->decl);
5069 arg; arg = DECL_CHAIN (arg), ++i)
5071 if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
5072 break;
5073 if (TREE_STRING_POINTER (fnspec)[i] == 'R'
5074 || TREE_STRING_POINTER (fnspec)[i] == 'r')
5076 tree name = ssa_default_def (cfun, arg);
5077 if (name)
5078 bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
5083 /* Walk all blocks in dominator order, value-numbering stmts
5084 SSA defs and decide whether outgoing edges are not executable. */
5085 sccvn_dom_walker walker;
5086 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5088 /* Initialize the value ids and prune out remaining VN_TOPs
5089 from dead code. */
5090 tree name;
5091 FOR_EACH_SSA_NAME (i, name, cfun)
5093 vn_ssa_aux_t info = VN_INFO (name);
5094 if (!info->visited
5095 || info->valnum == VN_TOP)
5096 info->valnum = name;
5097 if (info->valnum == name)
5098 info->value_id = get_next_value_id ();
5099 else if (is_gimple_min_invariant (info->valnum))
5100 info->value_id = get_or_alloc_constant_value_id (info->valnum);
5103 /* Propagate. */
5104 FOR_EACH_SSA_NAME (i, name, cfun)
5106 vn_ssa_aux_t info = VN_INFO (name);
5107 if (TREE_CODE (info->valnum) == SSA_NAME
5108 && info->valnum != name
5109 && info->value_id != VN_INFO (info->valnum)->value_id)
5110 info->value_id = VN_INFO (info->valnum)->value_id;
5113 set_hashtable_value_ids ();
5115 if (dump_file && (dump_flags & TDF_DETAILS))
5117 fprintf (dump_file, "Value numbers:\n");
5118 FOR_EACH_SSA_NAME (i, name, cfun)
5120 if (VN_INFO (name)->visited
5121 && SSA_VAL (name) != name)
5123 print_generic_expr (dump_file, name);
5124 fprintf (dump_file, " = ");
5125 print_generic_expr (dump_file, SSA_VAL (name));
5126 fprintf (dump_file, "\n");
5132 /* Return the maximum value id we have ever seen. */
5134 unsigned int
5135 get_max_value_id (void)
5137 return next_value_id;
5140 /* Return the next unique value id. */
5142 unsigned int
5143 get_next_value_id (void)
5145 return next_value_id++;
5149 /* Compare two expressions E1 and E2 and return true if they are equal. */
5151 bool
5152 expressions_equal_p (tree e1, tree e2)
5154 /* The obvious case. */
5155 if (e1 == e2)
5156 return true;
5158 /* If either one is VN_TOP consider them equal. */
5159 if (e1 == VN_TOP || e2 == VN_TOP)
5160 return true;
5162 /* If only one of them is null, they cannot be equal. */
5163 if (!e1 || !e2)
5164 return false;
5166 /* Now perform the actual comparison. */
5167 if (TREE_CODE (e1) == TREE_CODE (e2)
5168 && operand_equal_p (e1, e2, OEP_PURE_SAME))
5169 return true;
5171 return false;
5175 /* Return true if the nary operation NARY may trap. This is a copy
5176 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
5178 bool
5179 vn_nary_may_trap (vn_nary_op_t nary)
5181 tree type;
5182 tree rhs2 = NULL_TREE;
5183 bool honor_nans = false;
5184 bool honor_snans = false;
5185 bool fp_operation = false;
5186 bool honor_trapv = false;
5187 bool handled, ret;
5188 unsigned i;
5190 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5191 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5192 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5194 type = nary->type;
5195 fp_operation = FLOAT_TYPE_P (type);
5196 if (fp_operation)
5198 honor_nans = flag_trapping_math && !flag_finite_math_only;
5199 honor_snans = flag_signaling_nans != 0;
5201 else if (INTEGRAL_TYPE_P (type)
5202 && TYPE_OVERFLOW_TRAPS (type))
5203 honor_trapv = true;
5205 if (nary->length >= 2)
5206 rhs2 = nary->op[1];
5207 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5208 honor_trapv,
5209 honor_nans, honor_snans, rhs2,
5210 &handled);
5211 if (handled
5212 && ret)
5213 return true;
5215 for (i = 0; i < nary->length; ++i)
5216 if (tree_could_trap_p (nary->op[i]))
5217 return true;
5219 return false;
5223 class eliminate_dom_walker : public dom_walker
5225 public:
5226 eliminate_dom_walker (cdi_direction, bitmap);
5227 ~eliminate_dom_walker ();
5229 virtual edge before_dom_children (basic_block);
5230 virtual void after_dom_children (basic_block);
5232 tree eliminate_avail (tree op);
5233 void eliminate_push_avail (tree op);
5234 tree eliminate_insert (gimple_stmt_iterator *gsi, tree val);
5236 bool do_pre;
5237 unsigned int el_todo;
5238 unsigned int eliminations;
5239 unsigned int insertions;
5241 /* SSA names that had their defs inserted by PRE if do_pre. */
5242 bitmap inserted_exprs;
5244 /* Blocks with statements that have had their EH properties changed. */
5245 bitmap need_eh_cleanup;
5247 /* Blocks with statements that have had their AB properties changed. */
5248 bitmap need_ab_cleanup;
5250 auto_vec<gimple *> to_remove;
5251 auto_vec<gimple *> to_fixup;
5252 auto_vec<tree> avail;
5253 auto_vec<tree> avail_stack;
5256 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
5257 bitmap inserted_exprs_)
5258 : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
5259 el_todo (0), eliminations (0), insertions (0),
5260 inserted_exprs (inserted_exprs_)
5262 need_eh_cleanup = BITMAP_ALLOC (NULL);
5263 need_ab_cleanup = BITMAP_ALLOC (NULL);
5266 eliminate_dom_walker::~eliminate_dom_walker ()
5268 BITMAP_FREE (need_eh_cleanup);
5269 BITMAP_FREE (need_ab_cleanup);
5272 /* Return a leader for OP that is available at the current point of the
5273 eliminate domwalk. */
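/* The leader is an SSA name (or a constant, when the value number is
   constant) carrying the same value whose definition dominates the
   current point; avail is indexed by the SSA version of the value
   number and is maintained as the domwalk descends and unwinds.  */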
5275 tree
5276 eliminate_dom_walker::eliminate_avail (tree op)
5278 tree valnum = VN_INFO (op)->valnum;
5279 if (TREE_CODE (valnum) == SSA_NAME)
5281 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5282 return valnum;
5283 if (avail.length () > SSA_NAME_VERSION (valnum))
5284 return avail[SSA_NAME_VERSION (valnum)];
5286 else if (is_gimple_min_invariant (valnum))
5287 return valnum;
5288 return NULL_TREE;
5291 /* At the current point of the eliminate domwalk make OP available. */
5293 void
5294 eliminate_dom_walker::eliminate_push_avail (tree op)
5296 tree valnum = VN_INFO (op)->valnum;
5297 if (TREE_CODE (valnum) == SSA_NAME)
5299 if (avail.length () <= SSA_NAME_VERSION (valnum))
5300 avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
5301 tree pushop = op;
5302 if (avail[SSA_NAME_VERSION (valnum)])
5303 pushop = avail[SSA_NAME_VERSION (valnum)];
5304 avail_stack.safe_push (pushop);
5305 avail[SSA_NAME_VERSION (valnum)] = op;
5309 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
5310 the leader for the expression if insertion was successful. */
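/* Only simple single-statement expressions are re-materialized here:
   conversions, VIEW_CONVERT_EXPRs, BIT_FIELD_REFs and BIT_AND_EXPRs
   with a constant mask, e.g. (a made-up example) rebuilding (int) x_1
   from an available leader of x_1.  */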
5312 tree
5313 eliminate_dom_walker::eliminate_insert (gimple_stmt_iterator *gsi, tree val)
5315 /* We can insert a sequence with a single assignment only. */
5316 gimple_seq stmts = VN_INFO (val)->expr;
5317 if (!gimple_seq_singleton_p (stmts))
5318 return NULL_TREE;
5319 gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
5320 if (!stmt
5321 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
5322 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
5323 && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
5324 && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
5325 || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
5326 return NULL_TREE;
5328 tree op = gimple_assign_rhs1 (stmt);
5329 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
5330 || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5331 op = TREE_OPERAND (op, 0);
5332 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
5333 if (!leader)
5334 return NULL_TREE;
5336 tree res;
5337 stmts = NULL;
5338 if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5339 res = gimple_build (&stmts, BIT_FIELD_REF,
5340 TREE_TYPE (val), leader,
5341 TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
5342 TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
5343 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
5344 res = gimple_build (&stmts, BIT_AND_EXPR,
5345 TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
5346 else
5347 res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
5348 TREE_TYPE (val), leader);
5349 if (TREE_CODE (res) != SSA_NAME
5350 || SSA_NAME_IS_DEFAULT_DEF (res)
5351 || gimple_bb (SSA_NAME_DEF_STMT (res)))
5353 gimple_seq_discard (stmts);
5355 /* During propagation we have to treat SSA info conservatively
5356 and thus we can end up simplifying the inserted expression
5357 at elimination time to something not defined in stmts. */
5358 /* But then this is a redundancy we failed to detect. Which means
5359 res now has two values. That doesn't play well with how
5360 we track availability here, so give up. */
5361 if (dump_file && (dump_flags & TDF_DETAILS))
5363 if (TREE_CODE (res) == SSA_NAME)
5364 res = eliminate_avail (res);
5365 if (res)
5367 fprintf (dump_file, "Failed to insert expression for value ");
5368 print_generic_expr (dump_file, val);
5369 fprintf (dump_file, " which is really fully redundant to ");
5370 print_generic_expr (dump_file, res);
5371 fprintf (dump_file, "\n");
5375 return NULL_TREE;
5377 else
5379 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
5380 VN_INFO_GET (res)->valnum = val;
5383 insertions++;
5384 if (dump_file && (dump_flags & TDF_DETAILS))
5386 fprintf (dump_file, "Inserted ");
5387 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
5390 return res;
5395 /* Perform elimination for the basic-block B during the domwalk. */
5397 edge
5398 eliminate_dom_walker::before_dom_children (basic_block b)
5400 /* Mark new bb. */
5401 avail_stack.safe_push (NULL_TREE);
5403 /* Skip blocks marked unreachable during the SCCVN domwalk. */
5404 edge_iterator ei;
5405 edge e;
5406 FOR_EACH_EDGE (e, ei, b->preds)
5407 if (e->flags & EDGE_EXECUTABLE)
5408 break;
5409 if (! e)
5410 return NULL;
5412 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
5414 gphi *phi = gsi.phi ();
5415 tree res = PHI_RESULT (phi);
5417 if (virtual_operand_p (res))
5419 gsi_next (&gsi);
5420 continue;
5423 tree sprime = eliminate_avail (res);
5424 if (sprime
5425 && sprime != res)
5427 if (dump_file && (dump_flags & TDF_DETAILS))
5429 fprintf (dump_file, "Replaced redundant PHI node defining ");
5430 print_generic_expr (dump_file, res);
5431 fprintf (dump_file, " with ");
5432 print_generic_expr (dump_file, sprime);
5433 fprintf (dump_file, "\n");
5436 /* If we inserted this PHI node ourselves, it's not an elimination. */
5437 if (! inserted_exprs
5438 || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
5439 eliminations++;
5441 /* If we will propagate into all uses don't bother to do
5442 anything. */
5443 if (may_propagate_copy (res, sprime))
5445 /* Mark the PHI for removal. */
5446 to_remove.safe_push (phi);
5447 gsi_next (&gsi);
5448 continue;
5451 remove_phi_node (&gsi, false);
5453 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
5454 sprime = fold_convert (TREE_TYPE (res), sprime);
5455 gimple *stmt = gimple_build_assign (res, sprime);
5456 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
5457 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
5458 continue;
5461 eliminate_push_avail (res);
5462 gsi_next (&gsi);
5465 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
5466 !gsi_end_p (gsi);
5467 gsi_next (&gsi))
5469 tree sprime = NULL_TREE;
5470 gimple *stmt = gsi_stmt (gsi);
5471 tree lhs = gimple_get_lhs (stmt);
5472 if (lhs && TREE_CODE (lhs) == SSA_NAME
5473 && !gimple_has_volatile_ops (stmt)
5474 /* See PR43491. Do not replace a global register variable when
5475 it is the RHS of an assignment. Do replace local register
5476 variables since gcc does not guarantee a local variable will
5477 be allocated in a register.
5478 ??? The fix isn't effective here. This should instead
5479 be ensured by not value-numbering them the same but treating
5480 them like volatiles? */
5481 && !(gimple_assign_single_p (stmt)
5482 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
5483 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
5484 && is_global_var (gimple_assign_rhs1 (stmt)))))
5486 sprime = eliminate_avail (lhs);
5487 if (!sprime)
5489 /* If there is no existing usable leader but SCCVN thinks
5490 it has an expression it wants to use as replacement,
5491 insert that. */
5492 tree val = VN_INFO (lhs)->valnum;
5493 if (val != VN_TOP
5494 && TREE_CODE (val) == SSA_NAME
5495 && VN_INFO (val)->needs_insertion
5496 && VN_INFO (val)->expr != NULL
5497 && (sprime = eliminate_insert (&gsi, val)) != NULL_TREE)
5498 eliminate_push_avail (sprime);
5501 /* If this now constitutes a copy, duplicate points-to
5502 and range info appropriately. This is especially
5503 important for inserted code. See tree-ssa-copy.c
5504 for similar code. */
5505 if (sprime
5506 && TREE_CODE (sprime) == SSA_NAME)
5508 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
5509 if (POINTER_TYPE_P (TREE_TYPE (lhs))
5510 && VN_INFO_PTR_INFO (lhs)
5511 && ! VN_INFO_PTR_INFO (sprime))
5513 duplicate_ssa_name_ptr_info (sprime,
5514 VN_INFO_PTR_INFO (lhs));
5515 if (b != sprime_b)
5516 mark_ptr_info_alignment_unknown
5517 (SSA_NAME_PTR_INFO (sprime));
5519 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5520 && VN_INFO_RANGE_INFO (lhs)
5521 && ! VN_INFO_RANGE_INFO (sprime)
5522 && b == sprime_b)
5523 duplicate_ssa_name_range_info (sprime,
5524 VN_INFO_RANGE_TYPE (lhs),
5525 VN_INFO_RANGE_INFO (lhs));
5528 /* Inhibit the use of an inserted PHI on a loop header when
5529 the address of the memory reference is a simple induction
5530 variable. In other cases the vectorizer won't do anything
5531 anyway (either it's loop invariant or a complicated
5532 expression). */
5533 if (sprime
5534 && TREE_CODE (sprime) == SSA_NAME
5535 && do_pre
5536 && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
5537 && loop_outer (b->loop_father)
5538 && has_zero_uses (sprime)
5539 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
5540 && gimple_assign_load_p (stmt))
5542 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
5543 basic_block def_bb = gimple_bb (def_stmt);
5544 if (gimple_code (def_stmt) == GIMPLE_PHI
5545 && def_bb->loop_father->header == def_bb)
5547 loop_p loop = def_bb->loop_father;
5548 ssa_op_iter iter;
5549 tree op;
5550 bool found = false;
5551 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5553 affine_iv iv;
5554 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
5555 if (def_bb
5556 && flow_bb_inside_loop_p (loop, def_bb)
5557 && simple_iv (loop, loop, op, &iv, true))
5559 found = true;
5560 break;
5563 if (found)
5565 if (dump_file && (dump_flags & TDF_DETAILS))
5567 fprintf (dump_file, "Not replacing ");
5568 print_gimple_expr (dump_file, stmt, 0);
5569 fprintf (dump_file, " with ");
5570 print_generic_expr (dump_file, sprime);
5571 fprintf (dump_file, " which would add a loop"
5572 " carried dependence to loop %d\n",
5573 loop->num);
5575 /* Don't keep sprime available. */
5576 sprime = NULL_TREE;
5581 if (sprime)
5583 /* If we can propagate the value computed for LHS into
5584 all uses don't bother doing anything with this stmt. */
5585 if (may_propagate_copy (lhs, sprime))
5587 /* Mark it for removal. */
5588 to_remove.safe_push (stmt);
5590 /* ??? Don't count copy/constant propagations. */
5591 if (gimple_assign_single_p (stmt)
5592 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5593 || gimple_assign_rhs1 (stmt) == sprime))
5594 continue;
5596 if (dump_file && (dump_flags & TDF_DETAILS))
5598 fprintf (dump_file, "Replaced ");
5599 print_gimple_expr (dump_file, stmt, 0);
5600 fprintf (dump_file, " with ");
5601 print_generic_expr (dump_file, sprime);
5602 fprintf (dump_file, " in all uses of ");
5603 print_gimple_stmt (dump_file, stmt, 0);
5606 eliminations++;
5607 continue;
5610 /* If this is an assignment from our leader (which
5611 happens in the case the value-number is a constant)
5612 then there is nothing to do. */
5613 if (gimple_assign_single_p (stmt)
5614 && sprime == gimple_assign_rhs1 (stmt))
5615 continue;
5617 /* Else replace its RHS. */
5618 bool can_make_abnormal_goto
5619 = is_gimple_call (stmt)
5620 && stmt_can_make_abnormal_goto (stmt);
5622 if (dump_file && (dump_flags & TDF_DETAILS))
5624 fprintf (dump_file, "Replaced ");
5625 print_gimple_expr (dump_file, stmt, 0);
5626 fprintf (dump_file, " with ");
5627 print_generic_expr (dump_file, sprime);
5628 fprintf (dump_file, " in ");
5629 print_gimple_stmt (dump_file, stmt, 0);
5632 eliminations++;
5633 gimple *orig_stmt = stmt;
5634 if (!useless_type_conversion_p (TREE_TYPE (lhs),
5635 TREE_TYPE (sprime)))
5636 sprime = fold_convert (TREE_TYPE (lhs), sprime);
5637 tree vdef = gimple_vdef (stmt);
5638 tree vuse = gimple_vuse (stmt);
5639 propagate_tree_value_into_stmt (&gsi, sprime);
5640 stmt = gsi_stmt (gsi);
5641 update_stmt (stmt);
5642 if (vdef != gimple_vdef (stmt))
5643 VN_INFO (vdef)->valnum = vuse;
5645 /* If we removed EH side-effects from the statement, clean
5646 its EH information. */
5647 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
5649 bitmap_set_bit (need_eh_cleanup,
5650 gimple_bb (stmt)->index);
5651 if (dump_file && (dump_flags & TDF_DETAILS))
5652 fprintf (dump_file, " Removed EH side-effects.\n");
5655 /* Likewise for AB side-effects. */
5656 if (can_make_abnormal_goto
5657 && !stmt_can_make_abnormal_goto (stmt))
5659 bitmap_set_bit (need_ab_cleanup,
5660 gimple_bb (stmt)->index);
5661 if (dump_file && (dump_flags & TDF_DETAILS))
5662 fprintf (dump_file, " Removed AB side-effects.\n");
5665 continue;
5669 /* If the statement is a scalar store, see if the expression
5670 has the same value number as its rhs. If so, the store is
5671 dead. */
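/* An illustrative (made-up) example: in a_1 = *p_2; ...; *p_2 = a_1;
   with no intervening clobber of *p_2, the second statement stores back
   the value the location already holds and is queued for removal,
   provided the TBAA check below succeeds.  */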
5672 if (gimple_assign_single_p (stmt)
5673 && !gimple_has_volatile_ops (stmt)
5674 && !is_gimple_reg (gimple_assign_lhs (stmt))
5675 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5676 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
5678 tree val;
5679 tree rhs = gimple_assign_rhs1 (stmt);
5680 vn_reference_t vnresult;
5681 val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
5682 &vnresult, false);
5683 if (TREE_CODE (rhs) == SSA_NAME)
5684 rhs = VN_INFO (rhs)->valnum;
5685 if (val
5686 && operand_equal_p (val, rhs, 0))
5688 /* We can only remove the later store if the former aliases
5689 at least all accesses the later one does or if the store
5690 was to readonly memory storing the same value. */
5691 alias_set_type set = get_alias_set (lhs);
5692 if (! vnresult
5693 || vnresult->set == set
5694 || alias_set_subset_of (set, vnresult->set))
5696 if (dump_file && (dump_flags & TDF_DETAILS))
5698 fprintf (dump_file, "Deleted redundant store ");
5699 print_gimple_stmt (dump_file, stmt, 0);
5702 /* Queue stmt for removal. */
5703 to_remove.safe_push (stmt);
5704 continue;
5709 /* If this is a control statement for which value numbering left
5710 some outgoing edges not executable, force the condition in a
5711 way consistent with that. */
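/* For example, if only the true edge out of this block was marked
   executable, the GIMPLE_COND is folded to a constant-true condition
   and CFG cleanup removes the now dead edge.  */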
5712 if (gcond *cond = dyn_cast <gcond *> (stmt))
5714 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
5715 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
5717 if (dump_file && (dump_flags & TDF_DETAILS))
5719 fprintf (dump_file, "Removing unexecutable edge from ");
5720 print_gimple_stmt (dump_file, stmt, 0);
5722 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
5723 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
5724 gimple_cond_make_true (cond);
5725 else
5726 gimple_cond_make_false (cond);
5727 update_stmt (cond);
5728 el_todo |= TODO_cleanup_cfg;
5729 continue;
5733 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
5734 bool was_noreturn = (is_gimple_call (stmt)
5735 && gimple_call_noreturn_p (stmt));
5736 tree vdef = gimple_vdef (stmt);
5737 tree vuse = gimple_vuse (stmt);
5739 /* If we didn't replace the whole stmt (or propagate the result
5740 into all uses), replace all uses on this stmt with their
5741 leaders. */
5742 bool modified = false;
5743 use_operand_p use_p;
5744 ssa_op_iter iter;
5745 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5747 tree use = USE_FROM_PTR (use_p);
5748 /* ??? The call code above leaves stmt operands un-updated. */
5749 if (TREE_CODE (use) != SSA_NAME)
5750 continue;
5751 tree sprime = eliminate_avail (use);
5752 if (sprime && sprime != use
5753 && may_propagate_copy (use, sprime)
5754 /* We substitute into debug stmts to avoid excessive
5755 debug temporaries created by removed stmts, but we need
5756 to avoid doing so for inserted sprimes as we never want
5757 to create debug temporaries for them. */
5758 && (!inserted_exprs
5759 || TREE_CODE (sprime) != SSA_NAME
5760 || !is_gimple_debug (stmt)
5761 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
5763 propagate_value (use_p, sprime);
5764 modified = true;
5768 /* Fold the stmt if modified; this canonicalizes MEM_REFs we propagated
5769 into, which is a requirement for the IPA devirt machinery. */
5770 gimple *old_stmt = stmt;
5771 if (modified)
5773 /* If a formerly non-invariant ADDR_EXPR is turned into an
5774 invariant one it was on a separate stmt. */
5775 if (gimple_assign_single_p (stmt)
5776 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
5777 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
5778 gimple_stmt_iterator prev = gsi;
5779 gsi_prev (&prev);
5780 if (fold_stmt (&gsi))
5782 /* fold_stmt may have created new stmts in between
5783 the previous stmt and the folded stmt. Mark
5784 all defs created there as varying to not confuse
5785 the SCCVN machinery as we're using that even during
5786 elimination. */
5787 if (gsi_end_p (prev))
5788 prev = gsi_start_bb (b);
5789 else
5790 gsi_next (&prev);
5791 if (gsi_stmt (prev) != gsi_stmt (gsi))
5794 tree def;
5795 ssa_op_iter dit;
5796 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
5797 dit, SSA_OP_ALL_DEFS)
5798 /* As existing DEFs may move between stmts
5799 we have to guard VN_INFO_GET. */
5800 if (! has_VN_INFO (def))
5801 VN_INFO_GET (def)->valnum = def;
5802 if (gsi_stmt (prev) == gsi_stmt (gsi))
5803 break;
5804 gsi_next (&prev);
5806 while (1);
5808 stmt = gsi_stmt (gsi);
5809 /* In case we folded the stmt away, schedule the NOP for removal. */
5810 if (gimple_nop_p (stmt))
5811 to_remove.safe_push (stmt);
5814 /* Visit indirect calls and turn them into direct calls if
5815 possible using the devirtualization machinery. Do this before
5816 checking for required EH/abnormal/noreturn cleanup as devirt
5817 may expose more of those. */
5818 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5820 tree fn = gimple_call_fn (call_stmt);
5821 if (fn
5822 && flag_devirtualize
5823 && virtual_method_call_p (fn))
5825 tree otr_type = obj_type_ref_class (fn);
5826 unsigned HOST_WIDE_INT otr_tok
5827 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
5828 tree instance;
5829 ipa_polymorphic_call_context context (current_function_decl,
5830 fn, stmt, &instance);
5831 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
5832 otr_type, stmt);
5833 bool final;
5834 vec <cgraph_node *> targets
5835 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
5836 otr_tok, context, &final);
5837 if (dump_file)
5838 dump_possible_polymorphic_call_targets (dump_file,
5839 obj_type_ref_class (fn),
5840 otr_tok, context);
5841 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5843 tree fn;
5844 if (targets.length () == 1)
5845 fn = targets[0]->decl;
5846 else
5847 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5848 if (dump_enabled_p ())
5850 location_t loc = gimple_location (stmt);
5851 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
5852 "converting indirect call to "
5853 "function %s\n",
5854 lang_hooks.decl_printable_name (fn, 2));
5856 gimple_call_set_fndecl (call_stmt, fn);
5857 /* If changing the call to __builtin_unreachable
5858 or a similar noreturn function, adjust gimple_call_fntype
5859 too. */
5860 if (gimple_call_noreturn_p (call_stmt)
5861 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
5862 && TYPE_ARG_TYPES (TREE_TYPE (fn))
5863 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
5864 == void_type_node))
5865 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
5866 maybe_remove_unused_call_args (cfun, call_stmt);
5867 modified = true;
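/* A rough sketch of the transformation above (hypothetical C++ source):
   a virtual call such as

       obj->foo ();    an OBJ_TYPE_REF call

   whose dynamic type analysis yields exactly one final target is turned
   into a direct call to that target's decl; with no possible target it
   is redirected to __builtin_unreachable instead.  */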
5872 if (modified)
5874 /* When changing a call into a noreturn call, cfg cleanup
5875 is needed to fix up the noreturn call. */
5876 if (!was_noreturn
5877 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
5878 to_fixup.safe_push (stmt);
5879 /* When changing a condition or switch into one for which we know
5880 which edge will be executed, schedule a cfg cleanup. */
5881 if ((gimple_code (stmt) == GIMPLE_COND
5882 && (gimple_cond_true_p (as_a <gcond *> (stmt))
5883 || gimple_cond_false_p (as_a <gcond *> (stmt))))
5884 || (gimple_code (stmt) == GIMPLE_SWITCH
5885 && TREE_CODE (gimple_switch_index
5886 (as_a <gswitch *> (stmt))) == INTEGER_CST))
5887 el_todo |= TODO_cleanup_cfg;
5888 /* If we removed EH side-effects from the statement, clean
5889 its EH information. */
5890 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
5892 bitmap_set_bit (need_eh_cleanup,
5893 gimple_bb (stmt)->index);
5894 if (dump_file && (dump_flags & TDF_DETAILS))
5895 fprintf (dump_file, " Removed EH side-effects.\n");
5897 /* Likewise for AB side-effects. */
5898 if (can_make_abnormal_goto
5899 && !stmt_can_make_abnormal_goto (stmt))
5901 bitmap_set_bit (need_ab_cleanup,
5902 gimple_bb (stmt)->index);
5903 if (dump_file && (dump_flags & TDF_DETAILS))
5904 fprintf (dump_file, " Removed AB side-effects.\n");
5906 update_stmt (stmt);
5907 if (vdef != gimple_vdef (stmt))
5908 VN_INFO (vdef)->valnum = vuse;
5911 /* Make new values available. For a fully redundant LHS we
5912 already continued to the next stmt above, skipping this. */
5913 def_operand_p defp;
5914 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
5915 eliminate_push_avail (DEF_FROM_PTR (defp));
5918 /* Replace destination PHI arguments. */
5919 FOR_EACH_EDGE (e, ei, b->succs)
5920 if (e->flags & EDGE_EXECUTABLE)
5921 for (gphi_iterator gsi = gsi_start_phis (e->dest);
5922 !gsi_end_p (gsi);
5923 gsi_next (&gsi))
5925 gphi *phi = gsi.phi ();
5926 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
5927 tree arg = USE_FROM_PTR (use_p);
5928 if (TREE_CODE (arg) != SSA_NAME
5929 || virtual_operand_p (arg))
5930 continue;
5931 tree sprime = eliminate_avail (arg);
5932 if (sprime && may_propagate_copy (arg, sprime))
5933 propagate_value (use_p, sprime);
5935 return NULL;
5938 /* Unwind avail_stack: leaders registered in the just-left dominator subtree are no longer available. */
5940 void
5941 eliminate_dom_walker::after_dom_children (basic_block)
5943 tree entry;
5944 while ((entry = avail_stack.pop ()) != NULL_TREE)
5946 tree valnum = VN_INFO (entry)->valnum;
5947 tree old = avail[SSA_NAME_VERSION (valnum)];
5948 if (old == entry)
5949 avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
5950 else
5951 avail[SSA_NAME_VERSION (valnum)] = entry;
5955 /* Eliminate fully redundant computations. */
5957 unsigned int
5958 vn_eliminate (bitmap inserted_exprs)
5960 eliminate_dom_walker el (CDI_DOMINATORS, inserted_exprs);
5961 el.avail.reserve (num_ssa_names);
5963 el.walk (cfun->cfg->x_entry_block_ptr);
5965 /* We cannot remove stmts during the BB walk, especially not release
5966 SSA names there, as this confuses the VN machinery. The stmts
5967 ending up in to_remove are either stores or simple copies.
5968 Remove stmts in reverse order to make debug stmt creation possible. */
5969 while (!el.to_remove.is_empty ())
5971 gimple *stmt = el.to_remove.pop ();
5973 if (dump_file && (dump_flags & TDF_DETAILS))
5975 fprintf (dump_file, "Removing dead stmt ");
5976 print_gimple_stmt (dump_file, stmt, 0, 0);
5979 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5980 if (gimple_code (stmt) == GIMPLE_PHI)
5981 remove_phi_node (&gsi, true);
5982 else
5984 basic_block bb = gimple_bb (stmt);
5985 unlink_stmt_vdef (stmt);
5986 if (gsi_remove (&gsi, true))
5987 bitmap_set_bit (el.need_eh_cleanup, bb->index);
5988 if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
5989 bitmap_set_bit (el.need_ab_cleanup, bb->index);
5990 release_defs (stmt);
5993 /* Removing a stmt may expose a forwarder block. */
5994 el.el_todo |= TODO_cleanup_cfg;
5997 /* Fix up stmts that became noreturn calls. This may require splitting
5998 blocks and thus isn't possible during the dominator walk. Do this
5999 in reverse order so we don't inadvertently remove a stmt we want to
6000 fix up by visiting a dominating now-noreturn call first. */
6001 while (!el.to_fixup.is_empty ())
6003 gimple *stmt = el.to_fixup.pop ();
6005 if (dump_file && (dump_flags & TDF_DETAILS))
6007 fprintf (dump_file, "Fixing up noreturn call ");
6008 print_gimple_stmt (dump_file, stmt, 0);
6011 if (fixup_noreturn_call (stmt))
6012 el.el_todo |= TODO_cleanup_cfg;
6015 bool do_eh_cleanup = !bitmap_empty_p (el.need_eh_cleanup);
6016 bool do_ab_cleanup = !bitmap_empty_p (el.need_ab_cleanup);
6018 if (do_eh_cleanup)
6019 gimple_purge_all_dead_eh_edges (el.need_eh_cleanup);
6021 if (do_ab_cleanup)
6022 gimple_purge_all_dead_abnormal_call_edges (el.need_ab_cleanup);
6024 if (do_eh_cleanup || do_ab_cleanup)
6025 el.el_todo |= TODO_cleanup_cfg;
6027 statistics_counter_event (cfun, "Eliminated", el.eliminations);
6028 statistics_counter_event (cfun, "Insertions", el.insertions);
6030 return el.el_todo;
6034 namespace {
6036 const pass_data pass_data_fre =
6038 GIMPLE_PASS, /* type */
6039 "fre", /* name */
6040 OPTGROUP_NONE, /* optinfo_flags */
6041 TV_TREE_FRE, /* tv_id */
6042 ( PROP_cfg | PROP_ssa ), /* properties_required */
6043 0, /* properties_provided */
6044 0, /* properties_destroyed */
6045 0, /* todo_flags_start */
6046 0, /* todo_flags_finish */
6049 class pass_fre : public gimple_opt_pass
6051 public:
6052 pass_fre (gcc::context *ctxt)
6053 : gimple_opt_pass (pass_data_fre, ctxt)
6056 /* opt_pass methods: */
6057 opt_pass * clone () { return new pass_fre (m_ctxt); }
6058 virtual bool gate (function *) { return flag_tree_fre != 0; }
6059 virtual unsigned int execute (function *);
6061 }; // class pass_fre
6063 unsigned int
6064 pass_fre::execute (function *)
6066 unsigned int todo = 0;
6068 run_scc_vn (VN_WALKREWRITE);
6070 /* Remove all the redundant expressions. */
6071 todo |= vn_eliminate (NULL);
6073 scc_vn_restore_ssa_info ();
6074 free_scc_vn ();
6076 return todo;
6079 } // anon namespace
6081 gimple_opt_pass *
6082 make_pass_fre (gcc::context *ctxt)
6084 return new pass_fre (ctxt);
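/* For context (a sketch, not part of this file): the pass manager creates
   this pass through make_pass_fre and schedules it from passes.def via
   entries of the form

       NEXT_PASS (pass_fre);

   after which pass_fre::execute runs run_scc_vn followed by vn_eliminate
   as shown above.  */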