poly_int: get_addr_base_and_unit_offset
[official-gcc.git] / gcc / tree-ssa-sccvn.c
blobb59d8eb35149112c01e9e252991ba7b8c7772333
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2017 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimplify.h"
45 #include "flags.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "dumpfile.h"
55 #include "cfgloop.h"
56 #include "params.h"
57 #include "tree-ssa-propagate.h"
58 #include "tree-cfg.h"
59 #include "domwalk.h"
60 #include "gimple-iterator.h"
61 #include "gimple-match.h"
62 #include "stringpool.h"
63 #include "attribs.h"
64 #include "tree-pass.h"
65 #include "statistics.h"
66 #include "langhooks.h"
67 #include "ipa-utils.h"
68 #include "dbgcnt.h"
69 #include "tree-cfgcleanup.h"
70 #include "tree-ssa-loop.h"
71 #include "tree-scalar-evolution.h"
72 #include "tree-ssa-sccvn.h"
74 /* This algorithm is based on the SCC algorithm presented by Keith
75 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
76 (http://citeseer.ist.psu.edu/41805.html). In
77 straight line code, it is equivalent to a regular hash based value
78 numbering that is performed in reverse postorder.
80 For code with cycles, there are two alternatives, both of which
81 require keeping the hashtables separate from the actual list of
82 value numbers for SSA names.
84 1. Iterate value numbering in an RPO walk of the blocks, removing
85 all the entries from the hashtable after each iteration (but
86 keeping the SSA name->value number mapping between iterations).
87 Iterate until it does not change.
89 2. Perform value numbering as part of an SCC walk on the SSA graph,
90 iterating only the cycles in the SSA graph until they do not change
91 (using a separate, optimistic hashtable for value numbering the SCC
92 operands).
94 The second is not just faster in practice (because most SSA graph
95 cycles do not involve all the variables in the graph), it also has
96 some nice properties.
98 One of these nice properties is that when we pop an SCC off the
99 stack, we are guaranteed to have processed all the operands coming from
100 *outside of that SCC*, so we do not need to do anything special to
101 ensure they have value numbers.
103 Another nice property is that the SCC walk is done as part of a DFS
104 of the SSA graph, which makes it easy to perform combining and
105 simplifying operations at the same time.
107 The code below is deliberately written in a way that makes it easy
108 to separate the SCC walk from the other work it does.
110 In order to propagate constants through the code, we track which
111 expressions contain constants, and use those while folding. In
112 theory, we could also track expressions whose value numbers are
113 replaced, in case we end up folding based on expression
114 identities.
116 In order to value number memory, we assign value numbers to vuses.
117 This enables us to note that, for example, stores to the same
118 address of the same value from the same starting memory states are
119 equivalent.
120 TODO:
122 1. We can iterate only the changing portions of the SCC's, but
123 I have not seen an SCC big enough for this to be a win.
124 2. If you differentiate between phi nodes for loops and phi nodes
125 for if-then-else, you can properly consider phi nodes in different
126 blocks for equivalence.
127 3. We could value number vuses in more cases, particularly, whole
128 structure copies.
132 static tree *last_vuse_ptr;
133 static vn_lookup_kind vn_walk_kind;
134 static vn_lookup_kind default_vn_walk_kind;
135 bitmap const_parms;
137 /* vn_nary_op hashtable helpers. */
139 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
141 typedef vn_nary_op_s *compare_type;
142 static inline hashval_t hash (const vn_nary_op_s *);
143 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
146 /* Return the computed hashcode for nary operation P1. */
148 inline hashval_t
149 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
151 return vno1->hashcode;
154 /* Compare nary operations P1 and P2 and return true if they are
155 equivalent. */
157 inline bool
158 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
160 return vn_nary_op_eq (vno1, vno2);
163 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
164 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
167 /* vn_phi hashtable helpers. */
169 static int
170 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
172 struct vn_phi_hasher : pointer_hash <vn_phi_s>
174 static inline hashval_t hash (const vn_phi_s *);
175 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
176 static inline void remove (vn_phi_s *);
179 /* Return the computed hashcode for phi operation P1. */
181 inline hashval_t
182 vn_phi_hasher::hash (const vn_phi_s *vp1)
184 return vp1->hashcode;
187 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
189 inline bool
190 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
192 return vn_phi_eq (vp1, vp2);
195 /* Free a phi operation structure VP. */
197 inline void
198 vn_phi_hasher::remove (vn_phi_s *phi)
200 phi->phiargs.release ();
203 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
204 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
207 /* Compare two reference operands P1 and P2 for equality. Return true if
208 they are equal, and false otherwise. */
210 static int
211 vn_reference_op_eq (const void *p1, const void *p2)
213 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
214 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
216 return (vro1->opcode == vro2->opcode
217 /* We do not care for differences in type qualification. */
218 && (vro1->type == vro2->type
219 || (vro1->type && vro2->type
220 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
221 TYPE_MAIN_VARIANT (vro2->type))))
222 && expressions_equal_p (vro1->op0, vro2->op0)
223 && expressions_equal_p (vro1->op1, vro2->op1)
224 && expressions_equal_p (vro1->op2, vro2->op2));
227 /* Free a reference operation structure VP. */
229 static inline void
230 free_reference (vn_reference_s *vr)
232 vr->operands.release ();
236 /* vn_reference hashtable helpers. */
238 struct vn_reference_hasher : pointer_hash <vn_reference_s>
240 static inline hashval_t hash (const vn_reference_s *);
241 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
242 static inline void remove (vn_reference_s *);
245 /* Return the hashcode for a given reference operation P1. */
247 inline hashval_t
248 vn_reference_hasher::hash (const vn_reference_s *vr1)
250 return vr1->hashcode;
253 inline bool
254 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
256 return vn_reference_eq (v, c);
259 inline void
260 vn_reference_hasher::remove (vn_reference_s *v)
262 free_reference (v);
265 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
266 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
269 /* The set of hashtables and alloc_pool's for their items. */
271 typedef struct vn_tables_s
273 vn_nary_op_table_type *nary;
274 vn_phi_table_type *phis;
275 vn_reference_table_type *references;
276 struct obstack nary_obstack;
277 object_allocator<vn_phi_s> *phis_pool;
278 object_allocator<vn_reference_s> *references_pool;
279 } *vn_tables_t;
282 /* vn_constant hashtable helpers. */
284 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
286 static inline hashval_t hash (const vn_constant_s *);
287 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
290 /* Hash table hash function for vn_constant_t. */
292 inline hashval_t
293 vn_constant_hasher::hash (const vn_constant_s *vc1)
295 return vc1->hashcode;
298 /* Hash table equality function for vn_constant_t. */
300 inline bool
301 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
303 if (vc1->hashcode != vc2->hashcode)
304 return false;
306 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
309 static hash_table<vn_constant_hasher> *constant_to_value_id;
310 static bitmap constant_value_ids;
313 /* Valid hashtables storing information we have proven to be
314 correct. */
316 static vn_tables_t valid_info;
318 /* Optimistic hashtables storing information we are making assumptions about
319 during iterations. */
321 static vn_tables_t optimistic_info;
323 /* Pointer to the set of hashtables that is currently being used.
324 Should always point to either the optimistic_info, or the
325 valid_info. */
327 static vn_tables_t current_info;
330 /* Reverse post order index for each basic block. */
332 static int *rpo_numbers;
334 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
336 /* Return the SSA value of the VUSE x, supporting released VDEFs
337 during elimination which will value-number the VDEF to the
338 associated VUSE (but not substitute in the whole lattice). */
340 static inline tree
341 vuse_ssa_val (tree x)
343 if (!x)
344 return NULL_TREE;
348 tree tem = SSA_VAL (x);
349 /* stmt walking can walk over a backedge and reach code we didn't
350 value-number yet. */
351 if (tem == VN_TOP)
352 return x;
353 x = tem;
355 while (SSA_NAME_IN_FREE_LIST (x));
357 return x;
360 /* This represents the top of the VN lattice, which is the universal
361 value. */
363 tree VN_TOP;
365 /* Unique counter for our value ids. */
367 static unsigned int next_value_id;
369 /* Next DFS number and the stack for strongly connected component
370 detection. */
372 static unsigned int next_dfs_num;
373 static vec<tree> sccstack;
377 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
378 are allocated on an obstack for locality reasons, and to free them
379 without looping over the vec. */
381 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
382 static struct obstack vn_ssa_aux_obstack;
384 /* Return whether there is value numbering information for a given SSA name. */
386 bool
387 has_VN_INFO (tree name)
389 if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
390 return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
391 return false;
394 /* Return the value numbering information for a given SSA name. */
396 vn_ssa_aux_t
397 VN_INFO (tree name)
399 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
400 gcc_checking_assert (res);
401 return res;
404 /* Set the value numbering info for a given SSA name to a given
405 value. */
407 static inline void
408 VN_INFO_SET (tree name, vn_ssa_aux_t value)
410 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
413 /* Initialize the value numbering info for a given SSA name.
414 This should be called just once for every SSA name. */
416 vn_ssa_aux_t
417 VN_INFO_GET (tree name)
419 vn_ssa_aux_t newinfo;
421 gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
422 || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
423 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
424 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
425 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
426 vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
427 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
428 return newinfo;
432 /* Return the vn_kind the expression computed by the stmt should be
433 associated with. */
435 enum vn_kind
436 vn_get_stmt_kind (gimple *stmt)
438 switch (gimple_code (stmt))
440 case GIMPLE_CALL:
441 return VN_REFERENCE;
442 case GIMPLE_PHI:
443 return VN_PHI;
444 case GIMPLE_ASSIGN:
446 enum tree_code code = gimple_assign_rhs_code (stmt);
447 tree rhs1 = gimple_assign_rhs1 (stmt);
448 switch (get_gimple_rhs_class (code))
450 case GIMPLE_UNARY_RHS:
451 case GIMPLE_BINARY_RHS:
452 case GIMPLE_TERNARY_RHS:
453 return VN_NARY;
454 case GIMPLE_SINGLE_RHS:
455 switch (TREE_CODE_CLASS (code))
457 case tcc_reference:
458 /* VOP-less references can go through unary case. */
459 if ((code == REALPART_EXPR
460 || code == IMAGPART_EXPR
461 || code == VIEW_CONVERT_EXPR
462 || code == BIT_FIELD_REF)
463 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
464 return VN_NARY;
466 /* Fallthrough. */
467 case tcc_declaration:
468 return VN_REFERENCE;
470 case tcc_constant:
471 return VN_CONSTANT;
473 default:
474 if (code == ADDR_EXPR)
475 return (is_gimple_min_invariant (rhs1)
476 ? VN_CONSTANT : VN_REFERENCE);
477 else if (code == CONSTRUCTOR)
478 return VN_NARY;
479 return VN_NONE;
481 default:
482 return VN_NONE;
485 default:
486 return VN_NONE;
490 /* Lookup a value id for CONSTANT and return it. If it does not
491 exist returns 0. */
493 unsigned int
494 get_constant_value_id (tree constant)
496 vn_constant_s **slot;
497 struct vn_constant_s vc;
499 vc.hashcode = vn_hash_constant_with_type (constant);
500 vc.constant = constant;
501 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
502 if (slot)
503 return (*slot)->value_id;
504 return 0;
507 /* Lookup a value id for CONSTANT, and if it does not exist, create a
508 new one and return it. If it does exist, return it. */
510 unsigned int
511 get_or_alloc_constant_value_id (tree constant)
513 vn_constant_s **slot;
514 struct vn_constant_s vc;
515 vn_constant_t vcp;
517 vc.hashcode = vn_hash_constant_with_type (constant);
518 vc.constant = constant;
519 slot = constant_to_value_id->find_slot (&vc, INSERT);
520 if (*slot)
521 return (*slot)->value_id;
523 vcp = XNEW (struct vn_constant_s);
524 vcp->hashcode = vc.hashcode;
525 vcp->constant = constant;
526 vcp->value_id = get_next_value_id ();
527 *slot = vcp;
528 bitmap_set_bit (constant_value_ids, vcp->value_id);
529 return vcp->value_id;
532 /* Return true if V is a value id for a constant. */
534 bool
535 value_id_constant_p (unsigned int v)
537 return bitmap_bit_p (constant_value_ids, v);
540 /* Compute the hash for a reference operand VRO1. */
542 static void
543 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
545 hstate.add_int (vro1->opcode);
546 if (vro1->op0)
547 inchash::add_expr (vro1->op0, hstate);
548 if (vro1->op1)
549 inchash::add_expr (vro1->op1, hstate);
550 if (vro1->op2)
551 inchash::add_expr (vro1->op2, hstate);
554 /* Compute a hash for the reference operation VR1 and return it. */
556 static hashval_t
557 vn_reference_compute_hash (const vn_reference_t vr1)
559 inchash::hash hstate;
560 hashval_t result;
561 int i;
562 vn_reference_op_t vro;
563 poly_int64 off = -1;
564 bool deref = false;
566 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
568 if (vro->opcode == MEM_REF)
569 deref = true;
570 else if (vro->opcode != ADDR_EXPR)
571 deref = false;
572 if (maybe_ne (vro->off, -1))
574 if (known_eq (off, -1))
575 off = 0;
576 off += vro->off;
578 else
580 if (maybe_ne (off, -1)
581 && maybe_ne (off, 0))
582 hstate.add_poly_int (off);
583 off = -1;
584 if (deref
585 && vro->opcode == ADDR_EXPR)
587 if (vro->op0)
589 tree op = TREE_OPERAND (vro->op0, 0);
590 hstate.add_int (TREE_CODE (op));
591 inchash::add_expr (op, hstate);
594 else
595 vn_reference_op_compute_hash (vro, hstate);
598 result = hstate.end ();
599 /* ??? We would ICE later if we hash instead of adding that in. */
600 if (vr1->vuse)
601 result += SSA_NAME_VERSION (vr1->vuse);
603 return result;
606 /* Return true if reference operations VR1 and VR2 are equivalent. This
607 means they have the same set of operands and vuses. */
609 bool
610 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
612 unsigned i, j;
614 /* Early out if this is not a hash collision. */
615 if (vr1->hashcode != vr2->hashcode)
616 return false;
618 /* The VOP needs to be the same. */
619 if (vr1->vuse != vr2->vuse)
620 return false;
622 /* If the operands are the same we are done. */
623 if (vr1->operands == vr2->operands)
624 return true;
626 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
627 return false;
629 if (INTEGRAL_TYPE_P (vr1->type)
630 && INTEGRAL_TYPE_P (vr2->type))
632 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
633 return false;
635 else if (INTEGRAL_TYPE_P (vr1->type)
636 && (TYPE_PRECISION (vr1->type)
637 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
638 return false;
639 else if (INTEGRAL_TYPE_P (vr2->type)
640 && (TYPE_PRECISION (vr2->type)
641 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
642 return false;
644 i = 0;
645 j = 0;
648 poly_int64 off1 = 0, off2 = 0;
649 vn_reference_op_t vro1, vro2;
650 vn_reference_op_s tem1, tem2;
651 bool deref1 = false, deref2 = false;
652 for (; vr1->operands.iterate (i, &vro1); i++)
654 if (vro1->opcode == MEM_REF)
655 deref1 = true;
656 /* Do not look through a storage order barrier. */
657 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
658 return false;
659 if (known_eq (vro1->off, -1))
660 break;
661 off1 += vro1->off;
663 for (; vr2->operands.iterate (j, &vro2); j++)
665 if (vro2->opcode == MEM_REF)
666 deref2 = true;
667 /* Do not look through a storage order barrier. */
668 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
669 return false;
670 if (known_eq (vro2->off, -1))
671 break;
672 off2 += vro2->off;
674 if (maybe_ne (off1, off2))
675 return false;
676 if (deref1 && vro1->opcode == ADDR_EXPR)
678 memset (&tem1, 0, sizeof (tem1));
679 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
680 tem1.type = TREE_TYPE (tem1.op0);
681 tem1.opcode = TREE_CODE (tem1.op0);
682 vro1 = &tem1;
683 deref1 = false;
685 if (deref2 && vro2->opcode == ADDR_EXPR)
687 memset (&tem2, 0, sizeof (tem2));
688 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
689 tem2.type = TREE_TYPE (tem2.op0);
690 tem2.opcode = TREE_CODE (tem2.op0);
691 vro2 = &tem2;
692 deref2 = false;
694 if (deref1 != deref2)
695 return false;
696 if (!vn_reference_op_eq (vro1, vro2))
697 return false;
698 ++j;
699 ++i;
701 while (vr1->operands.length () != i
702 || vr2->operands.length () != j);
704 return true;
707 /* Copy the operations present in load/store REF into RESULT, a vector of
708 vn_reference_op_s's. */
710 static void
711 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
713 if (TREE_CODE (ref) == TARGET_MEM_REF)
715 vn_reference_op_s temp;
717 result->reserve (3);
719 memset (&temp, 0, sizeof (temp));
720 temp.type = TREE_TYPE (ref);
721 temp.opcode = TREE_CODE (ref);
722 temp.op0 = TMR_INDEX (ref);
723 temp.op1 = TMR_STEP (ref);
724 temp.op2 = TMR_OFFSET (ref);
725 temp.off = -1;
726 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
727 temp.base = MR_DEPENDENCE_BASE (ref);
728 result->quick_push (temp);
730 memset (&temp, 0, sizeof (temp));
731 temp.type = NULL_TREE;
732 temp.opcode = ERROR_MARK;
733 temp.op0 = TMR_INDEX2 (ref);
734 temp.off = -1;
735 result->quick_push (temp);
737 memset (&temp, 0, sizeof (temp));
738 temp.type = NULL_TREE;
739 temp.opcode = TREE_CODE (TMR_BASE (ref));
740 temp.op0 = TMR_BASE (ref);
741 temp.off = -1;
742 result->quick_push (temp);
743 return;
746 /* For non-calls, store the information that makes up the address. */
747 tree orig = ref;
748 while (ref)
750 vn_reference_op_s temp;
752 memset (&temp, 0, sizeof (temp));
753 temp.type = TREE_TYPE (ref);
754 temp.opcode = TREE_CODE (ref);
755 temp.off = -1;
757 switch (temp.opcode)
759 case MODIFY_EXPR:
760 temp.op0 = TREE_OPERAND (ref, 1);
761 break;
762 case WITH_SIZE_EXPR:
763 temp.op0 = TREE_OPERAND (ref, 1);
764 temp.off = 0;
765 break;
766 case MEM_REF:
767 /* The base address gets its own vn_reference_op_s structure. */
768 temp.op0 = TREE_OPERAND (ref, 1);
770 offset_int off = mem_ref_offset (ref);
771 if (wi::fits_shwi_p (off))
772 temp.off = off.to_shwi ();
774 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
775 temp.base = MR_DEPENDENCE_BASE (ref);
776 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
777 break;
778 case BIT_FIELD_REF:
779 /* Record bits, position and storage order. */
780 temp.op0 = TREE_OPERAND (ref, 1);
781 temp.op1 = TREE_OPERAND (ref, 2);
782 if (tree_fits_shwi_p (TREE_OPERAND (ref, 2)))
784 HOST_WIDE_INT off = tree_to_shwi (TREE_OPERAND (ref, 2));
785 if (off % BITS_PER_UNIT == 0)
786 temp.off = off / BITS_PER_UNIT;
788 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
789 break;
790 case COMPONENT_REF:
791 /* The field decl is enough to unambiguously specify the field,
792 a matching type is not necessary and a mismatching type
793 is always a spurious difference. */
794 temp.type = NULL_TREE;
795 temp.op0 = TREE_OPERAND (ref, 1);
796 temp.op1 = TREE_OPERAND (ref, 2);
798 tree this_offset = component_ref_field_offset (ref);
799 if (this_offset
800 && poly_int_tree_p (this_offset))
802 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
803 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
805 poly_offset_int off
806 = (wi::to_poly_offset (this_offset)
807 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
808 /* Probibit value-numbering zero offset components
809 of addresses the same before the pass folding
810 __builtin_object_size had a chance to run
811 (checking cfun->after_inlining does the
812 trick here). */
813 if (TREE_CODE (orig) != ADDR_EXPR
814 || maybe_ne (off, 0)
815 || cfun->after_inlining)
816 off.to_shwi (&temp.off);
820 break;
821 case ARRAY_RANGE_REF:
822 case ARRAY_REF:
824 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
825 /* Record index as operand. */
826 temp.op0 = TREE_OPERAND (ref, 1);
827 /* Always record lower bounds and element size. */
828 temp.op1 = array_ref_low_bound (ref);
829 /* But record element size in units of the type alignment. */
830 temp.op2 = TREE_OPERAND (ref, 3);
831 temp.align = eltype->type_common.align;
832 if (! temp.op2)
833 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
834 size_int (TYPE_ALIGN_UNIT (eltype)));
835 if (poly_int_tree_p (temp.op0)
836 && poly_int_tree_p (temp.op1)
837 && TREE_CODE (temp.op2) == INTEGER_CST)
839 poly_offset_int off = ((wi::to_poly_offset (temp.op0)
840 - wi::to_poly_offset (temp.op1))
841 * wi::to_offset (temp.op2)
842 * vn_ref_op_align_unit (&temp));
843 off.to_shwi (&temp.off);
846 break;
847 case VAR_DECL:
848 if (DECL_HARD_REGISTER (ref))
850 temp.op0 = ref;
851 break;
853 /* Fallthru. */
854 case PARM_DECL:
855 case CONST_DECL:
856 case RESULT_DECL:
857 /* Canonicalize decls to MEM[&decl] which is what we end up with
858 when valueizing MEM[ptr] with ptr = &decl. */
859 temp.opcode = MEM_REF;
860 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
861 temp.off = 0;
862 result->safe_push (temp);
863 temp.opcode = ADDR_EXPR;
864 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
865 temp.type = TREE_TYPE (temp.op0);
866 temp.off = -1;
867 break;
868 case STRING_CST:
869 case INTEGER_CST:
870 case COMPLEX_CST:
871 case VECTOR_CST:
872 case REAL_CST:
873 case FIXED_CST:
874 case CONSTRUCTOR:
875 case SSA_NAME:
876 temp.op0 = ref;
877 break;
878 case ADDR_EXPR:
879 if (is_gimple_min_invariant (ref))
881 temp.op0 = ref;
882 break;
884 break;
885 /* These are only interesting for their operands, their
886 existence, and their type. They will never be the last
887 ref in the chain of references (IE they require an
888 operand), so we don't have to put anything
889 for op* as it will be handled by the iteration */
890 case REALPART_EXPR:
891 temp.off = 0;
892 break;
893 case VIEW_CONVERT_EXPR:
894 temp.off = 0;
895 temp.reverse = storage_order_barrier_p (ref);
896 break;
897 case IMAGPART_EXPR:
898 /* This is only interesting for its constant offset. */
899 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
900 break;
901 default:
902 gcc_unreachable ();
904 result->safe_push (temp);
906 if (REFERENCE_CLASS_P (ref)
907 || TREE_CODE (ref) == MODIFY_EXPR
908 || TREE_CODE (ref) == WITH_SIZE_EXPR
909 || (TREE_CODE (ref) == ADDR_EXPR
910 && !is_gimple_min_invariant (ref)))
911 ref = TREE_OPERAND (ref, 0);
912 else
913 ref = NULL_TREE;
917 /* Build a alias-oracle reference abstraction in *REF from the vn_reference
918 operands in *OPS, the reference alias set SET and the reference type TYPE.
919 Return true if something useful was produced. */
921 bool
922 ao_ref_init_from_vn_reference (ao_ref *ref,
923 alias_set_type set, tree type,
924 vec<vn_reference_op_s> ops)
926 vn_reference_op_t op;
927 unsigned i;
928 tree base = NULL_TREE;
929 tree *op0_p = &base;
930 poly_offset_int offset = 0;
931 poly_offset_int max_size;
932 poly_offset_int size = -1;
933 tree size_tree = NULL_TREE;
934 alias_set_type base_alias_set = -1;
936 /* First get the final access size from just the outermost expression. */
937 op = &ops[0];
938 if (op->opcode == COMPONENT_REF)
939 size_tree = DECL_SIZE (op->op0);
940 else if (op->opcode == BIT_FIELD_REF)
941 size_tree = op->op0;
942 else
944 machine_mode mode = TYPE_MODE (type);
945 if (mode == BLKmode)
946 size_tree = TYPE_SIZE (type);
947 else
948 size = GET_MODE_BITSIZE (mode);
950 if (size_tree != NULL_TREE
951 && poly_int_tree_p (size_tree))
952 size = wi::to_poly_offset (size_tree);
954 /* Initially, maxsize is the same as the accessed element size.
955 In the following it will only grow (or become -1). */
956 max_size = size;
958 /* Compute cumulative bit-offset for nested component-refs and array-refs,
959 and find the ultimate containing object. */
960 FOR_EACH_VEC_ELT (ops, i, op)
962 switch (op->opcode)
964 /* These may be in the reference ops, but we cannot do anything
965 sensible with them here. */
966 case ADDR_EXPR:
967 /* Apart from ADDR_EXPR arguments to MEM_REF. */
968 if (base != NULL_TREE
969 && TREE_CODE (base) == MEM_REF
970 && op->op0
971 && DECL_P (TREE_OPERAND (op->op0, 0)))
973 vn_reference_op_t pop = &ops[i-1];
974 base = TREE_OPERAND (op->op0, 0);
975 if (known_eq (pop->off, -1))
977 max_size = -1;
978 offset = 0;
980 else
981 offset += pop->off * BITS_PER_UNIT;
982 op0_p = NULL;
983 break;
985 /* Fallthru. */
986 case CALL_EXPR:
987 return false;
989 /* Record the base objects. */
990 case MEM_REF:
991 base_alias_set = get_deref_alias_set (op->op0);
992 *op0_p = build2 (MEM_REF, op->type,
993 NULL_TREE, op->op0);
994 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
995 MR_DEPENDENCE_BASE (*op0_p) = op->base;
996 op0_p = &TREE_OPERAND (*op0_p, 0);
997 break;
999 case VAR_DECL:
1000 case PARM_DECL:
1001 case RESULT_DECL:
1002 case SSA_NAME:
1003 *op0_p = op->op0;
1004 op0_p = NULL;
1005 break;
1007 /* And now the usual component-reference style ops. */
1008 case BIT_FIELD_REF:
1009 offset += wi::to_offset (op->op1);
1010 break;
1012 case COMPONENT_REF:
1014 tree field = op->op0;
1015 /* We do not have a complete COMPONENT_REF tree here so we
1016 cannot use component_ref_field_offset. Do the interesting
1017 parts manually. */
1018 tree this_offset = DECL_FIELD_OFFSET (field);
1020 if (op->op1 || !poly_int_tree_p (this_offset))
1021 max_size = -1;
1022 else
1024 poly_offset_int woffset = (wi::to_poly_offset (this_offset)
1025 << LOG2_BITS_PER_UNIT);
1026 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1027 offset += woffset;
1029 break;
1032 case ARRAY_RANGE_REF:
1033 case ARRAY_REF:
1034 /* We recorded the lower bound and the element size. */
1035 if (!poly_int_tree_p (op->op0)
1036 || !poly_int_tree_p (op->op1)
1037 || TREE_CODE (op->op2) != INTEGER_CST)
1038 max_size = -1;
1039 else
1041 poly_offset_int woffset
1042 = wi::sext (wi::to_poly_offset (op->op0)
1043 - wi::to_poly_offset (op->op1),
1044 TYPE_PRECISION (TREE_TYPE (op->op0)));
1045 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1046 woffset <<= LOG2_BITS_PER_UNIT;
1047 offset += woffset;
1049 break;
1051 case REALPART_EXPR:
1052 break;
1054 case IMAGPART_EXPR:
1055 offset += size;
1056 break;
1058 case VIEW_CONVERT_EXPR:
1059 break;
1061 case STRING_CST:
1062 case INTEGER_CST:
1063 case COMPLEX_CST:
1064 case VECTOR_CST:
1065 case REAL_CST:
1066 case CONSTRUCTOR:
1067 case CONST_DECL:
1068 return false;
1070 default:
1071 return false;
1075 if (base == NULL_TREE)
1076 return false;
1078 ref->ref = NULL_TREE;
1079 ref->base = base;
1080 ref->ref_alias_set = set;
1081 if (base_alias_set != -1)
1082 ref->base_alias_set = base_alias_set;
1083 else
1084 ref->base_alias_set = get_alias_set (base);
1085 /* We discount volatiles from value-numbering elsewhere. */
1086 ref->volatile_p = false;
1088 if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1090 ref->offset = 0;
1091 ref->size = -1;
1092 ref->max_size = -1;
1093 return true;
1096 if (!offset.to_shwi (&ref->offset))
1098 ref->offset = 0;
1099 ref->max_size = -1;
1100 return true;
1103 if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1104 ref->max_size = -1;
1106 return true;
1109 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1110 vn_reference_op_s's. */
1112 static void
1113 copy_reference_ops_from_call (gcall *call,
1114 vec<vn_reference_op_s> *result)
1116 vn_reference_op_s temp;
1117 unsigned i;
1118 tree lhs = gimple_call_lhs (call);
1119 int lr;
1121 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1122 different. By adding the lhs here in the vector, we ensure that the
1123 hashcode is different, guaranteeing a different value number. */
1124 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1126 memset (&temp, 0, sizeof (temp));
1127 temp.opcode = MODIFY_EXPR;
1128 temp.type = TREE_TYPE (lhs);
1129 temp.op0 = lhs;
1130 temp.off = -1;
1131 result->safe_push (temp);
1134 /* Copy the type, opcode, function, static chain and EH region, if any. */
1135 memset (&temp, 0, sizeof (temp));
1136 temp.type = gimple_call_return_type (call);
1137 temp.opcode = CALL_EXPR;
1138 temp.op0 = gimple_call_fn (call);
1139 temp.op1 = gimple_call_chain (call);
1140 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1141 temp.op2 = size_int (lr);
1142 temp.off = -1;
1143 if (gimple_call_with_bounds_p (call))
1144 temp.with_bounds = 1;
1145 result->safe_push (temp);
1147 /* Copy the call arguments. As they can be references as well,
1148 just chain them together. */
1149 for (i = 0; i < gimple_call_num_args (call); ++i)
1151 tree callarg = gimple_call_arg (call, i);
1152 copy_reference_ops_from_ref (callarg, result);
1156 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1157 *I_P to point to the last element of the replacement. */
1158 static bool
1159 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1160 unsigned int *i_p)
1162 unsigned int i = *i_p;
1163 vn_reference_op_t op = &(*ops)[i];
1164 vn_reference_op_t mem_op = &(*ops)[i - 1];
1165 tree addr_base;
1166 poly_int64 addr_offset = 0;
1168 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1169 from .foo.bar to the preceding MEM_REF offset and replace the
1170 address with &OBJ. */
1171 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1172 &addr_offset);
1173 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1174 if (addr_base != TREE_OPERAND (op->op0, 0))
1176 poly_offset_int off
1177 = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1178 SIGNED)
1179 + addr_offset);
1180 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1181 op->op0 = build_fold_addr_expr (addr_base);
1182 if (tree_fits_shwi_p (mem_op->op0))
1183 mem_op->off = tree_to_shwi (mem_op->op0);
1184 else
1185 mem_op->off = -1;
1186 return true;
1188 return false;
1191 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1192 *I_P to point to the last element of the replacement. */
1193 static bool
1194 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1195 unsigned int *i_p)
1197 unsigned int i = *i_p;
1198 vn_reference_op_t op = &(*ops)[i];
1199 vn_reference_op_t mem_op = &(*ops)[i - 1];
1200 gimple *def_stmt;
1201 enum tree_code code;
1202 poly_offset_int off;
1204 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1205 if (!is_gimple_assign (def_stmt))
1206 return false;
1208 code = gimple_assign_rhs_code (def_stmt);
1209 if (code != ADDR_EXPR
1210 && code != POINTER_PLUS_EXPR)
1211 return false;
1213 off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
1215 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1216 from .foo.bar to the preceding MEM_REF offset and replace the
1217 address with &OBJ. */
1218 if (code == ADDR_EXPR)
1220 tree addr, addr_base;
1221 poly_int64 addr_offset;
1223 addr = gimple_assign_rhs1 (def_stmt);
1224 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1225 &addr_offset);
1226 /* If that didn't work because the address isn't invariant propagate
1227 the reference tree from the address operation in case the current
1228 dereference isn't offsetted. */
1229 if (!addr_base
1230 && *i_p == ops->length () - 1
1231 && known_eq (off, 0)
1232 /* This makes us disable this transform for PRE where the
1233 reference ops might be also used for code insertion which
1234 is invalid. */
1235 && default_vn_walk_kind == VN_WALKREWRITE)
1237 auto_vec<vn_reference_op_s, 32> tem;
1238 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1239 /* Make sure to preserve TBAA info. The only objects not
1240 wrapped in MEM_REFs that can have their address taken are
1241 STRING_CSTs. */
1242 if (tem.length () >= 2
1243 && tem[tem.length () - 2].opcode == MEM_REF)
1245 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1246 new_mem_op->op0
1247 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1248 wi::to_poly_wide (new_mem_op->op0));
1250 else
1251 gcc_assert (tem.last ().opcode == STRING_CST);
1252 ops->pop ();
1253 ops->pop ();
1254 ops->safe_splice (tem);
1255 --*i_p;
1256 return true;
1258 if (!addr_base
1259 || TREE_CODE (addr_base) != MEM_REF)
1260 return false;
1262 off += addr_offset;
1263 off += mem_ref_offset (addr_base);
1264 op->op0 = TREE_OPERAND (addr_base, 0);
1266 else
1268 tree ptr, ptroff;
1269 ptr = gimple_assign_rhs1 (def_stmt);
1270 ptroff = gimple_assign_rhs2 (def_stmt);
1271 if (TREE_CODE (ptr) != SSA_NAME
1272 || TREE_CODE (ptroff) != INTEGER_CST)
1273 return false;
1275 off += wi::to_offset (ptroff);
1276 op->op0 = ptr;
1279 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1280 if (tree_fits_shwi_p (mem_op->op0))
1281 mem_op->off = tree_to_shwi (mem_op->op0);
1282 else
1283 mem_op->off = -1;
1284 if (TREE_CODE (op->op0) == SSA_NAME)
1285 op->op0 = SSA_VAL (op->op0);
1286 if (TREE_CODE (op->op0) != SSA_NAME)
1287 op->opcode = TREE_CODE (op->op0);
1289 /* And recurse. */
1290 if (TREE_CODE (op->op0) == SSA_NAME)
1291 vn_reference_maybe_forwprop_address (ops, i_p);
1292 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1293 vn_reference_fold_indirect (ops, i_p);
1294 return true;
1297 /* Optimize the reference REF to a constant if possible or return
1298 NULL_TREE if not. */
1300 tree
1301 fully_constant_vn_reference_p (vn_reference_t ref)
1303 vec<vn_reference_op_s> operands = ref->operands;
1304 vn_reference_op_t op;
1306 /* Try to simplify the translated expression if it is
1307 a call to a builtin function with at most two arguments. */
1308 op = &operands[0];
1309 if (op->opcode == CALL_EXPR
1310 && TREE_CODE (op->op0) == ADDR_EXPR
1311 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1312 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1313 && operands.length () >= 2
1314 && operands.length () <= 3)
1316 vn_reference_op_t arg0, arg1 = NULL;
1317 bool anyconst = false;
1318 arg0 = &operands[1];
1319 if (operands.length () > 2)
1320 arg1 = &operands[2];
1321 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1322 || (arg0->opcode == ADDR_EXPR
1323 && is_gimple_min_invariant (arg0->op0)))
1324 anyconst = true;
1325 if (arg1
1326 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1327 || (arg1->opcode == ADDR_EXPR
1328 && is_gimple_min_invariant (arg1->op0))))
1329 anyconst = true;
1330 if (anyconst)
1332 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1333 arg1 ? 2 : 1,
1334 arg0->op0,
1335 arg1 ? arg1->op0 : NULL);
1336 if (folded
1337 && TREE_CODE (folded) == NOP_EXPR)
1338 folded = TREE_OPERAND (folded, 0);
1339 if (folded
1340 && is_gimple_min_invariant (folded))
1341 return folded;
1345 /* Simplify reads from constants or constant initializers. */
1346 else if (BITS_PER_UNIT == 8
1347 && is_gimple_reg_type (ref->type)
1348 && (!INTEGRAL_TYPE_P (ref->type)
1349 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1351 poly_int64 off = 0;
1352 HOST_WIDE_INT size;
1353 if (INTEGRAL_TYPE_P (ref->type))
1354 size = TYPE_PRECISION (ref->type);
1355 else
1356 size = tree_to_shwi (TYPE_SIZE (ref->type));
1357 if (size % BITS_PER_UNIT != 0
1358 || size > MAX_BITSIZE_MODE_ANY_MODE)
1359 return NULL_TREE;
1360 size /= BITS_PER_UNIT;
1361 unsigned i;
1362 for (i = 0; i < operands.length (); ++i)
1364 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1366 ++i;
1367 break;
1369 if (known_eq (operands[i].off, -1))
1370 return NULL_TREE;
1371 off += operands[i].off;
1372 if (operands[i].opcode == MEM_REF)
1374 ++i;
1375 break;
1378 vn_reference_op_t base = &operands[--i];
1379 tree ctor = error_mark_node;
1380 tree decl = NULL_TREE;
1381 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1382 ctor = base->op0;
1383 else if (base->opcode == MEM_REF
1384 && base[1].opcode == ADDR_EXPR
1385 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1386 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1387 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1389 decl = TREE_OPERAND (base[1].op0, 0);
1390 if (TREE_CODE (decl) == STRING_CST)
1391 ctor = decl;
1392 else
1393 ctor = ctor_for_folding (decl);
1395 if (ctor == NULL_TREE)
1396 return build_zero_cst (ref->type);
1397 else if (ctor != error_mark_node)
1399 HOST_WIDE_INT const_off;
1400 if (decl)
1402 tree res = fold_ctor_reference (ref->type, ctor,
1403 off * BITS_PER_UNIT,
1404 size * BITS_PER_UNIT, decl);
1405 if (res)
1407 STRIP_USELESS_TYPE_CONVERSION (res);
1408 if (is_gimple_min_invariant (res))
1409 return res;
1412 else if (off.is_constant (&const_off))
1414 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1415 int len = native_encode_expr (ctor, buf, size, const_off);
1416 if (len > 0)
1417 return native_interpret_expr (ref->type, buf, len);
1422 return NULL_TREE;
1425 /* Return true if OPS contain a storage order barrier. */
1427 static bool
1428 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1430 vn_reference_op_t op;
1431 unsigned i;
1433 FOR_EACH_VEC_ELT (ops, i, op)
1434 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1435 return true;
1437 return false;
1440 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1441 structures into their value numbers. This is done in-place, and
1442 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1443 whether any operands were valueized. */
1445 static vec<vn_reference_op_s>
1446 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1448 vn_reference_op_t vro;
1449 unsigned int i;
1451 *valueized_anything = false;
1453 FOR_EACH_VEC_ELT (orig, i, vro)
1455 if (vro->opcode == SSA_NAME
1456 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1458 tree tem = SSA_VAL (vro->op0);
1459 if (tem != vro->op0)
1461 *valueized_anything = true;
1462 vro->op0 = tem;
1464 /* If it transforms from an SSA_NAME to a constant, update
1465 the opcode. */
1466 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1467 vro->opcode = TREE_CODE (vro->op0);
1469 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1471 tree tem = SSA_VAL (vro->op1);
1472 if (tem != vro->op1)
1474 *valueized_anything = true;
1475 vro->op1 = tem;
1478 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1480 tree tem = SSA_VAL (vro->op2);
1481 if (tem != vro->op2)
1483 *valueized_anything = true;
1484 vro->op2 = tem;
1487 /* If it transforms from an SSA_NAME to an address, fold with
1488 a preceding indirect reference. */
1489 if (i > 0
1490 && vro->op0
1491 && TREE_CODE (vro->op0) == ADDR_EXPR
1492 && orig[i - 1].opcode == MEM_REF)
1494 if (vn_reference_fold_indirect (&orig, &i))
1495 *valueized_anything = true;
1497 else if (i > 0
1498 && vro->opcode == SSA_NAME
1499 && orig[i - 1].opcode == MEM_REF)
1501 if (vn_reference_maybe_forwprop_address (&orig, &i))
1502 *valueized_anything = true;
1504 /* If it transforms a non-constant ARRAY_REF into a constant
1505 one, adjust the constant offset. */
1506 else if (vro->opcode == ARRAY_REF
1507 && known_eq (vro->off, -1)
1508 && poly_int_tree_p (vro->op0)
1509 && poly_int_tree_p (vro->op1)
1510 && TREE_CODE (vro->op2) == INTEGER_CST)
1512 poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1513 - wi::to_poly_offset (vro->op1))
1514 * wi::to_offset (vro->op2)
1515 * vn_ref_op_align_unit (vro));
1516 off.to_shwi (&vro->off);
1520 return orig;
1523 static vec<vn_reference_op_s>
1524 valueize_refs (vec<vn_reference_op_s> orig)
1526 bool tem;
1527 return valueize_refs_1 (orig, &tem);
1530 static vec<vn_reference_op_s> shared_lookup_references;
1532 /* Create a vector of vn_reference_op_s structures from REF, a
1533 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1534 this function. *VALUEIZED_ANYTHING will specify whether any
1535 operands were valueized. */
1537 static vec<vn_reference_op_s>
1538 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1540 if (!ref)
1541 return vNULL;
1542 shared_lookup_references.truncate (0);
1543 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1544 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1545 valueized_anything);
1546 return shared_lookup_references;
1549 /* Create a vector of vn_reference_op_s structures from CALL, a
1550 call statement. The vector is shared among all callers of
1551 this function. */
1553 static vec<vn_reference_op_s>
1554 valueize_shared_reference_ops_from_call (gcall *call)
1556 if (!call)
1557 return vNULL;
1558 shared_lookup_references.truncate (0);
1559 copy_reference_ops_from_call (call, &shared_lookup_references);
1560 shared_lookup_references = valueize_refs (shared_lookup_references);
1561 return shared_lookup_references;
1564 /* Lookup a SCCVN reference operation VR in the current hash table.
1565 Returns the resulting value number if it exists in the hash table,
1566 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1567 vn_reference_t stored in the hashtable if something is found. */
1569 static tree
1570 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1572 vn_reference_s **slot;
1573 hashval_t hash;
1575 hash = vr->hashcode;
1576 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1577 if (!slot && current_info == optimistic_info)
1578 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1579 if (slot)
1581 if (vnresult)
1582 *vnresult = (vn_reference_t)*slot;
1583 return ((vn_reference_t)*slot)->result;
1586 return NULL_TREE;
1589 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1590 with the current VUSE and performs the expression lookup. */
1592 static void *
1593 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1594 unsigned int cnt, void *vr_)
1596 vn_reference_t vr = (vn_reference_t)vr_;
1597 vn_reference_s **slot;
1598 hashval_t hash;
1600 /* This bounds the stmt walks we perform on reference lookups
1601 to O(1) instead of O(N) where N is the number of dominating
1602 stores. */
1603 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1604 return (void *)-1;
1606 if (last_vuse_ptr)
1607 *last_vuse_ptr = vuse;
1609 /* Fixup vuse and hash. */
1610 if (vr->vuse)
1611 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1612 vr->vuse = vuse_ssa_val (vuse);
1613 if (vr->vuse)
1614 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1616 hash = vr->hashcode;
1617 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1618 if (!slot && current_info == optimistic_info)
1619 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1620 if (slot)
1621 return *slot;
1623 return NULL;
1626 /* Lookup an existing or insert a new vn_reference entry into the
1627 value table for the VUSE, SET, TYPE, OPERANDS reference which
1628 has the value VALUE which is either a constant or an SSA name. */
1630 static vn_reference_t
1631 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1632 alias_set_type set,
1633 tree type,
1634 vec<vn_reference_op_s,
1635 va_heap> operands,
1636 tree value)
1638 vn_reference_s vr1;
1639 vn_reference_t result;
1640 unsigned value_id;
1641 vr1.vuse = vuse;
1642 vr1.operands = operands;
1643 vr1.type = type;
1644 vr1.set = set;
1645 vr1.hashcode = vn_reference_compute_hash (&vr1);
1646 if (vn_reference_lookup_1 (&vr1, &result))
1647 return result;
1648 if (TREE_CODE (value) == SSA_NAME)
1649 value_id = VN_INFO (value)->value_id;
1650 else
1651 value_id = get_or_alloc_constant_value_id (value);
1652 return vn_reference_insert_pieces (vuse, set, type,
1653 operands.copy (), value, value_id);
1656 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *stmt, tree result);
1657 static unsigned mprts_hook_cnt;
1659 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
1661 static tree
1662 vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops_)
1664 if (!rcode.is_tree_code ())
1665 return NULL_TREE;
1666 tree *ops = ops_;
1667 unsigned int length = TREE_CODE_LENGTH ((tree_code) rcode);
1668 if (rcode == CONSTRUCTOR
1669 /* ??? We're arriving here with SCCVNs view, decomposed CONSTRUCTOR
1670 and GIMPLEs / match-and-simplifies, CONSTRUCTOR as GENERIC tree. */
1671 && TREE_CODE (ops_[0]) == CONSTRUCTOR)
1673 length = CONSTRUCTOR_NELTS (ops_[0]);
1674 ops = XALLOCAVEC (tree, length);
1675 for (unsigned i = 0; i < length; ++i)
1676 ops[i] = CONSTRUCTOR_ELT (ops_[0], i)->value;
1678 vn_nary_op_t vnresult = NULL;
1679 tree res = vn_nary_op_lookup_pieces (length, (tree_code) rcode,
1680 type, ops, &vnresult);
1681 /* We can end up endlessly recursing simplifications if the lookup above
1682 presents us with a def-use chain that mirrors the original simplification.
1683 See PR80887 for an example. Limit successful lookup artificially
1684 to 10 times if we are called as mprts_hook. */
1685 if (res
1686 && mprts_hook
1687 && --mprts_hook_cnt == 0)
1689 if (dump_file && (dump_flags & TDF_DETAILS))
1690 fprintf (dump_file, "Resetting mprts_hook after too many "
1691 "invocations.\n");
1692 mprts_hook = NULL;
1694 return res;
1697 /* Return a value-number for RCODE OPS... either by looking up an existing
1698 value-number for the simplified result or by inserting the operation if
1699 INSERT is true. */
1701 static tree
1702 vn_nary_build_or_lookup_1 (code_helper rcode, tree type, tree *ops,
1703 bool insert)
1705 tree result = NULL_TREE;
1706 /* We will be creating a value number for
1707 RCODE (OPS...).
1708 So first simplify and lookup this expression to see if it
1709 is already available. */
1710 mprts_hook = vn_lookup_simplify_result;
1711 mprts_hook_cnt = 9;
1712 bool res = false;
1713 switch (TREE_CODE_LENGTH ((tree_code) rcode))
1715 case 1:
1716 res = gimple_resimplify1 (NULL, &rcode, type, ops, vn_valueize);
1717 break;
1718 case 2:
1719 res = gimple_resimplify2 (NULL, &rcode, type, ops, vn_valueize);
1720 break;
1721 case 3:
1722 res = gimple_resimplify3 (NULL, &rcode, type, ops, vn_valueize);
1723 break;
1725 mprts_hook = NULL;
1726 gimple *new_stmt = NULL;
1727 if (res
1728 && gimple_simplified_result_is_gimple_val (rcode, ops))
1729 /* The expression is already available. */
1730 result = ops[0];
1731 else
1733 tree val = vn_lookup_simplify_result (rcode, type, ops);
1734 if (!val && insert)
1736 gimple_seq stmts = NULL;
1737 result = maybe_push_res_to_seq (rcode, type, ops, &stmts);
1738 if (result)
1740 gcc_assert (gimple_seq_singleton_p (stmts));
1741 new_stmt = gimple_seq_first_stmt (stmts);
1744 else
1745 /* The expression is already available. */
1746 result = val;
1748 if (new_stmt)
1750 /* The expression is not yet available, value-number lhs to
1751 the new SSA_NAME we created. */
1752 /* Initialize value-number information properly. */
1753 VN_INFO_GET (result)->valnum = result;
1754 VN_INFO (result)->value_id = get_next_value_id ();
1755 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1756 new_stmt);
1757 VN_INFO (result)->needs_insertion = true;
1758 /* ??? PRE phi-translation inserts NARYs without corresponding
1759 SSA name result. Re-use those but set their result according
1760 to the stmt we just built. */
1761 vn_nary_op_t nary = NULL;
1762 vn_nary_op_lookup_stmt (new_stmt, &nary);
1763 if (nary)
1765 gcc_assert (nary->result == NULL_TREE);
1766 nary->result = gimple_assign_lhs (new_stmt);
1768 /* As all "inserted" statements are singleton SCCs, insert
1769 to the valid table. This is strictly needed to
1770 avoid re-generating new value SSA_NAMEs for the same
1771 expression during SCC iteration over and over (the
1772 optimistic table gets cleared after each iteration).
1773 We do not need to insert into the optimistic table, as
1774 lookups there will fall back to the valid table. */
1775 else if (current_info == optimistic_info)
1777 current_info = valid_info;
1778 vn_nary_op_insert_stmt (new_stmt, result);
1779 current_info = optimistic_info;
1781 else
1782 vn_nary_op_insert_stmt (new_stmt, result);
1783 if (dump_file && (dump_flags & TDF_DETAILS))
1785 fprintf (dump_file, "Inserting name ");
1786 print_generic_expr (dump_file, result);
1787 fprintf (dump_file, " for expression ");
1788 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1789 fprintf (dump_file, "\n");
1792 return result;
1795 /* Return a value-number for RCODE OPS... either by looking up an existing
1796 value-number for the simplified result or by inserting the operation. */
1798 static tree
1799 vn_nary_build_or_lookup (code_helper rcode, tree type, tree *ops)
1801 return vn_nary_build_or_lookup_1 (rcode, type, ops, true);
1804 /* Try to simplify the expression RCODE OPS... of type TYPE and return
1805 its value if present. */
1807 tree
1808 vn_nary_simplify (vn_nary_op_t nary)
1810 if (nary->length > 3)
1811 return NULL_TREE;
1812 tree ops[3];
1813 memcpy (ops, nary->op, sizeof (tree) * nary->length);
1814 return vn_nary_build_or_lookup_1 (nary->opcode, nary->type, ops, false);
1818 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1819 from the statement defining VUSE and if not successful tries to
1820 translate *REFP and VR_ through an aggregate copy at the definition
1821 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1822 of *REF and *VR. If only disambiguation was performed then
1823 *DISAMBIGUATE_ONLY is set to true. */
1825 static void *
1826 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1827 bool *disambiguate_only)
1829 vn_reference_t vr = (vn_reference_t)vr_;
1830 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1831 tree base = ao_ref_base (ref);
1832 HOST_WIDE_INT offseti, maxsizei;
1833 static vec<vn_reference_op_s> lhs_ops;
1834 ao_ref lhs_ref;
1835 bool lhs_ref_ok = false;
1836 poly_int64 copy_size;
1838 /* If the reference is based on a parameter that was determined as
1839 pointing to readonly memory it doesn't change. */
1840 if (TREE_CODE (base) == MEM_REF
1841 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1842 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1843 && bitmap_bit_p (const_parms,
1844 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1846 *disambiguate_only = true;
1847 return NULL;
1850 /* First try to disambiguate after value-replacing in the definitions LHS. */
1851 if (is_gimple_assign (def_stmt))
1853 tree lhs = gimple_assign_lhs (def_stmt);
1854 bool valueized_anything = false;
1855 /* Avoid re-allocation overhead. */
1856 lhs_ops.truncate (0);
1857 copy_reference_ops_from_ref (lhs, &lhs_ops);
1858 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1859 if (valueized_anything)
1861 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1862 get_alias_set (lhs),
1863 TREE_TYPE (lhs), lhs_ops);
1864 if (lhs_ref_ok
1865 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1867 *disambiguate_only = true;
1868 return NULL;
1871 else
1873 ao_ref_init (&lhs_ref, lhs);
1874 lhs_ref_ok = true;
1877 /* If we reach a clobbering statement try to skip it and see if
1878 we find a VN result with exactly the same value as the
1879 possible clobber. In this case we can ignore the clobber
1880 and return the found value.
1881 Note that we don't need to worry about partial overlapping
1882 accesses as we then can use TBAA to disambiguate against the
1883 clobbering statement when looking up a load (thus the
1884 VN_WALKREWRITE guard). */
1885 if (vn_walk_kind == VN_WALKREWRITE
1886 && is_gimple_reg_type (TREE_TYPE (lhs))
1887 && types_compatible_p (TREE_TYPE (lhs), vr->type))
1889 tree *saved_last_vuse_ptr = last_vuse_ptr;
1890 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
1891 last_vuse_ptr = NULL;
1892 tree saved_vuse = vr->vuse;
1893 hashval_t saved_hashcode = vr->hashcode;
1894 void *res = vn_reference_lookup_2 (ref,
1895 gimple_vuse (def_stmt), 0, vr);
1896 /* Need to restore vr->vuse and vr->hashcode. */
1897 vr->vuse = saved_vuse;
1898 vr->hashcode = saved_hashcode;
1899 last_vuse_ptr = saved_last_vuse_ptr;
1900 if (res && res != (void *)-1)
1902 vn_reference_t vnresult = (vn_reference_t) res;
1903 if (vnresult->result
1904 && operand_equal_p (vnresult->result,
1905 gimple_assign_rhs1 (def_stmt), 0))
1906 return res;
1910 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1911 && gimple_call_num_args (def_stmt) <= 4)
1913 /* For builtin calls valueize its arguments and call the
1914 alias oracle again. Valueization may improve points-to
1915 info of pointers and constify size and position arguments.
1916 Originally this was motivated by PR61034 which has
1917 conditional calls to free falsely clobbering ref because
1918 of imprecise points-to info of the argument. */
1919 tree oldargs[4];
1920 bool valueized_anything = false;
1921 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1923 oldargs[i] = gimple_call_arg (def_stmt, i);
1924 tree val = vn_valueize (oldargs[i]);
1925 if (val != oldargs[i])
1927 gimple_call_set_arg (def_stmt, i, val);
1928 valueized_anything = true;
1931 if (valueized_anything)
1933 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1934 ref);
1935 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1936 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1937 if (!res)
1939 *disambiguate_only = true;
1940 return NULL;
1945 if (*disambiguate_only)
1946 return (void *)-1;
1948 /* If we cannot constrain the size of the reference we cannot
1949 test if anything kills it. */
1950 if (!ref->max_size_known_p ())
1951 return (void *)-1;
1953 poly_int64 offset = ref->offset;
1954 poly_int64 maxsize = ref->max_size;
1956 /* We can't deduce anything useful from clobbers. */
1957 if (gimple_clobber_p (def_stmt))
1958 return (void *)-1;
1960 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1961 from that definition.
1962 1) Memset. */
1963 if (is_gimple_reg_type (vr->type)
1964 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1965 && integer_zerop (gimple_call_arg (def_stmt, 1))
1966 && poly_int_tree_p (gimple_call_arg (def_stmt, 2))
1967 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1969 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1970 tree base2;
1971 poly_int64 offset2, size2, maxsize2;
1972 bool reverse;
1973 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1974 &reverse);
1975 tree len = gimple_call_arg (def_stmt, 2);
1976 if (known_size_p (maxsize2)
1977 && operand_equal_p (base, base2, 0)
1978 && known_subrange_p (offset, maxsize, offset2,
1979 wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
1981 tree val = build_zero_cst (vr->type);
1982 return vn_reference_lookup_or_insert_for_pieces
1983 (vuse, vr->set, vr->type, vr->operands, val);
1987 /* 2) Assignment from an empty CONSTRUCTOR. */
1988 else if (is_gimple_reg_type (vr->type)
1989 && gimple_assign_single_p (def_stmt)
1990 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1991 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1993 tree base2;
1994 poly_int64 offset2, size2, maxsize2;
1995 bool reverse;
1996 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1997 &offset2, &size2, &maxsize2, &reverse);
1998 if (known_size_p (maxsize2)
1999 && operand_equal_p (base, base2, 0)
2000 && known_subrange_p (offset, maxsize, offset2, size2))
2002 tree val = build_zero_cst (vr->type);
2003 return vn_reference_lookup_or_insert_for_pieces
2004 (vuse, vr->set, vr->type, vr->operands, val);
2008 /* 3) Assignment from a constant. We can use folds native encode/interpret
2009 routines to extract the assigned bits. */
2010 else if (known_eq (ref->size, maxsize)
2011 && is_gimple_reg_type (vr->type)
2012 && !contains_storage_order_barrier_p (vr->operands)
2013 && gimple_assign_single_p (def_stmt)
2014 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2015 /* native_encode and native_decode operate on arrays of bytes
2016 and so fundamentally need a compile-time size and offset. */
2017 && maxsize.is_constant (&maxsizei)
2018 && maxsizei % BITS_PER_UNIT == 0
2019 && offset.is_constant (&offseti)
2020 && offseti % BITS_PER_UNIT == 0
2021 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2022 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2023 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2025 tree base2;
2026 HOST_WIDE_INT offset2, size2;
2027 bool reverse;
2028 base2 = get_ref_base_and_extent_hwi (gimple_assign_lhs (def_stmt),
2029 &offset2, &size2, &reverse);
2030 if (base2
2031 && !reverse
2032 && size2 % BITS_PER_UNIT == 0
2033 && offset2 % BITS_PER_UNIT == 0
2034 && operand_equal_p (base, base2, 0)
2035 && known_subrange_p (offseti, maxsizei, offset2, size2))
2037 /* We support up to 512-bit values (for V8DFmode). */
2038 unsigned char buffer[64];
2039 int len;
2041 tree rhs = gimple_assign_rhs1 (def_stmt);
2042 if (TREE_CODE (rhs) == SSA_NAME)
2043 rhs = SSA_VAL (rhs);
2044 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
2045 buffer, sizeof (buffer));
2046 if (len > 0)
2048 tree type = vr->type;
2049 /* Make sure to interpret in a type that has a range
2050 covering the whole access size. */
2051 if (INTEGRAL_TYPE_P (vr->type)
2052 && maxsizei != TYPE_PRECISION (vr->type))
2053 type = build_nonstandard_integer_type (maxsizei,
2054 TYPE_UNSIGNED (type));
2055 tree val = native_interpret_expr (type,
2056 buffer
2057 + ((offseti - offset2)
2058 / BITS_PER_UNIT),
2059 maxsizei / BITS_PER_UNIT);
2060 /* If we chop off bits because the types precision doesn't
2061 match the memory access size this is ok when optimizing
2062 reads but not when called from the DSE code during
2063 elimination. */
2064 if (val
2065 && type != vr->type)
2067 if (! int_fits_type_p (val, vr->type))
2068 val = NULL_TREE;
2069 else
2070 val = fold_convert (vr->type, val);
2073 if (val)
2074 return vn_reference_lookup_or_insert_for_pieces
2075 (vuse, vr->set, vr->type, vr->operands, val);
2080 /* 4) Assignment from an SSA name which definition we may be able
2081 to access pieces from. */
2082 else if (known_eq (ref->size, maxsize)
2083 && is_gimple_reg_type (vr->type)
2084 && !contains_storage_order_barrier_p (vr->operands)
2085 && gimple_assign_single_p (def_stmt)
2086 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2088 tree base2;
2089 poly_int64 offset2, size2, maxsize2;
2090 bool reverse;
2091 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2092 &offset2, &size2, &maxsize2,
2093 &reverse);
2094 if (!reverse
2095 && known_size_p (maxsize2)
2096 && known_eq (maxsize2, size2)
2097 && operand_equal_p (base, base2, 0)
2098 && known_subrange_p (offset, maxsize, offset2, size2)
2099 /* ??? We can't handle bitfield precision extracts without
2100 either using an alternate type for the BIT_FIELD_REF and
2101 then doing a conversion or possibly adjusting the offset
2102 according to endianness. */
2103 && (! INTEGRAL_TYPE_P (vr->type)
2104 || known_eq (ref->size, TYPE_PRECISION (vr->type)))
2105 && multiple_p (ref->size, BITS_PER_UNIT))
2107 code_helper rcode = BIT_FIELD_REF;
2108 tree ops[3];
2109 ops[0] = SSA_VAL (gimple_assign_rhs1 (def_stmt));
2110 ops[1] = bitsize_int (ref->size);
2111 ops[2] = bitsize_int (offset - offset2);
2112 tree val = vn_nary_build_or_lookup (rcode, vr->type, ops);
2113 if (val
2114 && (TREE_CODE (val) != SSA_NAME
2115 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2117 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2118 (vuse, vr->set, vr->type, vr->operands, val);
2119 return res;
2124 /* 5) For aggregate copies translate the reference through them if
2125 the copy kills ref. */
2126 else if (vn_walk_kind == VN_WALKREWRITE
2127 && gimple_assign_single_p (def_stmt)
2128 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2129 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2130 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2132 tree base2;
2133 int i, j, k;
2134 auto_vec<vn_reference_op_s> rhs;
2135 vn_reference_op_t vro;
2136 ao_ref r;
2138 if (!lhs_ref_ok)
2139 return (void *)-1;
2141 /* See if the assignment kills REF. */
2142 base2 = ao_ref_base (&lhs_ref);
2143 if (!lhs_ref.max_size_known_p ()
2144 || (base != base2
2145 && (TREE_CODE (base) != MEM_REF
2146 || TREE_CODE (base2) != MEM_REF
2147 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2148 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2149 TREE_OPERAND (base2, 1))))
2150 || !stmt_kills_ref_p (def_stmt, ref))
2151 return (void *)-1;
2153 /* Find the common base of ref and the lhs. lhs_ops already
2154 contains valueized operands for the lhs. */
2155 i = vr->operands.length () - 1;
2156 j = lhs_ops.length () - 1;
2157 while (j >= 0 && i >= 0
2158 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2160 i--;
2161 j--;
2164 /* ??? The innermost op should always be a MEM_REF and we already
2165 checked that the assignment to the lhs kills vr. Thus for
2166 aggregate copies using char[] types the vn_reference_op_eq
2167 may fail when comparing types for compatibility. But we really
2168 don't care here - further lookups with the rewritten operands
2169 will simply fail if we messed up types too badly. */
2170 poly_int64 extra_off = 0;
2171 if (j == 0 && i >= 0
2172 && lhs_ops[0].opcode == MEM_REF
2173 && maybe_ne (lhs_ops[0].off, -1))
2175 if (known_eq (lhs_ops[0].off, vr->operands[i].off))
2176 i--, j--;
2177 else if (vr->operands[i].opcode == MEM_REF
2178 && maybe_ne (vr->operands[i].off, -1))
2180 extra_off = vr->operands[i].off - lhs_ops[0].off;
2181 i--, j--;
2185 /* i now points to the first additional op.
2186 ??? LHS may not be completely contained in VR, one or more
2187 VIEW_CONVERT_EXPRs could be in its way. We could at least
2188 try handling outermost VIEW_CONVERT_EXPRs. */
2189 if (j != -1)
2190 return (void *)-1;
2192 /* Punt if the additional ops contain a storage order barrier. */
2193 for (k = i; k >= 0; k--)
2195 vro = &vr->operands[k];
2196 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2197 return (void *)-1;
2200 /* Now re-write REF to be based on the rhs of the assignment. */
2201 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2203 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2204 if (maybe_ne (extra_off, 0))
2206 if (rhs.length () < 2
2207 || rhs[0].opcode != MEM_REF
2208 || known_eq (rhs[0].off, -1))
2209 return (void *)-1;
2210 rhs[0].off += extra_off;
2211 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
2212 build_int_cst (TREE_TYPE (rhs[0].op0),
2213 extra_off));
2216 /* We need to pre-pend vr->operands[0..i] to rhs. */
2217 vec<vn_reference_op_s> old = vr->operands;
2218 if (i + 1 + rhs.length () > vr->operands.length ())
2219 vr->operands.safe_grow (i + 1 + rhs.length ());
2220 else
2221 vr->operands.truncate (i + 1 + rhs.length ());
2222 FOR_EACH_VEC_ELT (rhs, j, vro)
2223 vr->operands[i + 1 + j] = *vro;
2224 vr->operands = valueize_refs (vr->operands);
2225 if (old == shared_lookup_references)
2226 shared_lookup_references = vr->operands;
2227 vr->hashcode = vn_reference_compute_hash (vr);
2229 /* Try folding the new reference to a constant. */
2230 tree val = fully_constant_vn_reference_p (vr);
2231 if (val)
2232 return vn_reference_lookup_or_insert_for_pieces
2233 (vuse, vr->set, vr->type, vr->operands, val);
2235 /* Adjust *ref from the new operands. */
2236 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2237 return (void *)-1;
2238 /* This can happen with bitfields. */
2239 if (maybe_ne (ref->size, r.size))
2240 return (void *)-1;
2241 *ref = r;
2243 /* Do not update last seen VUSE after translating. */
2244 last_vuse_ptr = NULL;
2246 /* Keep looking for the adjusted *REF / VR pair. */
2247 return NULL;
2250 /* 6) For memcpy copies translate the reference through them if
2251 the copy kills ref. */
2252 else if (vn_walk_kind == VN_WALKREWRITE
2253 && is_gimple_reg_type (vr->type)
2254 /* ??? Handle BCOPY as well. */
2255 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2256 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2257 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2258 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2259 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2260 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2261 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2262 && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size))
2264 tree lhs, rhs;
2265 ao_ref r;
2266 poly_int64 rhs_offset, lhs_offset;
2267 vn_reference_op_s op;
2268 poly_uint64 mem_offset;
2269 poly_int64 at, byte_maxsize;
2271 /* Only handle non-variable, addressable refs. */
2272 if (maybe_ne (ref->size, maxsize)
2273 || !multiple_p (offset, BITS_PER_UNIT, &at)
2274 || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
2275 return (void *)-1;
2277 /* Extract a pointer base and an offset for the destination. */
2278 lhs = gimple_call_arg (def_stmt, 0);
2279 lhs_offset = 0;
2280 if (TREE_CODE (lhs) == SSA_NAME)
2282 lhs = SSA_VAL (lhs);
2283 if (TREE_CODE (lhs) == SSA_NAME)
2285 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2286 if (gimple_assign_single_p (def_stmt)
2287 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2288 lhs = gimple_assign_rhs1 (def_stmt);
2291 if (TREE_CODE (lhs) == ADDR_EXPR)
2293 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2294 &lhs_offset);
2295 if (!tem)
2296 return (void *)-1;
2297 if (TREE_CODE (tem) == MEM_REF
2298 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2300 lhs = TREE_OPERAND (tem, 0);
2301 if (TREE_CODE (lhs) == SSA_NAME)
2302 lhs = SSA_VAL (lhs);
2303 lhs_offset += mem_offset;
2305 else if (DECL_P (tem))
2306 lhs = build_fold_addr_expr (tem);
2307 else
2308 return (void *)-1;
2310 if (TREE_CODE (lhs) != SSA_NAME
2311 && TREE_CODE (lhs) != ADDR_EXPR)
2312 return (void *)-1;
2314 /* Extract a pointer base and an offset for the source. */
2315 rhs = gimple_call_arg (def_stmt, 1);
2316 rhs_offset = 0;
2317 if (TREE_CODE (rhs) == SSA_NAME)
2318 rhs = SSA_VAL (rhs);
2319 if (TREE_CODE (rhs) == ADDR_EXPR)
2321 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2322 &rhs_offset);
2323 if (!tem)
2324 return (void *)-1;
2325 if (TREE_CODE (tem) == MEM_REF
2326 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2328 rhs = TREE_OPERAND (tem, 0);
2329 rhs_offset += mem_offset;
2331 else if (DECL_P (tem)
2332 || TREE_CODE (tem) == STRING_CST)
2333 rhs = build_fold_addr_expr (tem);
2334 else
2335 return (void *)-1;
2337 if (TREE_CODE (rhs) != SSA_NAME
2338 && TREE_CODE (rhs) != ADDR_EXPR)
2339 return (void *)-1;
2341 /* The bases of the destination and the references have to agree. */
2342 if (TREE_CODE (base) == MEM_REF)
2344 if (TREE_OPERAND (base, 0) != lhs
2345 || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
2346 return (void *) -1;
2347 at += mem_offset;
2349 else if (!DECL_P (base)
2350 || TREE_CODE (lhs) != ADDR_EXPR
2351 || TREE_OPERAND (lhs, 0) != base)
2352 return (void *)-1;
2354 /* If the access is completely outside of the memcpy destination
2355 area there is no aliasing. */
2356 if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
2357 return NULL;
2358 /* And the access has to be contained within the memcpy destination. */
2359 if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
2360 return (void *)-1;
2362 /* Make room for 2 operands in the new reference. */
2363 if (vr->operands.length () < 2)
2365 vec<vn_reference_op_s> old = vr->operands;
2366 vr->operands.safe_grow_cleared (2);
2367 if (old == shared_lookup_references)
2368 shared_lookup_references = vr->operands;
2370 else
2371 vr->operands.truncate (2);
2373 /* The looked-through reference is a simple MEM_REF. */
2374 memset (&op, 0, sizeof (op));
2375 op.type = vr->type;
2376 op.opcode = MEM_REF;
2377 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2378 op.off = at - lhs_offset + rhs_offset;
2379 vr->operands[0] = op;
2380 op.type = TREE_TYPE (rhs);
2381 op.opcode = TREE_CODE (rhs);
2382 op.op0 = rhs;
2383 op.off = -1;
2384 vr->operands[1] = op;
2385 vr->hashcode = vn_reference_compute_hash (vr);
2387 /* Try folding the new reference to a constant. */
2388 tree val = fully_constant_vn_reference_p (vr);
2389 if (val)
2390 return vn_reference_lookup_or_insert_for_pieces
2391 (vuse, vr->set, vr->type, vr->operands, val);
2393 /* Adjust *ref from the new operands. */
2394 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2395 return (void *)-1;
2396 /* This can happen with bitfields. */
2397 if (maybe_ne (ref->size, r.size))
2398 return (void *)-1;
2399 *ref = r;
2401 /* Do not update last seen VUSE after translating. */
2402 last_vuse_ptr = NULL;
2404 /* Keep looking for the adjusted *REF / VR pair. */
2405 return NULL;
2408 /* Bail out and stop walking. */
2409 return (void *)-1;
2412 /* Return a reference op vector from OP that can be used for
2413 vn_reference_lookup_pieces. The caller is responsible for releasing
2414 the vector. */
2416 vec<vn_reference_op_s>
2417 vn_reference_operands_for_lookup (tree op)
2419 bool valueized;
2420 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2423 /* Lookup a reference operation by it's parts, in the current hash table.
2424 Returns the resulting value number if it exists in the hash table,
2425 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2426 vn_reference_t stored in the hashtable if something is found. */
2428 tree
2429 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2430 vec<vn_reference_op_s> operands,
2431 vn_reference_t *vnresult, vn_lookup_kind kind)
2433 struct vn_reference_s vr1;
2434 vn_reference_t tmp;
2435 tree cst;
2437 if (!vnresult)
2438 vnresult = &tmp;
2439 *vnresult = NULL;
2441 vr1.vuse = vuse_ssa_val (vuse);
2442 shared_lookup_references.truncate (0);
2443 shared_lookup_references.safe_grow (operands.length ());
2444 memcpy (shared_lookup_references.address (),
2445 operands.address (),
2446 sizeof (vn_reference_op_s)
2447 * operands.length ());
2448 vr1.operands = operands = shared_lookup_references
2449 = valueize_refs (shared_lookup_references);
2450 vr1.type = type;
2451 vr1.set = set;
2452 vr1.hashcode = vn_reference_compute_hash (&vr1);
2453 if ((cst = fully_constant_vn_reference_p (&vr1)))
2454 return cst;
2456 vn_reference_lookup_1 (&vr1, vnresult);
2457 if (!*vnresult
2458 && kind != VN_NOWALK
2459 && vr1.vuse)
2461 ao_ref r;
2462 vn_walk_kind = kind;
2463 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2464 *vnresult =
2465 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2466 vn_reference_lookup_2,
2467 vn_reference_lookup_3,
2468 vuse_ssa_val, &vr1);
2469 gcc_checking_assert (vr1.operands == shared_lookup_references);
2472 if (*vnresult)
2473 return (*vnresult)->result;
2475 return NULL_TREE;
2478 /* Lookup OP in the current hash table, and return the resulting value
2479 number if it exists in the hash table. Return NULL_TREE if it does
2480 not exist in the hash table or if the result field of the structure
2481 was NULL.. VNRESULT will be filled in with the vn_reference_t
2482 stored in the hashtable if one exists. When TBAA_P is false assume
2483 we are looking up a store and treat it as having alias-set zero. */
2485 tree
2486 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2487 vn_reference_t *vnresult, bool tbaa_p)
2489 vec<vn_reference_op_s> operands;
2490 struct vn_reference_s vr1;
2491 tree cst;
2492 bool valuezied_anything;
2494 if (vnresult)
2495 *vnresult = NULL;
2497 vr1.vuse = vuse_ssa_val (vuse);
2498 vr1.operands = operands
2499 = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
2500 vr1.type = TREE_TYPE (op);
2501 vr1.set = tbaa_p ? get_alias_set (op) : 0;
2502 vr1.hashcode = vn_reference_compute_hash (&vr1);
2503 if ((cst = fully_constant_vn_reference_p (&vr1)))
2504 return cst;
2506 if (kind != VN_NOWALK
2507 && vr1.vuse)
2509 vn_reference_t wvnresult;
2510 ao_ref r;
2511 /* Make sure to use a valueized reference if we valueized anything.
2512 Otherwise preserve the full reference for advanced TBAA. */
2513 if (!valuezied_anything
2514 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2515 vr1.operands))
2516 ao_ref_init (&r, op);
2517 if (! tbaa_p)
2518 r.ref_alias_set = r.base_alias_set = 0;
2519 vn_walk_kind = kind;
2520 wvnresult =
2521 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2522 vn_reference_lookup_2,
2523 vn_reference_lookup_3,
2524 vuse_ssa_val, &vr1);
2525 gcc_checking_assert (vr1.operands == shared_lookup_references);
2526 if (wvnresult)
2528 if (vnresult)
2529 *vnresult = wvnresult;
2530 return wvnresult->result;
2533 return NULL_TREE;
2536 return vn_reference_lookup_1 (&vr1, vnresult);
2539 /* Lookup CALL in the current hash table and return the entry in
2540 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2542 void
2543 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2544 vn_reference_t vr)
2546 if (vnresult)
2547 *vnresult = NULL;
2549 tree vuse = gimple_vuse (call);
2551 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2552 vr->operands = valueize_shared_reference_ops_from_call (call);
2553 vr->type = gimple_expr_type (call);
2554 vr->set = 0;
2555 vr->hashcode = vn_reference_compute_hash (vr);
2556 vn_reference_lookup_1 (vr, vnresult);
2559 /* Insert OP into the current hash table with a value number of
2560 RESULT, and return the resulting reference structure we created. */
2562 static vn_reference_t
2563 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2565 vn_reference_s **slot;
2566 vn_reference_t vr1;
2567 bool tem;
2569 vr1 = current_info->references_pool->allocate ();
2570 if (TREE_CODE (result) == SSA_NAME)
2571 vr1->value_id = VN_INFO (result)->value_id;
2572 else
2573 vr1->value_id = get_or_alloc_constant_value_id (result);
2574 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2575 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2576 vr1->type = TREE_TYPE (op);
2577 vr1->set = get_alias_set (op);
2578 vr1->hashcode = vn_reference_compute_hash (vr1);
2579 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2580 vr1->result_vdef = vdef;
2582 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2583 INSERT);
2585 /* Because we lookup stores using vuses, and value number failures
2586 using the vdefs (see visit_reference_op_store for how and why),
2587 it's possible that on failure we may try to insert an already
2588 inserted store. This is not wrong, there is no ssa name for a
2589 store that we could use as a differentiator anyway. Thus, unlike
2590 the other lookup functions, you cannot gcc_assert (!*slot)
2591 here. */
2593 /* But free the old slot in case of a collision. */
2594 if (*slot)
2595 free_reference (*slot);
2597 *slot = vr1;
2598 return vr1;
2601 /* Insert a reference by it's pieces into the current hash table with
2602 a value number of RESULT. Return the resulting reference
2603 structure we created. */
2605 vn_reference_t
2606 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2607 vec<vn_reference_op_s> operands,
2608 tree result, unsigned int value_id)
2611 vn_reference_s **slot;
2612 vn_reference_t vr1;
2614 vr1 = current_info->references_pool->allocate ();
2615 vr1->value_id = value_id;
2616 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2617 vr1->operands = valueize_refs (operands);
2618 vr1->type = type;
2619 vr1->set = set;
2620 vr1->hashcode = vn_reference_compute_hash (vr1);
2621 if (result && TREE_CODE (result) == SSA_NAME)
2622 result = SSA_VAL (result);
2623 vr1->result = result;
2625 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2626 INSERT);
2628 /* At this point we should have all the things inserted that we have
2629 seen before, and we should never try inserting something that
2630 already exists. */
2631 gcc_assert (!*slot);
2632 if (*slot)
2633 free_reference (*slot);
2635 *slot = vr1;
2636 return vr1;
2639 /* Compute and return the hash value for nary operation VBO1. */
2641 static hashval_t
2642 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2644 inchash::hash hstate;
2645 unsigned i;
2647 for (i = 0; i < vno1->length; ++i)
2648 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2649 vno1->op[i] = SSA_VAL (vno1->op[i]);
2651 if (((vno1->length == 2
2652 && commutative_tree_code (vno1->opcode))
2653 || (vno1->length == 3
2654 && commutative_ternary_tree_code (vno1->opcode)))
2655 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2656 std::swap (vno1->op[0], vno1->op[1]);
2657 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2658 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2660 std::swap (vno1->op[0], vno1->op[1]);
2661 vno1->opcode = swap_tree_comparison (vno1->opcode);
2664 hstate.add_int (vno1->opcode);
2665 for (i = 0; i < vno1->length; ++i)
2666 inchash::add_expr (vno1->op[i], hstate);
2668 return hstate.end ();
2671 /* Compare nary operations VNO1 and VNO2 and return true if they are
2672 equivalent. */
2674 bool
2675 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2677 unsigned i;
2679 if (vno1->hashcode != vno2->hashcode)
2680 return false;
2682 if (vno1->length != vno2->length)
2683 return false;
2685 if (vno1->opcode != vno2->opcode
2686 || !types_compatible_p (vno1->type, vno2->type))
2687 return false;
2689 for (i = 0; i < vno1->length; ++i)
2690 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2691 return false;
2693 /* BIT_INSERT_EXPR has an implict operand as the type precision
2694 of op1. Need to check to make sure they are the same. */
2695 if (vno1->opcode == BIT_INSERT_EXPR
2696 && TREE_CODE (vno1->op[1]) == INTEGER_CST
2697 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
2698 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
2699 return false;
2701 return true;
2704 /* Initialize VNO from the pieces provided. */
2706 static void
2707 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2708 enum tree_code code, tree type, tree *ops)
2710 vno->opcode = code;
2711 vno->length = length;
2712 vno->type = type;
2713 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2716 /* Initialize VNO from OP. */
2718 static void
2719 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2721 unsigned i;
2723 vno->opcode = TREE_CODE (op);
2724 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2725 vno->type = TREE_TYPE (op);
2726 for (i = 0; i < vno->length; ++i)
2727 vno->op[i] = TREE_OPERAND (op, i);
2730 /* Return the number of operands for a vn_nary ops structure from STMT. */
2732 static unsigned int
2733 vn_nary_length_from_stmt (gimple *stmt)
2735 switch (gimple_assign_rhs_code (stmt))
2737 case REALPART_EXPR:
2738 case IMAGPART_EXPR:
2739 case VIEW_CONVERT_EXPR:
2740 return 1;
2742 case BIT_FIELD_REF:
2743 return 3;
2745 case CONSTRUCTOR:
2746 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2748 default:
2749 return gimple_num_ops (stmt) - 1;
2753 /* Initialize VNO from STMT. */
2755 static void
2756 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2758 unsigned i;
2760 vno->opcode = gimple_assign_rhs_code (stmt);
2761 vno->type = gimple_expr_type (stmt);
2762 switch (vno->opcode)
2764 case REALPART_EXPR:
2765 case IMAGPART_EXPR:
2766 case VIEW_CONVERT_EXPR:
2767 vno->length = 1;
2768 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2769 break;
2771 case BIT_FIELD_REF:
2772 vno->length = 3;
2773 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2774 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2775 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2776 break;
2778 case CONSTRUCTOR:
2779 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2780 for (i = 0; i < vno->length; ++i)
2781 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2782 break;
2784 default:
2785 gcc_checking_assert (!gimple_assign_single_p (stmt));
2786 vno->length = gimple_num_ops (stmt) - 1;
2787 for (i = 0; i < vno->length; ++i)
2788 vno->op[i] = gimple_op (stmt, i + 1);
2792 /* Compute the hashcode for VNO and look for it in the hash table;
2793 return the resulting value number if it exists in the hash table.
2794 Return NULL_TREE if it does not exist in the hash table or if the
2795 result field of the operation is NULL. VNRESULT will contain the
2796 vn_nary_op_t from the hashtable if it exists. */
2798 static tree
2799 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2801 vn_nary_op_s **slot;
2803 if (vnresult)
2804 *vnresult = NULL;
2806 vno->hashcode = vn_nary_op_compute_hash (vno);
2807 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2808 NO_INSERT);
2809 if (!slot && current_info == optimistic_info)
2810 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2811 NO_INSERT);
2812 if (!slot)
2813 return NULL_TREE;
2814 if (vnresult)
2815 *vnresult = *slot;
2816 return (*slot)->result;
2819 /* Lookup a n-ary operation by its pieces and return the resulting value
2820 number if it exists in the hash table. Return NULL_TREE if it does
2821 not exist in the hash table or if the result field of the operation
2822 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2823 if it exists. */
2825 tree
2826 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2827 tree type, tree *ops, vn_nary_op_t *vnresult)
2829 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2830 sizeof_vn_nary_op (length));
2831 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2832 return vn_nary_op_lookup_1 (vno1, vnresult);
2835 /* Lookup OP in the current hash table, and return the resulting value
2836 number if it exists in the hash table. Return NULL_TREE if it does
2837 not exist in the hash table or if the result field of the operation
2838 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2839 if it exists. */
2841 tree
2842 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2844 vn_nary_op_t vno1
2845 = XALLOCAVAR (struct vn_nary_op_s,
2846 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2847 init_vn_nary_op_from_op (vno1, op);
2848 return vn_nary_op_lookup_1 (vno1, vnresult);
2851 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2852 value number if it exists in the hash table. Return NULL_TREE if
2853 it does not exist in the hash table. VNRESULT will contain the
2854 vn_nary_op_t from the hashtable if it exists. */
2856 tree
2857 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2859 vn_nary_op_t vno1
2860 = XALLOCAVAR (struct vn_nary_op_s,
2861 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2862 init_vn_nary_op_from_stmt (vno1, stmt);
2863 return vn_nary_op_lookup_1 (vno1, vnresult);
2866 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2868 static vn_nary_op_t
2869 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2871 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2874 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2875 obstack. */
2877 static vn_nary_op_t
2878 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2880 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2881 &current_info->nary_obstack);
2883 vno1->value_id = value_id;
2884 vno1->length = length;
2885 vno1->result = result;
2887 return vno1;
2890 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2891 VNO->HASHCODE first. */
2893 static vn_nary_op_t
2894 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2895 bool compute_hash)
2897 vn_nary_op_s **slot;
2899 if (compute_hash)
2900 vno->hashcode = vn_nary_op_compute_hash (vno);
2902 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2903 /* While we do not want to insert things twice it's awkward to
2904 avoid it in the case where visit_nary_op pattern-matches stuff
2905 and ends up simplifying the replacement to itself. We then
2906 get two inserts, one from visit_nary_op and one from
2907 vn_nary_build_or_lookup.
2908 So allow inserts with the same value number. */
2909 if (*slot && (*slot)->result == vno->result)
2910 return *slot;
2912 gcc_assert (!*slot);
2914 *slot = vno;
2915 return vno;
2918 /* Insert a n-ary operation into the current hash table using it's
2919 pieces. Return the vn_nary_op_t structure we created and put in
2920 the hashtable. */
2922 vn_nary_op_t
2923 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2924 tree type, tree *ops,
2925 tree result, unsigned int value_id)
2927 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2928 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2929 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2932 /* Insert OP into the current hash table with a value number of
2933 RESULT. Return the vn_nary_op_t structure we created and put in
2934 the hashtable. */
2936 vn_nary_op_t
2937 vn_nary_op_insert (tree op, tree result)
2939 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2940 vn_nary_op_t vno1;
2942 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2943 init_vn_nary_op_from_op (vno1, op);
2944 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2947 /* Insert the rhs of STMT into the current hash table with a value number of
2948 RESULT. */
2950 static vn_nary_op_t
2951 vn_nary_op_insert_stmt (gimple *stmt, tree result)
2953 vn_nary_op_t vno1
2954 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2955 result, VN_INFO (result)->value_id);
2956 init_vn_nary_op_from_stmt (vno1, stmt);
2957 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2960 /* Compute a hashcode for PHI operation VP1 and return it. */
2962 static inline hashval_t
2963 vn_phi_compute_hash (vn_phi_t vp1)
2965 inchash::hash hstate (vp1->phiargs.length () > 2
2966 ? vp1->block->index : vp1->phiargs.length ());
2967 tree phi1op;
2968 tree type;
2969 edge e;
2970 edge_iterator ei;
2972 /* If all PHI arguments are constants we need to distinguish
2973 the PHI node via its type. */
2974 type = vp1->type;
2975 hstate.merge_hash (vn_hash_type (type));
2977 FOR_EACH_EDGE (e, ei, vp1->block->preds)
2979 /* Don't hash backedge values they need to be handled as VN_TOP
2980 for optimistic value-numbering. */
2981 if (e->flags & EDGE_DFS_BACK)
2982 continue;
2984 phi1op = vp1->phiargs[e->dest_idx];
2985 if (phi1op == VN_TOP)
2986 continue;
2987 inchash::add_expr (phi1op, hstate);
2990 return hstate.end ();
2994 /* Return true if COND1 and COND2 represent the same condition, set
2995 *INVERTED_P if one needs to be inverted to make it the same as
2996 the other. */
2998 static bool
2999 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
3000 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
3002 enum tree_code code1 = gimple_cond_code (cond1);
3003 enum tree_code code2 = gimple_cond_code (cond2);
3005 *inverted_p = false;
3006 if (code1 == code2)
3008 else if (code1 == swap_tree_comparison (code2))
3009 std::swap (lhs2, rhs2);
3010 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
3011 *inverted_p = true;
3012 else if (code1 == invert_tree_comparison
3013 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
3015 std::swap (lhs2, rhs2);
3016 *inverted_p = true;
3018 else
3019 return false;
3021 return ((expressions_equal_p (lhs1, lhs2)
3022 && expressions_equal_p (rhs1, rhs2))
3023 || (commutative_tree_code (code1)
3024 && expressions_equal_p (lhs1, rhs2)
3025 && expressions_equal_p (rhs1, lhs2)));
3028 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
3030 static int
3031 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
3033 if (vp1->hashcode != vp2->hashcode)
3034 return false;
3036 if (vp1->block != vp2->block)
3038 if (vp1->phiargs.length () != vp2->phiargs.length ())
3039 return false;
3041 switch (vp1->phiargs.length ())
3043 case 1:
3044 /* Single-arg PHIs are just copies. */
3045 break;
3047 case 2:
3049 /* Rule out backedges into the PHI. */
3050 if (vp1->block->loop_father->header == vp1->block
3051 || vp2->block->loop_father->header == vp2->block)
3052 return false;
3054 /* If the PHI nodes do not have compatible types
3055 they are not the same. */
3056 if (!types_compatible_p (vp1->type, vp2->type))
3057 return false;
3059 basic_block idom1
3060 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3061 basic_block idom2
3062 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
3063 /* If the immediate dominator end in switch stmts multiple
3064 values may end up in the same PHI arg via intermediate
3065 CFG merges. */
3066 if (EDGE_COUNT (idom1->succs) != 2
3067 || EDGE_COUNT (idom2->succs) != 2)
3068 return false;
3070 /* Verify the controlling stmt is the same. */
3071 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3072 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3073 if (! last1 || ! last2)
3074 return false;
3075 bool inverted_p;
3076 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3077 last2, vp2->cclhs, vp2->ccrhs,
3078 &inverted_p))
3079 return false;
3081 /* Get at true/false controlled edges into the PHI. */
3082 edge te1, te2, fe1, fe2;
3083 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3084 &te1, &fe1)
3085 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3086 &te2, &fe2))
3087 return false;
3089 /* Swap edges if the second condition is the inverted of the
3090 first. */
3091 if (inverted_p)
3092 std::swap (te2, fe2);
3094 /* ??? Handle VN_TOP specially. */
3095 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3096 vp2->phiargs[te2->dest_idx])
3097 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3098 vp2->phiargs[fe2->dest_idx]))
3099 return false;
3101 return true;
3104 default:
3105 return false;
3109 /* If the PHI nodes do not have compatible types
3110 they are not the same. */
3111 if (!types_compatible_p (vp1->type, vp2->type))
3112 return false;
3114 /* Any phi in the same block will have it's arguments in the
3115 same edge order, because of how we store phi nodes. */
3116 int i;
3117 tree phi1op;
3118 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
3120 tree phi2op = vp2->phiargs[i];
3121 if (phi1op == VN_TOP || phi2op == VN_TOP)
3122 continue;
3123 if (!expressions_equal_p (phi1op, phi2op))
3124 return false;
3127 return true;
3130 static vec<tree> shared_lookup_phiargs;
3132 /* Lookup PHI in the current hash table, and return the resulting
3133 value number if it exists in the hash table. Return NULL_TREE if
3134 it does not exist in the hash table. */
3136 static tree
3137 vn_phi_lookup (gimple *phi)
3139 vn_phi_s **slot;
3140 struct vn_phi_s vp1;
3141 edge e;
3142 edge_iterator ei;
3144 shared_lookup_phiargs.truncate (0);
3145 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
3147 /* Canonicalize the SSA_NAME's to their value number. */
3148 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3150 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3151 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3152 shared_lookup_phiargs[e->dest_idx] = def;
3154 vp1.type = TREE_TYPE (gimple_phi_result (phi));
3155 vp1.phiargs = shared_lookup_phiargs;
3156 vp1.block = gimple_bb (phi);
3157 /* Extract values of the controlling condition. */
3158 vp1.cclhs = NULL_TREE;
3159 vp1.ccrhs = NULL_TREE;
3160 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1.block);
3161 if (EDGE_COUNT (idom1->succs) == 2)
3162 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3164 vp1.cclhs = vn_valueize (gimple_cond_lhs (last1));
3165 vp1.ccrhs = vn_valueize (gimple_cond_rhs (last1));
3167 vp1.hashcode = vn_phi_compute_hash (&vp1);
3168 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3169 NO_INSERT);
3170 if (!slot && current_info == optimistic_info)
3171 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3172 NO_INSERT);
3173 if (!slot)
3174 return NULL_TREE;
3175 return (*slot)->result;
3178 /* Insert PHI into the current hash table with a value number of
3179 RESULT. */
3181 static vn_phi_t
3182 vn_phi_insert (gimple *phi, tree result)
3184 vn_phi_s **slot;
3185 vn_phi_t vp1 = current_info->phis_pool->allocate ();
3186 vec<tree> args = vNULL;
3187 edge e;
3188 edge_iterator ei;
3190 args.safe_grow (gimple_phi_num_args (phi));
3192 /* Canonicalize the SSA_NAME's to their value number. */
3193 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3195 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3196 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3197 args[e->dest_idx] = def;
3199 vp1->value_id = VN_INFO (result)->value_id;
3200 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3201 vp1->phiargs = args;
3202 vp1->block = gimple_bb (phi);
3203 /* Extract values of the controlling condition. */
3204 vp1->cclhs = NULL_TREE;
3205 vp1->ccrhs = NULL_TREE;
3206 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3207 if (EDGE_COUNT (idom1->succs) == 2)
3208 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3210 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3211 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3213 vp1->result = result;
3214 vp1->hashcode = vn_phi_compute_hash (vp1);
3216 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3218 /* Because we iterate over phi operations more than once, it's
3219 possible the slot might already exist here, hence no assert.*/
3220 *slot = vp1;
3221 return vp1;
3225 /* Print set of components in strongly connected component SCC to OUT. */
3227 static void
3228 print_scc (FILE *out, vec<tree> scc)
3230 tree var;
3231 unsigned int i;
3233 fprintf (out, "SCC consists of %u:", scc.length ());
3234 FOR_EACH_VEC_ELT (scc, i, var)
3236 fprintf (out, " ");
3237 print_generic_expr (out, var);
3239 fprintf (out, "\n");
3242 /* Return true if BB1 is dominated by BB2 taking into account edges
3243 that are not executable. */
3245 static bool
3246 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3248 edge_iterator ei;
3249 edge e;
3251 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3252 return true;
3254 /* Before iterating we'd like to know if there exists a
3255 (executable) path from bb2 to bb1 at all, if not we can
3256 directly return false. For now simply iterate once. */
3258 /* Iterate to the single executable bb1 predecessor. */
3259 if (EDGE_COUNT (bb1->preds) > 1)
3261 edge prede = NULL;
3262 FOR_EACH_EDGE (e, ei, bb1->preds)
3263 if (e->flags & EDGE_EXECUTABLE)
3265 if (prede)
3267 prede = NULL;
3268 break;
3270 prede = e;
3272 if (prede)
3274 bb1 = prede->src;
3276 /* Re-do the dominance check with changed bb1. */
3277 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3278 return true;
3282 /* Iterate to the single executable bb2 successor. */
3283 edge succe = NULL;
3284 FOR_EACH_EDGE (e, ei, bb2->succs)
3285 if (e->flags & EDGE_EXECUTABLE)
3287 if (succe)
3289 succe = NULL;
3290 break;
3292 succe = e;
3294 if (succe)
3296 /* Verify the reached block is only reached through succe.
3297 If there is only one edge we can spare us the dominator
3298 check and iterate directly. */
3299 if (EDGE_COUNT (succe->dest->preds) > 1)
3301 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3302 if (e != succe
3303 && (e->flags & EDGE_EXECUTABLE))
3305 succe = NULL;
3306 break;
3309 if (succe)
3311 bb2 = succe->dest;
3313 /* Re-do the dominance check with changed bb2. */
3314 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3315 return true;
3319 /* We could now iterate updating bb1 / bb2. */
3320 return false;
3323 /* Set the value number of FROM to TO, return true if it has changed
3324 as a result. */
3326 static inline bool
3327 set_ssa_val_to (tree from, tree to)
3329 tree currval = SSA_VAL (from);
3330 poly_int64 toff, coff;
3332 /* The only thing we allow as value numbers are ssa_names
3333 and invariants. So assert that here. We don't allow VN_TOP
3334 as visiting a stmt should produce a value-number other than
3335 that.
3336 ??? Still VN_TOP can happen for unreachable code, so force
3337 it to varying in that case. Not all code is prepared to
3338 get VN_TOP on valueization. */
3339 if (to == VN_TOP)
3341 if (dump_file && (dump_flags & TDF_DETAILS))
3342 fprintf (dump_file, "Forcing value number to varying on "
3343 "receiving VN_TOP\n");
3344 to = from;
3347 gcc_assert (to != NULL_TREE
3348 && ((TREE_CODE (to) == SSA_NAME
3349 && (to == from || SSA_VAL (to) == to))
3350 || is_gimple_min_invariant (to)));
3352 if (from != to)
3354 if (currval == from)
3356 if (dump_file && (dump_flags & TDF_DETAILS))
3358 fprintf (dump_file, "Not changing value number of ");
3359 print_generic_expr (dump_file, from);
3360 fprintf (dump_file, " from VARYING to ");
3361 print_generic_expr (dump_file, to);
3362 fprintf (dump_file, "\n");
3364 return false;
3366 else if (currval != VN_TOP
3367 && ! is_gimple_min_invariant (currval)
3368 && is_gimple_min_invariant (to))
3370 if (dump_file && (dump_flags & TDF_DETAILS))
3372 fprintf (dump_file, "Forcing VARYING instead of changing "
3373 "value number of ");
3374 print_generic_expr (dump_file, from);
3375 fprintf (dump_file, " from ");
3376 print_generic_expr (dump_file, currval);
3377 fprintf (dump_file, " (non-constant) to ");
3378 print_generic_expr (dump_file, to);
3379 fprintf (dump_file, " (constant)\n");
3381 to = from;
3383 else if (TREE_CODE (to) == SSA_NAME
3384 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3385 to = from;
3388 if (dump_file && (dump_flags & TDF_DETAILS))
3390 fprintf (dump_file, "Setting value number of ");
3391 print_generic_expr (dump_file, from);
3392 fprintf (dump_file, " to ");
3393 print_generic_expr (dump_file, to);
3396 if (currval != to
3397 && !operand_equal_p (currval, to, 0)
3398 /* Different undefined SSA names are not actually different. See
3399 PR82320 for a testcase were we'd otherwise not terminate iteration. */
3400 && !(TREE_CODE (currval) == SSA_NAME
3401 && TREE_CODE (to) == SSA_NAME
3402 && ssa_undefined_value_p (currval, false)
3403 && ssa_undefined_value_p (to, false))
3404 /* ??? For addresses involving volatile objects or types operand_equal_p
3405 does not reliably detect ADDR_EXPRs as equal. We know we are only
3406 getting invariant gimple addresses here, so can use
3407 get_addr_base_and_unit_offset to do this comparison. */
3408 && !(TREE_CODE (currval) == ADDR_EXPR
3409 && TREE_CODE (to) == ADDR_EXPR
3410 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3411 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3412 && known_eq (coff, toff)))
3414 if (dump_file && (dump_flags & TDF_DETAILS))
3415 fprintf (dump_file, " (changed)\n");
3417 /* If we equate two SSA names we have to make the side-band info
3418 of the leader conservative (and remember whatever original value
3419 was present). */
3420 if (TREE_CODE (to) == SSA_NAME)
3422 if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3423 && SSA_NAME_RANGE_INFO (to))
3425 if (SSA_NAME_IS_DEFAULT_DEF (to)
3426 || dominated_by_p_w_unex
3427 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3428 gimple_bb (SSA_NAME_DEF_STMT (to))))
3429 /* Keep the info from the dominator. */
3431 else
3433 /* Save old info. */
3434 if (! VN_INFO (to)->info.range_info)
3436 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3437 VN_INFO (to)->range_info_anti_range_p
3438 = SSA_NAME_ANTI_RANGE_P (to);
3440 /* Rather than allocating memory and unioning the info
3441 just clear it. */
3442 if (dump_file && (dump_flags & TDF_DETAILS))
3444 fprintf (dump_file, "clearing range info of ");
3445 print_generic_expr (dump_file, to);
3446 fprintf (dump_file, "\n");
3448 SSA_NAME_RANGE_INFO (to) = NULL;
3451 else if (POINTER_TYPE_P (TREE_TYPE (to))
3452 && SSA_NAME_PTR_INFO (to))
3454 if (SSA_NAME_IS_DEFAULT_DEF (to)
3455 || dominated_by_p_w_unex
3456 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3457 gimple_bb (SSA_NAME_DEF_STMT (to))))
3458 /* Keep the info from the dominator. */
3460 else if (! SSA_NAME_PTR_INFO (from)
3461 /* Handle the case of trivially equivalent info. */
3462 || memcmp (SSA_NAME_PTR_INFO (to),
3463 SSA_NAME_PTR_INFO (from),
3464 sizeof (ptr_info_def)) != 0)
3466 /* Save old info. */
3467 if (! VN_INFO (to)->info.ptr_info)
3468 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3469 /* Rather than allocating memory and unioning the info
3470 just clear it. */
3471 if (dump_file && (dump_flags & TDF_DETAILS))
3473 fprintf (dump_file, "clearing points-to info of ");
3474 print_generic_expr (dump_file, to);
3475 fprintf (dump_file, "\n");
3477 SSA_NAME_PTR_INFO (to) = NULL;
3482 VN_INFO (from)->valnum = to;
3483 return true;
3485 if (dump_file && (dump_flags & TDF_DETAILS))
3486 fprintf (dump_file, "\n");
3487 return false;
3490 /* Mark as processed all the definitions in the defining stmt of USE, or
3491 the USE itself. */
3493 static void
3494 mark_use_processed (tree use)
3496 ssa_op_iter iter;
3497 def_operand_p defp;
3498 gimple *stmt = SSA_NAME_DEF_STMT (use);
3500 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3502 VN_INFO (use)->use_processed = true;
3503 return;
3506 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3508 tree def = DEF_FROM_PTR (defp);
3510 VN_INFO (def)->use_processed = true;
3514 /* Set all definitions in STMT to value number to themselves.
3515 Return true if a value number changed. */
3517 static bool
3518 defs_to_varying (gimple *stmt)
3520 bool changed = false;
3521 ssa_op_iter iter;
3522 def_operand_p defp;
3524 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3526 tree def = DEF_FROM_PTR (defp);
3527 changed |= set_ssa_val_to (def, def);
3529 return changed;
3532 /* Visit a copy between LHS and RHS, return true if the value number
3533 changed. */
3535 static bool
3536 visit_copy (tree lhs, tree rhs)
3538 /* Valueize. */
3539 rhs = SSA_VAL (rhs);
3541 return set_ssa_val_to (lhs, rhs);
3544 /* Lookup a value for OP in type WIDE_TYPE where the value in type of OP
3545 is the same. */
3547 static tree
3548 valueized_wider_op (tree wide_type, tree op)
3550 if (TREE_CODE (op) == SSA_NAME)
3551 op = SSA_VAL (op);
3553 /* Either we have the op widened available. */
3554 tree ops[3] = {};
3555 ops[0] = op;
3556 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
3557 wide_type, ops, NULL);
3558 if (tem)
3559 return tem;
3561 /* Or the op is truncated from some existing value. */
3562 if (TREE_CODE (op) == SSA_NAME)
3564 gimple *def = SSA_NAME_DEF_STMT (op);
3565 if (is_gimple_assign (def)
3566 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3568 tem = gimple_assign_rhs1 (def);
3569 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
3571 if (TREE_CODE (tem) == SSA_NAME)
3572 tem = SSA_VAL (tem);
3573 return tem;
3578 /* For constants simply extend it. */
3579 if (TREE_CODE (op) == INTEGER_CST)
3580 return wide_int_to_tree (wide_type, wi::to_wide (op));
3582 return NULL_TREE;
3585 /* Visit a nary operator RHS, value number it, and return true if the
3586 value number of LHS has changed as a result. */
3588 static bool
3589 visit_nary_op (tree lhs, gassign *stmt)
3591 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3592 if (result)
3593 return set_ssa_val_to (lhs, result);
3595 /* Do some special pattern matching for redundancies of operations
3596 in different types. */
3597 enum tree_code code = gimple_assign_rhs_code (stmt);
3598 tree type = TREE_TYPE (lhs);
3599 tree rhs1 = gimple_assign_rhs1 (stmt);
3600 switch (code)
3602 CASE_CONVERT:
3603 /* Match arithmetic done in a different type where we can easily
3604 substitute the result from some earlier sign-changed or widened
3605 operation. */
3606 if (INTEGRAL_TYPE_P (type)
3607 && TREE_CODE (rhs1) == SSA_NAME
3608 /* We only handle sign-changes or zero-extension -> & mask. */
3609 && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
3610 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
3611 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
3613 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
3614 if (def
3615 && (gimple_assign_rhs_code (def) == PLUS_EXPR
3616 || gimple_assign_rhs_code (def) == MINUS_EXPR
3617 || gimple_assign_rhs_code (def) == MULT_EXPR))
3619 tree ops[3] = {};
3620 /* Either we have the op widened available. */
3621 ops[0] = valueized_wider_op (type,
3622 gimple_assign_rhs1 (def));
3623 if (ops[0])
3624 ops[1] = valueized_wider_op (type,
3625 gimple_assign_rhs2 (def));
3626 if (ops[0] && ops[1])
3628 ops[0] = vn_nary_op_lookup_pieces
3629 (2, gimple_assign_rhs_code (def), type, ops, NULL);
3630 /* We have wider operation available. */
3631 if (ops[0])
3633 unsigned lhs_prec = TYPE_PRECISION (type);
3634 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
3635 if (lhs_prec == rhs_prec)
3637 ops[1] = NULL_TREE;
3638 result = vn_nary_build_or_lookup (NOP_EXPR,
3639 type, ops);
3640 if (result)
3642 bool changed = set_ssa_val_to (lhs, result);
3643 vn_nary_op_insert_stmt (stmt, result);
3644 return changed;
3647 else
3649 ops[1] = wide_int_to_tree (type,
3650 wi::mask (rhs_prec, false,
3651 lhs_prec));
3652 result = vn_nary_build_or_lookup (BIT_AND_EXPR,
3653 TREE_TYPE (lhs),
3654 ops);
3655 if (result)
3657 bool changed = set_ssa_val_to (lhs, result);
3658 vn_nary_op_insert_stmt (stmt, result);
3659 return changed;
3666 default:;
3669 bool changed = set_ssa_val_to (lhs, lhs);
3670 vn_nary_op_insert_stmt (stmt, lhs);
3671 return changed;
3674 /* Visit a call STMT storing into LHS. Return true if the value number
3675 of the LHS has changed as a result. */
3677 static bool
3678 visit_reference_op_call (tree lhs, gcall *stmt)
3680 bool changed = false;
3681 struct vn_reference_s vr1;
3682 vn_reference_t vnresult = NULL;
3683 tree vdef = gimple_vdef (stmt);
3685 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3686 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3687 lhs = NULL_TREE;
3689 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3690 if (vnresult)
3692 if (vnresult->result_vdef && vdef)
3693 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3694 else if (vdef)
3695 /* If the call was discovered to be pure or const reflect
3696 that as far as possible. */
3697 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
3699 if (!vnresult->result && lhs)
3700 vnresult->result = lhs;
3702 if (vnresult->result && lhs)
3703 changed |= set_ssa_val_to (lhs, vnresult->result);
3705 else
3707 vn_reference_t vr2;
3708 vn_reference_s **slot;
3709 tree vdef_val = vdef;
3710 if (vdef)
3712 /* If we value numbered an indirect functions function to
3713 one not clobbering memory value number its VDEF to its
3714 VUSE. */
3715 tree fn = gimple_call_fn (stmt);
3716 if (fn && TREE_CODE (fn) == SSA_NAME)
3718 fn = SSA_VAL (fn);
3719 if (TREE_CODE (fn) == ADDR_EXPR
3720 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3721 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
3722 & (ECF_CONST | ECF_PURE)))
3723 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
3725 changed |= set_ssa_val_to (vdef, vdef_val);
3727 if (lhs)
3728 changed |= set_ssa_val_to (lhs, lhs);
3729 vr2 = current_info->references_pool->allocate ();
3730 vr2->vuse = vr1.vuse;
3731 /* As we are not walking the virtual operand chain we know the
3732 shared_lookup_references are still original so we can re-use
3733 them here. */
3734 vr2->operands = vr1.operands.copy ();
3735 vr2->type = vr1.type;
3736 vr2->set = vr1.set;
3737 vr2->hashcode = vr1.hashcode;
3738 vr2->result = lhs;
3739 vr2->result_vdef = vdef_val;
3740 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3741 INSERT);
3742 gcc_assert (!*slot);
3743 *slot = vr2;
3746 return changed;
3749 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3750 and return true if the value number of the LHS has changed as a result. */
3752 static bool
3753 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3755 bool changed = false;
3756 tree last_vuse;
3757 tree result;
3759 last_vuse = gimple_vuse (stmt);
3760 last_vuse_ptr = &last_vuse;
3761 result = vn_reference_lookup (op, gimple_vuse (stmt),
3762 default_vn_walk_kind, NULL, true);
3763 last_vuse_ptr = NULL;
3765 /* We handle type-punning through unions by value-numbering based
3766 on offset and size of the access. Be prepared to handle a
3767 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
3768 if (result
3769 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3771 /* We will be setting the value number of lhs to the value number
3772 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3773 So first simplify and lookup this expression to see if it
3774 is already available. */
3775 code_helper rcode = VIEW_CONVERT_EXPR;
3776 tree ops[3] = { result };
3777 result = vn_nary_build_or_lookup (rcode, TREE_TYPE (op), ops);
3780 if (result)
3781 changed = set_ssa_val_to (lhs, result);
3782 else
3784 changed = set_ssa_val_to (lhs, lhs);
3785 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3788 return changed;
3792 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3793 and return true if the value number of the LHS has changed as a result. */
3795 static bool
3796 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3798 bool changed = false;
3799 vn_reference_t vnresult = NULL;
3800 tree assign;
3801 bool resultsame = false;
3802 tree vuse = gimple_vuse (stmt);
3803 tree vdef = gimple_vdef (stmt);
3805 if (TREE_CODE (op) == SSA_NAME)
3806 op = SSA_VAL (op);
3808 /* First we want to lookup using the *vuses* from the store and see
3809 if there the last store to this location with the same address
3810 had the same value.
3812 The vuses represent the memory state before the store. If the
3813 memory state, address, and value of the store is the same as the
3814 last store to this location, then this store will produce the
3815 same memory state as that store.
3817 In this case the vdef versions for this store are value numbered to those
3818 vuse versions, since they represent the same memory state after
3819 this store.
3821 Otherwise, the vdefs for the store are used when inserting into
3822 the table, since the store generates a new memory state. */
3824 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
3825 if (vnresult
3826 && vnresult->result)
3828 tree result = vnresult->result;
3829 if (TREE_CODE (result) == SSA_NAME)
3830 result = SSA_VAL (result);
3831 resultsame = expressions_equal_p (result, op);
3832 if (resultsame)
3834 /* If the TBAA state isn't compatible for downstream reads
3835 we cannot value-number the VDEFs the same. */
3836 alias_set_type set = get_alias_set (lhs);
3837 if (vnresult->set != set
3838 && ! alias_set_subset_of (set, vnresult->set))
3839 resultsame = false;
3843 if (!resultsame)
3845 /* Only perform the following when being called from PRE
3846 which embeds tail merging. */
3847 if (default_vn_walk_kind == VN_WALK)
3849 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3850 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3851 if (vnresult)
3853 VN_INFO (vdef)->use_processed = true;
3854 return set_ssa_val_to (vdef, vnresult->result_vdef);
3858 if (dump_file && (dump_flags & TDF_DETAILS))
3860 fprintf (dump_file, "No store match\n");
3861 fprintf (dump_file, "Value numbering store ");
3862 print_generic_expr (dump_file, lhs);
3863 fprintf (dump_file, " to ");
3864 print_generic_expr (dump_file, op);
3865 fprintf (dump_file, "\n");
3867 /* Have to set value numbers before insert, since insert is
3868 going to valueize the references in-place. */
3869 if (vdef)
3870 changed |= set_ssa_val_to (vdef, vdef);
3872 /* Do not insert structure copies into the tables. */
3873 if (is_gimple_min_invariant (op)
3874 || is_gimple_reg (op))
3875 vn_reference_insert (lhs, op, vdef, NULL);
3877 /* Only perform the following when being called from PRE
3878 which embeds tail merging. */
3879 if (default_vn_walk_kind == VN_WALK)
3881 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3882 vn_reference_insert (assign, lhs, vuse, vdef);
3885 else
3887 /* We had a match, so value number the vdef to have the value
3888 number of the vuse it came from. */
3890 if (dump_file && (dump_flags & TDF_DETAILS))
3891 fprintf (dump_file, "Store matched earlier value, "
3892 "value numbering store vdefs to matching vuses.\n");
3894 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3897 return changed;
3900 /* Visit and value number PHI, return true if the value number
3901 changed. */
3903 static bool
3904 visit_phi (gimple *phi)
3906 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
3907 unsigned n_executable = 0;
3908 bool allsame = true;
3909 edge_iterator ei;
3910 edge e;
3912 /* TODO: We could check for this in init_sccvn, and replace this
3913 with a gcc_assert. */
3914 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3915 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3917 /* See if all non-TOP arguments have the same value. TOP is
3918 equivalent to everything, so we can ignore it. */
3919 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3920 if (e->flags & EDGE_EXECUTABLE)
3922 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3924 ++n_executable;
3925 if (TREE_CODE (def) == SSA_NAME)
3926 def = SSA_VAL (def);
3927 if (def == VN_TOP)
3929 /* Ignore undefined defs for sameval but record one. */
3930 else if (TREE_CODE (def) == SSA_NAME
3931 && ssa_undefined_value_p (def, false))
3932 seen_undef = def;
3933 else if (sameval == VN_TOP)
3934 sameval = def;
3935 else if (!expressions_equal_p (def, sameval))
3937 allsame = false;
3938 break;
3943 /* If none of the edges was executable keep the value-number at VN_TOP,
3944 if only a single edge is exectuable use its value. */
3945 if (n_executable <= 1)
3946 result = seen_undef ? seen_undef : sameval;
3947 /* If we saw only undefined values and VN_TOP use one of the
3948 undefined values. */
3949 else if (sameval == VN_TOP)
3950 result = seen_undef ? seen_undef : sameval;
3951 /* First see if it is equivalent to a phi node in this block. We prefer
3952 this as it allows IV elimination - see PRs 66502 and 67167. */
3953 else if ((result = vn_phi_lookup (phi)))
3955 /* If all values are the same use that, unless we've seen undefined
3956 values as well and the value isn't constant.
3957 CCP/copyprop have the same restriction to not remove uninit warnings. */
3958 else if (allsame
3959 && (! seen_undef || is_gimple_min_invariant (sameval)))
3960 result = sameval;
3961 else
3963 result = PHI_RESULT (phi);
3964 /* Only insert PHIs that are varying, for constant value numbers
3965 we mess up equivalences otherwise as we are only comparing
3966 the immediate controlling predicates. */
3967 vn_phi_insert (phi, result);
3970 return set_ssa_val_to (PHI_RESULT (phi), result);
3973 /* Try to simplify RHS using equivalences and constant folding. */
3975 static tree
3976 try_to_simplify (gassign *stmt)
3978 enum tree_code code = gimple_assign_rhs_code (stmt);
3979 tree tem;
3981 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
3982 in this case, there is no point in doing extra work. */
3983 if (code == SSA_NAME)
3984 return NULL_TREE;
3986 /* First try constant folding based on our current lattice. */
3987 mprts_hook = vn_lookup_simplify_result;
3988 mprts_hook_cnt = 9;
3989 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3990 mprts_hook = NULL;
3991 if (tem
3992 && (TREE_CODE (tem) == SSA_NAME
3993 || is_gimple_min_invariant (tem)))
3994 return tem;
3996 return NULL_TREE;
3999 /* Visit and value number USE, return true if the value number
4000 changed. */
4002 static bool
4003 visit_use (tree use)
4005 bool changed = false;
4006 gimple *stmt = SSA_NAME_DEF_STMT (use);
4008 mark_use_processed (use);
4010 gcc_assert (!SSA_NAME_IN_FREE_LIST (use)
4011 && !SSA_NAME_IS_DEFAULT_DEF (use));
4013 if (dump_file && (dump_flags & TDF_DETAILS))
4015 fprintf (dump_file, "Value numbering ");
4016 print_generic_expr (dump_file, use);
4017 fprintf (dump_file, " stmt = ");
4018 print_gimple_stmt (dump_file, stmt, 0);
4021 if (gimple_code (stmt) == GIMPLE_PHI)
4022 changed = visit_phi (stmt);
4023 else if (gimple_has_volatile_ops (stmt))
4024 changed = defs_to_varying (stmt);
4025 else if (gassign *ass = dyn_cast <gassign *> (stmt))
4027 enum tree_code code = gimple_assign_rhs_code (ass);
4028 tree lhs = gimple_assign_lhs (ass);
4029 tree rhs1 = gimple_assign_rhs1 (ass);
4030 tree simplified;
4032 /* Shortcut for copies. Simplifying copies is pointless,
4033 since we copy the expression and value they represent. */
4034 if (code == SSA_NAME
4035 && TREE_CODE (lhs) == SSA_NAME)
4037 changed = visit_copy (lhs, rhs1);
4038 goto done;
4040 simplified = try_to_simplify (ass);
4041 if (simplified)
4043 if (dump_file && (dump_flags & TDF_DETAILS))
4045 fprintf (dump_file, "RHS ");
4046 print_gimple_expr (dump_file, ass, 0);
4047 fprintf (dump_file, " simplified to ");
4048 print_generic_expr (dump_file, simplified);
4049 fprintf (dump_file, "\n");
4052 /* Setting value numbers to constants will occasionally
4053 screw up phi congruence because constants are not
4054 uniquely associated with a single ssa name that can be
4055 looked up. */
4056 if (simplified
4057 && is_gimple_min_invariant (simplified)
4058 && TREE_CODE (lhs) == SSA_NAME)
4060 changed = set_ssa_val_to (lhs, simplified);
4061 goto done;
4063 else if (simplified
4064 && TREE_CODE (simplified) == SSA_NAME
4065 && TREE_CODE (lhs) == SSA_NAME)
4067 changed = visit_copy (lhs, simplified);
4068 goto done;
4071 if ((TREE_CODE (lhs) == SSA_NAME
4072 /* We can substitute SSA_NAMEs that are live over
4073 abnormal edges with their constant value. */
4074 && !(gimple_assign_copy_p (ass)
4075 && is_gimple_min_invariant (rhs1))
4076 && !(simplified
4077 && is_gimple_min_invariant (simplified))
4078 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4079 /* Stores or copies from SSA_NAMEs that are live over
4080 abnormal edges are a problem. */
4081 || (code == SSA_NAME
4082 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4083 changed = defs_to_varying (ass);
4084 else if (REFERENCE_CLASS_P (lhs)
4085 || DECL_P (lhs))
4086 changed = visit_reference_op_store (lhs, rhs1, ass);
4087 else if (TREE_CODE (lhs) == SSA_NAME)
4089 if ((gimple_assign_copy_p (ass)
4090 && is_gimple_min_invariant (rhs1))
4091 || (simplified
4092 && is_gimple_min_invariant (simplified)))
4094 if (simplified)
4095 changed = set_ssa_val_to (lhs, simplified);
4096 else
4097 changed = set_ssa_val_to (lhs, rhs1);
4099 else
4101 /* Visit the original statement. */
4102 switch (vn_get_stmt_kind (ass))
4104 case VN_NARY:
4105 changed = visit_nary_op (lhs, ass);
4106 break;
4107 case VN_REFERENCE:
4108 changed = visit_reference_op_load (lhs, rhs1, ass);
4109 break;
4110 default:
4111 changed = defs_to_varying (ass);
4112 break;
4116 else
4117 changed = defs_to_varying (ass);
4119 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4121 tree lhs = gimple_call_lhs (call_stmt);
4122 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4124 /* Try constant folding based on our current lattice. */
4125 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4126 vn_valueize);
4127 if (simplified)
4129 if (dump_file && (dump_flags & TDF_DETAILS))
4131 fprintf (dump_file, "call ");
4132 print_gimple_expr (dump_file, call_stmt, 0);
4133 fprintf (dump_file, " simplified to ");
4134 print_generic_expr (dump_file, simplified);
4135 fprintf (dump_file, "\n");
4138 /* Setting value numbers to constants will occasionally
4139 screw up phi congruence because constants are not
4140 uniquely associated with a single ssa name that can be
4141 looked up. */
4142 if (simplified
4143 && is_gimple_min_invariant (simplified))
4145 changed = set_ssa_val_to (lhs, simplified);
4146 if (gimple_vdef (call_stmt))
4147 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4148 SSA_VAL (gimple_vuse (call_stmt)));
4149 goto done;
4151 else if (simplified
4152 && TREE_CODE (simplified) == SSA_NAME)
4154 changed = visit_copy (lhs, simplified);
4155 if (gimple_vdef (call_stmt))
4156 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4157 SSA_VAL (gimple_vuse (call_stmt)));
4158 goto done;
4160 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4162 changed = defs_to_varying (call_stmt);
4163 goto done;
4167 /* Pick up flags from a devirtualization target. */
4168 tree fn = gimple_call_fn (stmt);
4169 int extra_fnflags = 0;
4170 if (fn && TREE_CODE (fn) == SSA_NAME)
4172 fn = SSA_VAL (fn);
4173 if (TREE_CODE (fn) == ADDR_EXPR
4174 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4175 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4177 if (!gimple_call_internal_p (call_stmt)
4178 && (/* Calls to the same function with the same vuse
4179 and the same operands do not necessarily return the same
4180 value, unless they're pure or const. */
4181 ((gimple_call_flags (call_stmt) | extra_fnflags)
4182 & (ECF_PURE | ECF_CONST))
4183 /* If calls have a vdef, subsequent calls won't have
4184 the same incoming vuse. So, if 2 calls with vdef have the
4185 same vuse, we know they're not subsequent.
4186 We can value number 2 calls to the same function with the
4187 same vuse and the same operands which are not subsequent
4188 the same, because there is no code in the program that can
4189 compare the 2 values... */
4190 || (gimple_vdef (call_stmt)
4191 /* ... unless the call returns a pointer which does
4192 not alias with anything else. In which case the
4193 information that the values are distinct are encoded
4194 in the IL. */
4195 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4196 /* Only perform the following when being called from PRE
4197 which embeds tail merging. */
4198 && default_vn_walk_kind == VN_WALK)))
4199 changed = visit_reference_op_call (lhs, call_stmt);
4200 else
4201 changed = defs_to_varying (call_stmt);
4203 else
4204 changed = defs_to_varying (stmt);
4205 done:
4206 return changed;
4209 /* Compare two operands by reverse postorder index */
4211 static int
4212 compare_ops (const void *pa, const void *pb)
4214 const tree opa = *((const tree *)pa);
4215 const tree opb = *((const tree *)pb);
4216 gimple *opstmta = SSA_NAME_DEF_STMT (opa);
4217 gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
4218 basic_block bba;
4219 basic_block bbb;
4221 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
4222 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4223 else if (gimple_nop_p (opstmta))
4224 return -1;
4225 else if (gimple_nop_p (opstmtb))
4226 return 1;
4228 bba = gimple_bb (opstmta);
4229 bbb = gimple_bb (opstmtb);
4231 if (!bba && !bbb)
4232 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4233 else if (!bba)
4234 return -1;
4235 else if (!bbb)
4236 return 1;
4238 if (bba == bbb)
4240 if (gimple_code (opstmta) == GIMPLE_PHI
4241 && gimple_code (opstmtb) == GIMPLE_PHI)
4242 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4243 else if (gimple_code (opstmta) == GIMPLE_PHI)
4244 return -1;
4245 else if (gimple_code (opstmtb) == GIMPLE_PHI)
4246 return 1;
4247 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
4248 return gimple_uid (opstmta) - gimple_uid (opstmtb);
4249 else
4250 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4252 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
4255 /* Sort an array containing members of a strongly connected component
4256 SCC so that the members are ordered by RPO number.
4257 This means that when the sort is complete, iterating through the
4258 array will give you the members in RPO order. */
4260 static void
4261 sort_scc (vec<tree> scc)
4263 scc.qsort (compare_ops);
4266 /* Insert the no longer used nary ONARY to the hash INFO. */
4268 static void
4269 copy_nary (vn_nary_op_t onary, vn_tables_t info)
4271 size_t size = sizeof_vn_nary_op (onary->length);
4272 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
4273 &info->nary_obstack);
4274 memcpy (nary, onary, size);
4275 vn_nary_op_insert_into (nary, info->nary, false);
4278 /* Insert the no longer used phi OPHI to the hash INFO. */
4280 static void
4281 copy_phi (vn_phi_t ophi, vn_tables_t info)
4283 vn_phi_t phi = info->phis_pool->allocate ();
4284 vn_phi_s **slot;
4285 memcpy (phi, ophi, sizeof (*phi));
4286 ophi->phiargs.create (0);
4287 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
4288 gcc_assert (!*slot);
4289 *slot = phi;
4292 /* Insert the no longer used reference OREF to the hash INFO. */
4294 static void
4295 copy_reference (vn_reference_t oref, vn_tables_t info)
4297 vn_reference_t ref;
4298 vn_reference_s **slot;
4299 ref = info->references_pool->allocate ();
4300 memcpy (ref, oref, sizeof (*ref));
4301 oref->operands.create (0);
4302 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
4303 if (*slot)
4304 free_reference (*slot);
4305 *slot = ref;
4308 /* Process a strongly connected component in the SSA graph. */
4310 static void
4311 process_scc (vec<tree> scc)
4313 tree var;
4314 unsigned int i;
4315 unsigned int iterations = 0;
4316 bool changed = true;
4317 vn_nary_op_iterator_type hin;
4318 vn_phi_iterator_type hip;
4319 vn_reference_iterator_type hir;
4320 vn_nary_op_t nary;
4321 vn_phi_t phi;
4322 vn_reference_t ref;
4324 /* If the SCC has a single member, just visit it. */
4325 if (scc.length () == 1)
4327 tree use = scc[0];
4328 if (VN_INFO (use)->use_processed)
4329 return;
4330 /* We need to make sure it doesn't form a cycle itself, which can
4331 happen for self-referential PHI nodes. In that case we would
4332 end up inserting an expression with VN_TOP operands into the
4333 valid table which makes us derive bogus equivalences later.
4334 The cheapest way to check this is to assume it for all PHI nodes. */
4335 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
4336 /* Fallthru to iteration. */ ;
4337 else
4339 visit_use (use);
4340 return;
4344 if (dump_file && (dump_flags & TDF_DETAILS))
4345 print_scc (dump_file, scc);
4347 /* Iterate over the SCC with the optimistic table until it stops
4348 changing. */
4349 current_info = optimistic_info;
4350 while (changed)
4352 changed = false;
4353 iterations++;
4354 if (dump_file && (dump_flags & TDF_DETAILS))
4355 fprintf (dump_file, "Starting iteration %d\n", iterations);
4356 /* As we are value-numbering optimistically we have to
4357 clear the expression tables and the simplified expressions
4358 in each iteration until we converge. */
4359 optimistic_info->nary->empty ();
4360 optimistic_info->phis->empty ();
4361 optimistic_info->references->empty ();
4362 obstack_free (&optimistic_info->nary_obstack, NULL);
4363 gcc_obstack_init (&optimistic_info->nary_obstack);
4364 optimistic_info->phis_pool->release ();
4365 optimistic_info->references_pool->release ();
4366 FOR_EACH_VEC_ELT (scc, i, var)
4367 gcc_assert (!VN_INFO (var)->needs_insertion
4368 && VN_INFO (var)->expr == NULL);
4369 FOR_EACH_VEC_ELT (scc, i, var)
4370 changed |= visit_use (var);
4373 if (dump_file && (dump_flags & TDF_DETAILS))
4374 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4375 statistics_histogram_event (cfun, "SCC iterations", iterations);
4377 /* Finally, copy the contents of the no longer used optimistic
4378 table to the valid table. */
4379 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
4380 copy_nary (nary, valid_info);
4381 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
4382 copy_phi (phi, valid_info);
4383 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
4384 ref, vn_reference_t, hir)
4385 copy_reference (ref, valid_info);
4387 current_info = valid_info;
4391 /* Pop the components of the found SCC for NAME off the SCC stack
4392 and process them. Returns true if all went well, false if
4393 we run into resource limits. */
4395 static void
4396 extract_and_process_scc_for_name (tree name)
4398 auto_vec<tree> scc;
4399 tree x;
4401 /* Found an SCC, pop the components off the SCC stack and
4402 process them. */
4405 x = sccstack.pop ();
4407 VN_INFO (x)->on_sccstack = false;
4408 scc.safe_push (x);
4409 } while (x != name);
4411 /* Drop all defs in the SCC to varying in case a SCC turns out to be
4412 incredibly large.
4413 ??? Just switch to a non-optimistic mode that avoids any iteration. */
4414 if (scc.length () > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4416 if (dump_file)
4418 print_scc (dump_file, scc);
4419 fprintf (dump_file, "WARNING: Giving up value-numbering SCC due to "
4420 "size %u exceeding %u\n", scc.length (),
4421 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4423 tree var;
4424 unsigned i;
4425 FOR_EACH_VEC_ELT (scc, i, var)
4427 gimple *def = SSA_NAME_DEF_STMT (var);
4428 mark_use_processed (var);
4429 if (SSA_NAME_IS_DEFAULT_DEF (var)
4430 || gimple_code (def) == GIMPLE_PHI)
4431 set_ssa_val_to (var, var);
4432 else
4433 defs_to_varying (def);
4435 return;
4438 if (scc.length () > 1)
4439 sort_scc (scc);
4441 process_scc (scc);
4444 /* Depth first search on NAME to discover and process SCC's in the SSA
4445 graph.
4446 Execution of this algorithm relies on the fact that the SCC's are
4447 popped off the stack in topological order.
4448 Returns true if successful, false if we stopped processing SCC's due
4449 to resource constraints. */
4451 static void
4452 DFS (tree name)
4454 auto_vec<ssa_op_iter> itervec;
4455 auto_vec<tree> namevec;
4456 use_operand_p usep = NULL;
4457 gimple *defstmt;
4458 tree use;
4459 ssa_op_iter iter;
4461 start_over:
4462 /* SCC info */
4463 VN_INFO (name)->dfsnum = next_dfs_num++;
4464 VN_INFO (name)->visited = true;
4465 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4467 sccstack.safe_push (name);
4468 VN_INFO (name)->on_sccstack = true;
4469 defstmt = SSA_NAME_DEF_STMT (name);
4471 /* Recursively DFS on our operands, looking for SCC's. */
4472 if (!gimple_nop_p (defstmt))
4474 /* Push a new iterator. */
4475 if (gphi *phi = dyn_cast <gphi *> (defstmt))
4476 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4477 else
4478 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4480 else
4481 clear_and_done_ssa_iter (&iter);
4483 while (1)
4485 /* If we are done processing uses of a name, go up the stack
4486 of iterators and process SCCs as we found them. */
4487 if (op_iter_done (&iter))
4489 /* See if we found an SCC. */
4490 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4491 extract_and_process_scc_for_name (name);
4493 /* Check if we are done. */
4494 if (namevec.is_empty ())
4495 return;
4497 /* Restore the last use walker and continue walking there. */
4498 use = name;
4499 name = namevec.pop ();
4500 memcpy (&iter, &itervec.last (),
4501 sizeof (ssa_op_iter));
4502 itervec.pop ();
4503 goto continue_walking;
4506 use = USE_FROM_PTR (usep);
4508 /* Since we handle phi nodes, we will sometimes get
4509 invariants in the use expression. */
4510 if (TREE_CODE (use) == SSA_NAME)
4512 if (! (VN_INFO (use)->visited))
4514 /* Recurse by pushing the current use walking state on
4515 the stack and starting over. */
4516 itervec.safe_push (iter);
4517 namevec.safe_push (name);
4518 name = use;
4519 goto start_over;
4521 continue_walking:
4522 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4523 VN_INFO (use)->low);
4525 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4526 && VN_INFO (use)->on_sccstack)
4528 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4529 VN_INFO (name)->low);
4533 usep = op_iter_next_use (&iter);
4537 /* Allocate a value number table. */
4539 static void
4540 allocate_vn_table (vn_tables_t table)
4542 table->phis = new vn_phi_table_type (23);
4543 table->nary = new vn_nary_op_table_type (23);
4544 table->references = new vn_reference_table_type (23);
4546 gcc_obstack_init (&table->nary_obstack);
4547 table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4548 table->references_pool = new object_allocator<vn_reference_s>
4549 ("VN references");
4552 /* Free a value number table. */
4554 static void
4555 free_vn_table (vn_tables_t table)
4557 delete table->phis;
4558 table->phis = NULL;
4559 delete table->nary;
4560 table->nary = NULL;
4561 delete table->references;
4562 table->references = NULL;
4563 obstack_free (&table->nary_obstack, NULL);
4564 delete table->phis_pool;
4565 delete table->references_pool;
4568 static void
4569 init_scc_vn (void)
4571 int j;
4572 int *rpo_numbers_temp;
4574 calculate_dominance_info (CDI_DOMINATORS);
4575 mark_dfs_back_edges ();
4577 sccstack.create (0);
4578 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4580 constant_value_ids = BITMAP_ALLOC (NULL);
4582 next_dfs_num = 1;
4583 next_value_id = 1;
4585 vn_ssa_aux_table.create (num_ssa_names + 1);
4586 /* VEC_alloc doesn't actually grow it to the right size, it just
4587 preallocates the space to do so. */
4588 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4589 gcc_obstack_init (&vn_ssa_aux_obstack);
4591 shared_lookup_phiargs.create (0);
4592 shared_lookup_references.create (0);
4593 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4594 rpo_numbers_temp =
4595 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4596 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4598 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
4599 the i'th block in RPO order is bb. We want to map bb's to RPO
4600 numbers, so we need to rearrange this array. */
4601 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4602 rpo_numbers[rpo_numbers_temp[j]] = j;
4604 XDELETE (rpo_numbers_temp);
4606 VN_TOP = build_decl (UNKNOWN_LOCATION, VAR_DECL,
4607 get_identifier ("VN_TOP"), error_mark_node);
4609 renumber_gimple_stmt_uids ();
4611 /* Create the valid and optimistic value numbering tables. */
4612 valid_info = XCNEW (struct vn_tables_s);
4613 allocate_vn_table (valid_info);
4614 optimistic_info = XCNEW (struct vn_tables_s);
4615 allocate_vn_table (optimistic_info);
4616 current_info = valid_info;
4618 /* Create the VN_INFO structures, and initialize value numbers to
4619 TOP or VARYING for parameters. */
4620 size_t i;
4621 tree name;
4623 FOR_EACH_SSA_NAME (i, name, cfun)
4625 VN_INFO_GET (name)->valnum = VN_TOP;
4626 VN_INFO (name)->needs_insertion = false;
4627 VN_INFO (name)->expr = NULL;
4628 VN_INFO (name)->value_id = 0;
4630 if (!SSA_NAME_IS_DEFAULT_DEF (name))
4631 continue;
4633 switch (TREE_CODE (SSA_NAME_VAR (name)))
4635 case VAR_DECL:
4636 /* All undefined vars are VARYING. */
4637 VN_INFO (name)->valnum = name;
4638 VN_INFO (name)->visited = true;
4639 break;
4641 case PARM_DECL:
4642 /* Parameters are VARYING but we can record a condition
4643 if we know it is a non-NULL pointer. */
4644 VN_INFO (name)->visited = true;
4645 VN_INFO (name)->valnum = name;
4646 if (POINTER_TYPE_P (TREE_TYPE (name))
4647 && nonnull_arg_p (SSA_NAME_VAR (name)))
4649 tree ops[2];
4650 ops[0] = name;
4651 ops[1] = build_int_cst (TREE_TYPE (name), 0);
4652 vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4653 boolean_true_node, 0);
4654 if (dump_file && (dump_flags & TDF_DETAILS))
4656 fprintf (dump_file, "Recording ");
4657 print_generic_expr (dump_file, name, TDF_SLIM);
4658 fprintf (dump_file, " != 0\n");
4661 break;
4663 case RESULT_DECL:
4664 /* If the result is passed by invisible reference the default
4665 def is initialized, otherwise it's uninitialized. Still
4666 undefined is varying. */
4667 VN_INFO (name)->visited = true;
4668 VN_INFO (name)->valnum = name;
4669 break;
4671 default:
4672 gcc_unreachable ();
4677 /* Restore SSA info that has been reset on value leaders. */
4679 void
4680 scc_vn_restore_ssa_info (void)
4682 unsigned i;
4683 tree name;
4685 FOR_EACH_SSA_NAME (i, name, cfun)
4687 if (has_VN_INFO (name))
4689 if (VN_INFO (name)->needs_insertion)
4691 else if (POINTER_TYPE_P (TREE_TYPE (name))
4692 && VN_INFO (name)->info.ptr_info)
4693 SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4694 else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4695 && VN_INFO (name)->info.range_info)
4697 SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4698 SSA_NAME_ANTI_RANGE_P (name)
4699 = VN_INFO (name)->range_info_anti_range_p;
4705 void
4706 free_scc_vn (void)
4708 size_t i;
4709 tree name;
4711 delete constant_to_value_id;
4712 constant_to_value_id = NULL;
4713 BITMAP_FREE (constant_value_ids);
4714 shared_lookup_phiargs.release ();
4715 shared_lookup_references.release ();
4716 XDELETEVEC (rpo_numbers);
4718 FOR_EACH_SSA_NAME (i, name, cfun)
4720 if (has_VN_INFO (name)
4721 && VN_INFO (name)->needs_insertion)
4722 release_ssa_name (name);
4724 obstack_free (&vn_ssa_aux_obstack, NULL);
4725 vn_ssa_aux_table.release ();
4727 sccstack.release ();
4728 free_vn_table (valid_info);
4729 XDELETE (valid_info);
4730 free_vn_table (optimistic_info);
4731 XDELETE (optimistic_info);
4733 BITMAP_FREE (const_parms);
4736 /* Set *ID according to RESULT. */
4738 static void
4739 set_value_id_for_result (tree result, unsigned int *id)
4741 if (result && TREE_CODE (result) == SSA_NAME)
4742 *id = VN_INFO (result)->value_id;
4743 else if (result && is_gimple_min_invariant (result))
4744 *id = get_or_alloc_constant_value_id (result);
4745 else
4746 *id = get_next_value_id ();
4749 /* Set the value ids in the valid hash tables. */
4751 static void
4752 set_hashtable_value_ids (void)
4754 vn_nary_op_iterator_type hin;
4755 vn_phi_iterator_type hip;
4756 vn_reference_iterator_type hir;
4757 vn_nary_op_t vno;
4758 vn_reference_t vr;
4759 vn_phi_t vp;
4761 /* Now set the value ids of the things we had put in the hash
4762 table. */
4764 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4765 set_value_id_for_result (vno->result, &vno->value_id);
4767 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4768 set_value_id_for_result (vp->result, &vp->value_id);
4770 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4771 hir)
4772 set_value_id_for_result (vr->result, &vr->value_id);
4775 class sccvn_dom_walker : public dom_walker
4777 public:
4778 sccvn_dom_walker ()
4779 : dom_walker (CDI_DOMINATORS, true), cond_stack (0) {}
4781 virtual edge before_dom_children (basic_block);
4782 virtual void after_dom_children (basic_block);
4784 void record_cond (basic_block,
4785 enum tree_code code, tree lhs, tree rhs, bool value);
4786 void record_conds (basic_block,
4787 enum tree_code code, tree lhs, tree rhs, bool value);
4789 auto_vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4790 cond_stack;
4793 /* Record a temporary condition for the BB and its dominated blocks. */
4795 void
4796 sccvn_dom_walker::record_cond (basic_block bb,
4797 enum tree_code code, tree lhs, tree rhs,
4798 bool value)
4800 tree ops[2] = { lhs, rhs };
4801 vn_nary_op_t old = NULL;
4802 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4803 current_info->nary->remove_elt_with_hash (old, old->hashcode);
4804 vn_nary_op_t cond
4805 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4806 value
4807 ? boolean_true_node
4808 : boolean_false_node, 0);
4809 if (dump_file && (dump_flags & TDF_DETAILS))
4811 fprintf (dump_file, "Recording temporarily ");
4812 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4813 fprintf (dump_file, " %s ", get_tree_code_name (code));
4814 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4815 fprintf (dump_file, " == %s%s\n",
4816 value ? "true" : "false",
4817 old ? " (old entry saved)" : "");
4819 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4822 /* Record temporary conditions for the BB and its dominated blocks
4823 according to LHS CODE RHS == VALUE and its dominated conditions. */
4825 void
4826 sccvn_dom_walker::record_conds (basic_block bb,
4827 enum tree_code code, tree lhs, tree rhs,
4828 bool value)
4830 /* Record the original condition. */
4831 record_cond (bb, code, lhs, rhs, value);
4833 if (!value)
4834 return;
4836 /* Record dominated conditions if the condition is true. Note that
4837 the inversion is already recorded. */
4838 switch (code)
4840 case LT_EXPR:
4841 case GT_EXPR:
4842 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4843 record_cond (bb, NE_EXPR, lhs, rhs, true);
4844 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4845 break;
4847 case EQ_EXPR:
4848 record_cond (bb, LE_EXPR, lhs, rhs, true);
4849 record_cond (bb, GE_EXPR, lhs, rhs, true);
4850 record_cond (bb, LT_EXPR, lhs, rhs, false);
4851 record_cond (bb, GT_EXPR, lhs, rhs, false);
4852 break;
4854 default:
4855 break;
4859 /* Restore expressions and values derived from conditionals. */
4861 void
4862 sccvn_dom_walker::after_dom_children (basic_block bb)
4864 while (!cond_stack.is_empty ()
4865 && cond_stack.last ().first == bb)
4867 vn_nary_op_t cond = cond_stack.last ().second.first;
4868 vn_nary_op_t old = cond_stack.last ().second.second;
4869 current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4870 if (old)
4871 vn_nary_op_insert_into (old, current_info->nary, false);
4872 cond_stack.pop ();
4876 /* Value number all statements in BB. */
4878 edge
4879 sccvn_dom_walker::before_dom_children (basic_block bb)
4881 if (dump_file && (dump_flags & TDF_DETAILS))
4882 fprintf (dump_file, "Visiting BB %d\n", bb->index);
4884 /* If we have a single predecessor record the equivalence from a
4885 possible condition on the predecessor edge. */
4886 edge pred_e = single_pred_edge_ignoring_loop_edges (bb, false);
4887 if (pred_e)
4889 /* Check if there are multiple executable successor edges in
4890 the source block. Otherwise there is no additional info
4891 to be recorded. */
4892 edge_iterator ei;
4893 edge e2;
4894 FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4895 if (e2 != pred_e
4896 && e2->flags & EDGE_EXECUTABLE)
4897 break;
4898 if (e2 && (e2->flags & EDGE_EXECUTABLE))
4900 gimple *stmt = last_stmt (pred_e->src);
4901 if (stmt
4902 && gimple_code (stmt) == GIMPLE_COND)
4904 enum tree_code code = gimple_cond_code (stmt);
4905 tree lhs = gimple_cond_lhs (stmt);
4906 tree rhs = gimple_cond_rhs (stmt);
4907 record_conds (bb, code, lhs, rhs,
4908 (pred_e->flags & EDGE_TRUE_VALUE) != 0);
4909 code = invert_tree_comparison (code, HONOR_NANS (lhs));
4910 if (code != ERROR_MARK)
4911 record_conds (bb, code, lhs, rhs,
4912 (pred_e->flags & EDGE_TRUE_VALUE) == 0);
4917 /* Value-number all defs in the basic-block. */
4918 for (gphi_iterator gsi = gsi_start_phis (bb);
4919 !gsi_end_p (gsi); gsi_next (&gsi))
4921 gphi *phi = gsi.phi ();
4922 tree res = PHI_RESULT (phi);
4923 if (!VN_INFO (res)->visited)
4924 DFS (res);
4926 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4927 !gsi_end_p (gsi); gsi_next (&gsi))
4929 ssa_op_iter i;
4930 tree op;
4931 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4932 if (!VN_INFO (op)->visited)
4933 DFS (op);
4936 /* Finally look at the last stmt. */
4937 gimple *stmt = last_stmt (bb);
4938 if (!stmt)
4939 return NULL;
4941 enum gimple_code code = gimple_code (stmt);
4942 if (code != GIMPLE_COND
4943 && code != GIMPLE_SWITCH
4944 && code != GIMPLE_GOTO)
4945 return NULL;
4947 if (dump_file && (dump_flags & TDF_DETAILS))
4949 fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
4950 print_gimple_stmt (dump_file, stmt, 0);
4953 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4954 if value-numbering can prove they are not reachable. Handling
4955 computed gotos is also possible. */
4956 tree val;
4957 switch (code)
4959 case GIMPLE_COND:
4961 tree lhs = vn_valueize (gimple_cond_lhs (stmt));
4962 tree rhs = vn_valueize (gimple_cond_rhs (stmt));
4963 val = gimple_simplify (gimple_cond_code (stmt),
4964 boolean_type_node, lhs, rhs,
4965 NULL, vn_valueize);
4966 /* If that didn't simplify to a constant see if we have recorded
4967 temporary expressions from taken edges. */
4968 if (!val || TREE_CODE (val) != INTEGER_CST)
4970 tree ops[2];
4971 ops[0] = lhs;
4972 ops[1] = rhs;
4973 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
4974 boolean_type_node, ops, NULL);
4976 break;
4978 case GIMPLE_SWITCH:
4979 val = gimple_switch_index (as_a <gswitch *> (stmt));
4980 break;
4981 case GIMPLE_GOTO:
4982 val = gimple_goto_dest (stmt);
4983 break;
4984 default:
4985 gcc_unreachable ();
4987 if (!val)
4988 return NULL;
4990 edge taken = find_taken_edge (bb, vn_valueize (val));
4991 if (!taken)
4992 return NULL;
4994 if (dump_file && (dump_flags & TDF_DETAILS))
4995 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4996 "not executable\n", bb->index, bb->index, taken->dest->index);
4998 return taken;
5001 /* Do SCCVN. Returns true if it finished, false if we bailed out
5002 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
5003 how we use the alias oracle walking during the VN process. */
5005 void
5006 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
5008 size_t i;
5010 default_vn_walk_kind = default_vn_walk_kind_;
5012 init_scc_vn ();
5014 /* Collect pointers we know point to readonly memory. */
5015 const_parms = BITMAP_ALLOC (NULL);
5016 tree fnspec = lookup_attribute ("fn spec",
5017 TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
5018 if (fnspec)
5020 fnspec = TREE_VALUE (TREE_VALUE (fnspec));
5021 i = 1;
5022 for (tree arg = DECL_ARGUMENTS (cfun->decl);
5023 arg; arg = DECL_CHAIN (arg), ++i)
5025 if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
5026 break;
5027 if (TREE_STRING_POINTER (fnspec)[i] == 'R'
5028 || TREE_STRING_POINTER (fnspec)[i] == 'r')
5030 tree name = ssa_default_def (cfun, arg);
5031 if (name)
5032 bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
5037 /* Walk all blocks in dominator order, value-numbering stmts
5038 SSA defs and decide whether outgoing edges are not executable. */
5039 sccvn_dom_walker walker;
5040 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5042 /* Initialize the value ids and prune out remaining VN_TOPs
5043 from dead code. */
5044 tree name;
5045 FOR_EACH_SSA_NAME (i, name, cfun)
5047 vn_ssa_aux_t info = VN_INFO (name);
5048 if (!info->visited
5049 || info->valnum == VN_TOP)
5050 info->valnum = name;
5051 if (info->valnum == name)
5052 info->value_id = get_next_value_id ();
5053 else if (is_gimple_min_invariant (info->valnum))
5054 info->value_id = get_or_alloc_constant_value_id (info->valnum);
5057 /* Propagate. */
5058 FOR_EACH_SSA_NAME (i, name, cfun)
5060 vn_ssa_aux_t info = VN_INFO (name);
5061 if (TREE_CODE (info->valnum) == SSA_NAME
5062 && info->valnum != name
5063 && info->value_id != VN_INFO (info->valnum)->value_id)
5064 info->value_id = VN_INFO (info->valnum)->value_id;
5067 set_hashtable_value_ids ();
5069 if (dump_file && (dump_flags & TDF_DETAILS))
5071 fprintf (dump_file, "Value numbers:\n");
5072 FOR_EACH_SSA_NAME (i, name, cfun)
5074 if (VN_INFO (name)->visited
5075 && SSA_VAL (name) != name)
5077 print_generic_expr (dump_file, name);
5078 fprintf (dump_file, " = ");
5079 print_generic_expr (dump_file, SSA_VAL (name));
5080 fprintf (dump_file, "\n");
5086 /* Return the maximum value id we have ever seen. */
5088 unsigned int
5089 get_max_value_id (void)
5091 return next_value_id;
5094 /* Return the next unique value id. */
5096 unsigned int
5097 get_next_value_id (void)
5099 return next_value_id++;
5103 /* Compare two expressions E1 and E2 and return true if they are equal. */
5105 bool
5106 expressions_equal_p (tree e1, tree e2)
5108 /* The obvious case. */
5109 if (e1 == e2)
5110 return true;
5112 /* If either one is VN_TOP consider them equal. */
5113 if (e1 == VN_TOP || e2 == VN_TOP)
5114 return true;
5116 /* If only one of them is null, they cannot be equal. */
5117 if (!e1 || !e2)
5118 return false;
5120 /* Now perform the actual comparison. */
5121 if (TREE_CODE (e1) == TREE_CODE (e2)
5122 && operand_equal_p (e1, e2, OEP_PURE_SAME))
5123 return true;
5125 return false;
5129 /* Return true if the nary operation NARY may trap. This is a copy
5130 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
5132 bool
5133 vn_nary_may_trap (vn_nary_op_t nary)
5135 tree type;
5136 tree rhs2 = NULL_TREE;
5137 bool honor_nans = false;
5138 bool honor_snans = false;
5139 bool fp_operation = false;
5140 bool honor_trapv = false;
5141 bool handled, ret;
5142 unsigned i;
5144 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5145 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5146 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5148 type = nary->type;
5149 fp_operation = FLOAT_TYPE_P (type);
5150 if (fp_operation)
5152 honor_nans = flag_trapping_math && !flag_finite_math_only;
5153 honor_snans = flag_signaling_nans != 0;
5155 else if (INTEGRAL_TYPE_P (type)
5156 && TYPE_OVERFLOW_TRAPS (type))
5157 honor_trapv = true;
5159 if (nary->length >= 2)
5160 rhs2 = nary->op[1];
5161 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5162 honor_trapv,
5163 honor_nans, honor_snans, rhs2,
5164 &handled);
5165 if (handled
5166 && ret)
5167 return true;
5169 for (i = 0; i < nary->length; ++i)
5170 if (tree_could_trap_p (nary->op[i]))
5171 return true;
5173 return false;
5177 class eliminate_dom_walker : public dom_walker
5179 public:
5180 eliminate_dom_walker (cdi_direction, bitmap);
5181 ~eliminate_dom_walker ();
5183 virtual edge before_dom_children (basic_block);
5184 virtual void after_dom_children (basic_block);
5186 tree eliminate_avail (tree op);
5187 void eliminate_push_avail (tree op);
5188 tree eliminate_insert (gimple_stmt_iterator *gsi, tree val);
5190 bool do_pre;
5191 unsigned int el_todo;
5192 unsigned int eliminations;
5193 unsigned int insertions;
5195 /* SSA names that had their defs inserted by PRE if do_pre. */
5196 bitmap inserted_exprs;
5198 /* Blocks with statements that have had their EH properties changed. */
5199 bitmap need_eh_cleanup;
5201 /* Blocks with statements that have had their AB properties changed. */
5202 bitmap need_ab_cleanup;
5204 auto_vec<gimple *> to_remove;
5205 auto_vec<gimple *> to_fixup;
5206 auto_vec<tree> avail;
5207 auto_vec<tree> avail_stack;
5210 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
5211 bitmap inserted_exprs_)
5212 : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
5213 el_todo (0), eliminations (0), insertions (0),
5214 inserted_exprs (inserted_exprs_)
5216 need_eh_cleanup = BITMAP_ALLOC (NULL);
5217 need_ab_cleanup = BITMAP_ALLOC (NULL);
5220 eliminate_dom_walker::~eliminate_dom_walker ()
5222 BITMAP_FREE (need_eh_cleanup);
5223 BITMAP_FREE (need_ab_cleanup);
5226 /* Return a leader for OP that is available at the current point of the
5227 eliminate domwalk. */
5229 tree
5230 eliminate_dom_walker::eliminate_avail (tree op)
5232 tree valnum = VN_INFO (op)->valnum;
5233 if (TREE_CODE (valnum) == SSA_NAME)
5235 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5236 return valnum;
5237 if (avail.length () > SSA_NAME_VERSION (valnum))
5238 return avail[SSA_NAME_VERSION (valnum)];
5240 else if (is_gimple_min_invariant (valnum))
5241 return valnum;
5242 return NULL_TREE;
5245 /* At the current point of the eliminate domwalk make OP available. */
5247 void
5248 eliminate_dom_walker::eliminate_push_avail (tree op)
5250 tree valnum = VN_INFO (op)->valnum;
5251 if (TREE_CODE (valnum) == SSA_NAME)
5253 if (avail.length () <= SSA_NAME_VERSION (valnum))
5254 avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
5255 tree pushop = op;
5256 if (avail[SSA_NAME_VERSION (valnum)])
5257 pushop = avail[SSA_NAME_VERSION (valnum)];
5258 avail_stack.safe_push (pushop);
5259 avail[SSA_NAME_VERSION (valnum)] = op;
5263 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
5264 the leader for the expression if insertion was successful. */
5266 tree
5267 eliminate_dom_walker::eliminate_insert (gimple_stmt_iterator *gsi, tree val)
5269 /* We can insert a sequence with a single assignment only. */
5270 gimple_seq stmts = VN_INFO (val)->expr;
5271 if (!gimple_seq_singleton_p (stmts))
5272 return NULL_TREE;
5273 gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
5274 if (!stmt
5275 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
5276 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
5277 && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
5278 && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
5279 || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
5280 return NULL_TREE;
5282 tree op = gimple_assign_rhs1 (stmt);
5283 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
5284 || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5285 op = TREE_OPERAND (op, 0);
5286 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
5287 if (!leader)
5288 return NULL_TREE;
5290 tree res;
5291 stmts = NULL;
5292 if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5293 res = gimple_build (&stmts, BIT_FIELD_REF,
5294 TREE_TYPE (val), leader,
5295 TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
5296 TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
5297 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
5298 res = gimple_build (&stmts, BIT_AND_EXPR,
5299 TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
5300 else
5301 res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
5302 TREE_TYPE (val), leader);
5303 if (TREE_CODE (res) != SSA_NAME
5304 || SSA_NAME_IS_DEFAULT_DEF (res)
5305 || gimple_bb (SSA_NAME_DEF_STMT (res)))
5307 gimple_seq_discard (stmts);
5309 /* During propagation we have to treat SSA info conservatively
5310 and thus we can end up simplifying the inserted expression
5311 at elimination time to sth not defined in stmts. */
5312 /* But then this is a redundancy we failed to detect. Which means
5313 res now has two values. That doesn't play well with how
5314 we track availability here, so give up. */
5315 if (dump_file && (dump_flags & TDF_DETAILS))
5317 if (TREE_CODE (res) == SSA_NAME)
5318 res = eliminate_avail (res);
5319 if (res)
5321 fprintf (dump_file, "Failed to insert expression for value ");
5322 print_generic_expr (dump_file, val);
5323 fprintf (dump_file, " which is really fully redundant to ");
5324 print_generic_expr (dump_file, res);
5325 fprintf (dump_file, "\n");
5329 return NULL_TREE;
5331 else
5333 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
5334 VN_INFO_GET (res)->valnum = val;
5337 insertions++;
5338 if (dump_file && (dump_flags & TDF_DETAILS))
5340 fprintf (dump_file, "Inserted ");
5341 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
5344 return res;
5349 /* Perform elimination for the basic-block B during the domwalk. */
5351 edge
5352 eliminate_dom_walker::before_dom_children (basic_block b)
5354 /* Mark new bb. */
5355 avail_stack.safe_push (NULL_TREE);
5357 /* Skip unreachable blocks marked unreachable during the SCCVN domwalk. */
5358 edge_iterator ei;
5359 edge e;
5360 FOR_EACH_EDGE (e, ei, b->preds)
5361 if (e->flags & EDGE_EXECUTABLE)
5362 break;
5363 if (! e)
5364 return NULL;
5366 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
5368 gphi *phi = gsi.phi ();
5369 tree res = PHI_RESULT (phi);
5371 if (virtual_operand_p (res))
5373 gsi_next (&gsi);
5374 continue;
5377 tree sprime = eliminate_avail (res);
5378 if (sprime
5379 && sprime != res)
5381 if (dump_file && (dump_flags & TDF_DETAILS))
5383 fprintf (dump_file, "Replaced redundant PHI node defining ");
5384 print_generic_expr (dump_file, res);
5385 fprintf (dump_file, " with ");
5386 print_generic_expr (dump_file, sprime);
5387 fprintf (dump_file, "\n");
5390 /* If we inserted this PHI node ourself, it's not an elimination. */
5391 if (! inserted_exprs
5392 || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
5393 eliminations++;
5395 /* If we will propagate into all uses don't bother to do
5396 anything. */
5397 if (may_propagate_copy (res, sprime))
5399 /* Mark the PHI for removal. */
5400 to_remove.safe_push (phi);
5401 gsi_next (&gsi);
5402 continue;
5405 remove_phi_node (&gsi, false);
5407 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
5408 sprime = fold_convert (TREE_TYPE (res), sprime);
5409 gimple *stmt = gimple_build_assign (res, sprime);
5410 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
5411 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
5412 continue;
5415 eliminate_push_avail (res);
5416 gsi_next (&gsi);
5419 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
5420 !gsi_end_p (gsi);
5421 gsi_next (&gsi))
5423 tree sprime = NULL_TREE;
5424 gimple *stmt = gsi_stmt (gsi);
5425 tree lhs = gimple_get_lhs (stmt);
5426 if (lhs && TREE_CODE (lhs) == SSA_NAME
5427 && !gimple_has_volatile_ops (stmt)
5428 /* See PR43491. Do not replace a global register variable when
5429 it is a the RHS of an assignment. Do replace local register
5430 variables since gcc does not guarantee a local variable will
5431 be allocated in register.
5432 ??? The fix isn't effective here. This should instead
5433 be ensured by not value-numbering them the same but treating
5434 them like volatiles? */
5435 && !(gimple_assign_single_p (stmt)
5436 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
5437 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
5438 && is_global_var (gimple_assign_rhs1 (stmt)))))
5440 sprime = eliminate_avail (lhs);
5441 if (!sprime)
5443 /* If there is no existing usable leader but SCCVN thinks
5444 it has an expression it wants to use as replacement,
5445 insert that. */
5446 tree val = VN_INFO (lhs)->valnum;
5447 if (val != VN_TOP
5448 && TREE_CODE (val) == SSA_NAME
5449 && VN_INFO (val)->needs_insertion
5450 && VN_INFO (val)->expr != NULL
5451 && (sprime = eliminate_insert (&gsi, val)) != NULL_TREE)
5452 eliminate_push_avail (sprime);
5455 /* If this now constitutes a copy duplicate points-to
5456 and range info appropriately. This is especially
5457 important for inserted code. See tree-ssa-copy.c
5458 for similar code. */
5459 if (sprime
5460 && TREE_CODE (sprime) == SSA_NAME)
5462 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
5463 if (POINTER_TYPE_P (TREE_TYPE (lhs))
5464 && VN_INFO_PTR_INFO (lhs)
5465 && ! VN_INFO_PTR_INFO (sprime))
5467 duplicate_ssa_name_ptr_info (sprime,
5468 VN_INFO_PTR_INFO (lhs));
5469 if (b != sprime_b)
5470 mark_ptr_info_alignment_unknown
5471 (SSA_NAME_PTR_INFO (sprime));
5473 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5474 && VN_INFO_RANGE_INFO (lhs)
5475 && ! VN_INFO_RANGE_INFO (sprime)
5476 && b == sprime_b)
5477 duplicate_ssa_name_range_info (sprime,
5478 VN_INFO_RANGE_TYPE (lhs),
5479 VN_INFO_RANGE_INFO (lhs));
5482 /* Inhibit the use of an inserted PHI on a loop header when
5483 the address of the memory reference is a simple induction
5484 variable. In other cases the vectorizer won't do anything
5485 anyway (either it's loop invariant or a complicated
5486 expression). */
5487 if (sprime
5488 && TREE_CODE (sprime) == SSA_NAME
5489 && do_pre
5490 && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
5491 && loop_outer (b->loop_father)
5492 && has_zero_uses (sprime)
5493 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
5494 && gimple_assign_load_p (stmt))
5496 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
5497 basic_block def_bb = gimple_bb (def_stmt);
5498 if (gimple_code (def_stmt) == GIMPLE_PHI
5499 && def_bb->loop_father->header == def_bb)
5501 loop_p loop = def_bb->loop_father;
5502 ssa_op_iter iter;
5503 tree op;
5504 bool found = false;
5505 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5507 affine_iv iv;
5508 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
5509 if (def_bb
5510 && flow_bb_inside_loop_p (loop, def_bb)
5511 && simple_iv (loop, loop, op, &iv, true))
5513 found = true;
5514 break;
5517 if (found)
5519 if (dump_file && (dump_flags & TDF_DETAILS))
5521 fprintf (dump_file, "Not replacing ");
5522 print_gimple_expr (dump_file, stmt, 0);
5523 fprintf (dump_file, " with ");
5524 print_generic_expr (dump_file, sprime);
5525 fprintf (dump_file, " which would add a loop"
5526 " carried dependence to loop %d\n",
5527 loop->num);
5529 /* Don't keep sprime available. */
5530 sprime = NULL_TREE;
5535 if (sprime)
5537 /* If we can propagate the value computed for LHS into
5538 all uses don't bother doing anything with this stmt. */
5539 if (may_propagate_copy (lhs, sprime))
5541 /* Mark it for removal. */
5542 to_remove.safe_push (stmt);
5544 /* ??? Don't count copy/constant propagations. */
5545 if (gimple_assign_single_p (stmt)
5546 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5547 || gimple_assign_rhs1 (stmt) == sprime))
5548 continue;
5550 if (dump_file && (dump_flags & TDF_DETAILS))
5552 fprintf (dump_file, "Replaced ");
5553 print_gimple_expr (dump_file, stmt, 0);
5554 fprintf (dump_file, " with ");
5555 print_generic_expr (dump_file, sprime);
5556 fprintf (dump_file, " in all uses of ");
5557 print_gimple_stmt (dump_file, stmt, 0);
5560 eliminations++;
5561 continue;
5564 /* If this is an assignment from our leader (which
5565 happens in the case the value-number is a constant)
5566 then there is nothing to do. */
5567 if (gimple_assign_single_p (stmt)
5568 && sprime == gimple_assign_rhs1 (stmt))
5569 continue;
5571 /* Else replace its RHS. */
5572 bool can_make_abnormal_goto
5573 = is_gimple_call (stmt)
5574 && stmt_can_make_abnormal_goto (stmt);
5576 if (dump_file && (dump_flags & TDF_DETAILS))
5578 fprintf (dump_file, "Replaced ");
5579 print_gimple_expr (dump_file, stmt, 0);
5580 fprintf (dump_file, " with ");
5581 print_generic_expr (dump_file, sprime);
5582 fprintf (dump_file, " in ");
5583 print_gimple_stmt (dump_file, stmt, 0);
5586 eliminations++;
5587 gimple *orig_stmt = stmt;
5588 if (!useless_type_conversion_p (TREE_TYPE (lhs),
5589 TREE_TYPE (sprime)))
5590 sprime = fold_convert (TREE_TYPE (lhs), sprime);
5591 tree vdef = gimple_vdef (stmt);
5592 tree vuse = gimple_vuse (stmt);
5593 propagate_tree_value_into_stmt (&gsi, sprime);
5594 stmt = gsi_stmt (gsi);
5595 update_stmt (stmt);
5596 if (vdef != gimple_vdef (stmt))
5597 VN_INFO (vdef)->valnum = vuse;
5599 /* If we removed EH side-effects from the statement, clean
5600 its EH information. */
5601 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
5603 bitmap_set_bit (need_eh_cleanup,
5604 gimple_bb (stmt)->index);
5605 if (dump_file && (dump_flags & TDF_DETAILS))
5606 fprintf (dump_file, " Removed EH side-effects.\n");
5609 /* Likewise for AB side-effects. */
5610 if (can_make_abnormal_goto
5611 && !stmt_can_make_abnormal_goto (stmt))
5613 bitmap_set_bit (need_ab_cleanup,
5614 gimple_bb (stmt)->index);
5615 if (dump_file && (dump_flags & TDF_DETAILS))
5616 fprintf (dump_file, " Removed AB side-effects.\n");
5619 continue;
5623 /* If the statement is a scalar store, see if the expression
5624 has the same value number as its rhs. If so, the store is
5625 dead. */
5626 if (gimple_assign_single_p (stmt)
5627 && !gimple_has_volatile_ops (stmt)
5628 && !is_gimple_reg (gimple_assign_lhs (stmt))
5629 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5630 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
5632 tree val;
5633 tree rhs = gimple_assign_rhs1 (stmt);
5634 vn_reference_t vnresult;
5635 val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
5636 &vnresult, false);
5637 if (TREE_CODE (rhs) == SSA_NAME)
5638 rhs = VN_INFO (rhs)->valnum;
5639 if (val
5640 && operand_equal_p (val, rhs, 0))
5642 /* We can only remove the later store if the former aliases
5643 at least all accesses the later one does or if the store
5644 was to readonly memory storing the same value. */
5645 alias_set_type set = get_alias_set (lhs);
5646 if (! vnresult
5647 || vnresult->set == set
5648 || alias_set_subset_of (set, vnresult->set))
5650 if (dump_file && (dump_flags & TDF_DETAILS))
5652 fprintf (dump_file, "Deleted redundant store ");
5653 print_gimple_stmt (dump_file, stmt, 0);
5656 /* Queue stmt for removal. */
5657 to_remove.safe_push (stmt);
5658 continue;
5663 /* If this is a control statement value numbering left edges
5664 unexecuted on force the condition in a way consistent with
5665 that. */
5666 if (gcond *cond = dyn_cast <gcond *> (stmt))
5668 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
5669 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
5671 if (dump_file && (dump_flags & TDF_DETAILS))
5673 fprintf (dump_file, "Removing unexecutable edge from ");
5674 print_gimple_stmt (dump_file, stmt, 0);
5676 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
5677 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
5678 gimple_cond_make_true (cond);
5679 else
5680 gimple_cond_make_false (cond);
5681 update_stmt (cond);
5682 el_todo |= TODO_cleanup_cfg;
5683 continue;
5687 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
5688 bool was_noreturn = (is_gimple_call (stmt)
5689 && gimple_call_noreturn_p (stmt));
5690 tree vdef = gimple_vdef (stmt);
5691 tree vuse = gimple_vuse (stmt);
5693 /* If we didn't replace the whole stmt (or propagate the result
5694 into all uses), replace all uses on this stmt with their
5695 leaders. */
5696 bool modified = false;
5697 use_operand_p use_p;
5698 ssa_op_iter iter;
5699 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5701 tree use = USE_FROM_PTR (use_p);
5702 /* ??? The call code above leaves stmt operands un-updated. */
5703 if (TREE_CODE (use) != SSA_NAME)
5704 continue;
5705 tree sprime = eliminate_avail (use);
5706 if (sprime && sprime != use
5707 && may_propagate_copy (use, sprime)
5708 /* We substitute into debug stmts to avoid excessive
5709 debug temporaries created by removed stmts, but we need
5710 to avoid doing so for inserted sprimes as we never want
5711 to create debug temporaries for them. */
5712 && (!inserted_exprs
5713 || TREE_CODE (sprime) != SSA_NAME
5714 || !is_gimple_debug (stmt)
5715 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
5717 propagate_value (use_p, sprime);
5718 modified = true;
5722 /* Fold the stmt if modified, this canonicalizes MEM_REFs we propagated
5723 into which is a requirement for the IPA devirt machinery. */
5724 gimple *old_stmt = stmt;
5725 if (modified)
5727 /* If a formerly non-invariant ADDR_EXPR is turned into an
5728 invariant one it was on a separate stmt. */
5729 if (gimple_assign_single_p (stmt)
5730 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
5731 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
5732 gimple_stmt_iterator prev = gsi;
5733 gsi_prev (&prev);
5734 if (fold_stmt (&gsi))
5736 /* fold_stmt may have created new stmts inbetween
5737 the previous stmt and the folded stmt. Mark
5738 all defs created there as varying to not confuse
5739 the SCCVN machinery as we're using that even during
5740 elimination. */
5741 if (gsi_end_p (prev))
5742 prev = gsi_start_bb (b);
5743 else
5744 gsi_next (&prev);
5745 if (gsi_stmt (prev) != gsi_stmt (gsi))
5748 tree def;
5749 ssa_op_iter dit;
5750 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
5751 dit, SSA_OP_ALL_DEFS)
5752 /* As existing DEFs may move between stmts
5753 we have to guard VN_INFO_GET. */
5754 if (! has_VN_INFO (def))
5755 VN_INFO_GET (def)->valnum = def;
5756 if (gsi_stmt (prev) == gsi_stmt (gsi))
5757 break;
5758 gsi_next (&prev);
5760 while (1);
5762 stmt = gsi_stmt (gsi);
5763 /* In case we folded the stmt away schedule the NOP for removal. */
5764 if (gimple_nop_p (stmt))
5765 to_remove.safe_push (stmt);
5768 /* Visit indirect calls and turn them into direct calls if
5769 possible using the devirtualization machinery. Do this before
5770 checking for required EH/abnormal/noreturn cleanup as devird
5771 may expose more of those. */
5772 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5774 tree fn = gimple_call_fn (call_stmt);
5775 if (fn
5776 && flag_devirtualize
5777 && virtual_method_call_p (fn))
5779 tree otr_type = obj_type_ref_class (fn);
5780 unsigned HOST_WIDE_INT otr_tok
5781 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
5782 tree instance;
5783 ipa_polymorphic_call_context context (current_function_decl,
5784 fn, stmt, &instance);
5785 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
5786 otr_type, stmt);
5787 bool final;
5788 vec <cgraph_node *> targets
5789 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
5790 otr_tok, context, &final);
5791 if (dump_file)
5792 dump_possible_polymorphic_call_targets (dump_file,
5793 obj_type_ref_class (fn),
5794 otr_tok, context);
5795 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5797 tree fn;
5798 if (targets.length () == 1)
5799 fn = targets[0]->decl;
5800 else
5801 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5802 if (dump_enabled_p ())
5804 location_t loc = gimple_location (stmt);
5805 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
5806 "converting indirect call to "
5807 "function %s\n",
5808 lang_hooks.decl_printable_name (fn, 2));
5810 gimple_call_set_fndecl (call_stmt, fn);
5811 /* If changing the call to __builtin_unreachable
5812 or similar noreturn function, adjust gimple_call_fntype
5813 too. */
5814 if (gimple_call_noreturn_p (call_stmt)
5815 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
5816 && TYPE_ARG_TYPES (TREE_TYPE (fn))
5817 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
5818 == void_type_node))
5819 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
5820 maybe_remove_unused_call_args (cfun, call_stmt);
5821 modified = true;
5826 if (modified)
5828 /* When changing a call into a noreturn call, cfg cleanup
5829 is needed to fix up the noreturn call. */
5830 if (!was_noreturn
5831 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
5832 to_fixup.safe_push (stmt);
5833 /* When changing a condition or switch into one we know what
5834 edge will be executed, schedule a cfg cleanup. */
5835 if ((gimple_code (stmt) == GIMPLE_COND
5836 && (gimple_cond_true_p (as_a <gcond *> (stmt))
5837 || gimple_cond_false_p (as_a <gcond *> (stmt))))
5838 || (gimple_code (stmt) == GIMPLE_SWITCH
5839 && TREE_CODE (gimple_switch_index
5840 (as_a <gswitch *> (stmt))) == INTEGER_CST))
5841 el_todo |= TODO_cleanup_cfg;
5842 /* If we removed EH side-effects from the statement, clean
5843 its EH information. */
5844 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
5846 bitmap_set_bit (need_eh_cleanup,
5847 gimple_bb (stmt)->index);
5848 if (dump_file && (dump_flags & TDF_DETAILS))
5849 fprintf (dump_file, " Removed EH side-effects.\n");
5851 /* Likewise for AB side-effects. */
5852 if (can_make_abnormal_goto
5853 && !stmt_can_make_abnormal_goto (stmt))
5855 bitmap_set_bit (need_ab_cleanup,
5856 gimple_bb (stmt)->index);
5857 if (dump_file && (dump_flags & TDF_DETAILS))
5858 fprintf (dump_file, " Removed AB side-effects.\n");
5860 update_stmt (stmt);
5861 if (vdef != gimple_vdef (stmt))
5862 VN_INFO (vdef)->valnum = vuse;
5865 /* Make new values available - for fully redundant LHS we
5866 continue with the next stmt above and skip this. */
5867 def_operand_p defp;
5868 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
5869 eliminate_push_avail (DEF_FROM_PTR (defp));
5872 /* Replace destination PHI arguments. */
5873 FOR_EACH_EDGE (e, ei, b->succs)
5874 if (e->flags & EDGE_EXECUTABLE)
5875 for (gphi_iterator gsi = gsi_start_phis (e->dest);
5876 !gsi_end_p (gsi);
5877 gsi_next (&gsi))
5879 gphi *phi = gsi.phi ();
5880 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
5881 tree arg = USE_FROM_PTR (use_p);
5882 if (TREE_CODE (arg) != SSA_NAME
5883 || virtual_operand_p (arg))
5884 continue;
5885 tree sprime = eliminate_avail (arg);
5886 if (sprime && may_propagate_copy (arg, sprime))
5887 propagate_value (use_p, sprime);
5889 return NULL;
5892 /* Make no longer available leaders no longer available. */
5894 void
5895 eliminate_dom_walker::after_dom_children (basic_block)
5897 tree entry;
5898 while ((entry = avail_stack.pop ()) != NULL_TREE)
5900 tree valnum = VN_INFO (entry)->valnum;
5901 tree old = avail[SSA_NAME_VERSION (valnum)];
5902 if (old == entry)
5903 avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
5904 else
5905 avail[SSA_NAME_VERSION (valnum)] = entry;
5909 /* Eliminate fully redundant computations. */
5911 unsigned int
5912 vn_eliminate (bitmap inserted_exprs)
5914 eliminate_dom_walker el (CDI_DOMINATORS, inserted_exprs);
5915 el.avail.reserve (num_ssa_names);
5917 el.walk (cfun->cfg->x_entry_block_ptr);
5919 /* We cannot remove stmts during BB walk, especially not release SSA
5920 names there as this confuses the VN machinery. The stmts ending
5921 up in to_remove are either stores or simple copies.
5922 Remove stmts in reverse order to make debug stmt creation possible. */
5923 while (!el.to_remove.is_empty ())
5925 gimple *stmt = el.to_remove.pop ();
5927 if (dump_file && (dump_flags & TDF_DETAILS))
5929 fprintf (dump_file, "Removing dead stmt ");
5930 print_gimple_stmt (dump_file, stmt, 0, 0);
5933 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5934 if (gimple_code (stmt) == GIMPLE_PHI)
5935 remove_phi_node (&gsi, true);
5936 else
5938 basic_block bb = gimple_bb (stmt);
5939 unlink_stmt_vdef (stmt);
5940 if (gsi_remove (&gsi, true))
5941 bitmap_set_bit (el.need_eh_cleanup, bb->index);
5942 if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
5943 bitmap_set_bit (el.need_ab_cleanup, bb->index);
5944 release_defs (stmt);
5947 /* Removing a stmt may expose a forwarder block. */
5948 el.el_todo |= TODO_cleanup_cfg;
5951 /* Fixup stmts that became noreturn calls. This may require splitting
5952 blocks and thus isn't possible during the dominator walk. Do this
5953 in reverse order so we don't inadvertedly remove a stmt we want to
5954 fixup by visiting a dominating now noreturn call first. */
5955 while (!el.to_fixup.is_empty ())
5957 gimple *stmt = el.to_fixup.pop ();
5959 if (dump_file && (dump_flags & TDF_DETAILS))
5961 fprintf (dump_file, "Fixing up noreturn call ");
5962 print_gimple_stmt (dump_file, stmt, 0);
5965 if (fixup_noreturn_call (stmt))
5966 el.el_todo |= TODO_cleanup_cfg;
5969 bool do_eh_cleanup = !bitmap_empty_p (el.need_eh_cleanup);
5970 bool do_ab_cleanup = !bitmap_empty_p (el.need_ab_cleanup);
5972 if (do_eh_cleanup)
5973 gimple_purge_all_dead_eh_edges (el.need_eh_cleanup);
5975 if (do_ab_cleanup)
5976 gimple_purge_all_dead_abnormal_call_edges (el.need_ab_cleanup);
5978 if (do_eh_cleanup || do_ab_cleanup)
5979 el.el_todo |= TODO_cleanup_cfg;
5981 statistics_counter_event (cfun, "Eliminated", el.eliminations);
5982 statistics_counter_event (cfun, "Insertions", el.insertions);
5984 return el.el_todo;
5988 namespace {
5990 const pass_data pass_data_fre =
5992 GIMPLE_PASS, /* type */
5993 "fre", /* name */
5994 OPTGROUP_NONE, /* optinfo_flags */
5995 TV_TREE_FRE, /* tv_id */
5996 ( PROP_cfg | PROP_ssa ), /* properties_required */
5997 0, /* properties_provided */
5998 0, /* properties_destroyed */
5999 0, /* todo_flags_start */
6000 0, /* todo_flags_finish */
6003 class pass_fre : public gimple_opt_pass
6005 public:
6006 pass_fre (gcc::context *ctxt)
6007 : gimple_opt_pass (pass_data_fre, ctxt)
6010 /* opt_pass methods: */
6011 opt_pass * clone () { return new pass_fre (m_ctxt); }
6012 virtual bool gate (function *) { return flag_tree_fre != 0; }
6013 virtual unsigned int execute (function *);
6015 }; // class pass_fre
6017 unsigned int
6018 pass_fre::execute (function *)
6020 unsigned int todo = 0;
6022 run_scc_vn (VN_WALKREWRITE);
6024 /* Remove all the redundant expressions. */
6025 todo |= vn_eliminate (NULL);
6027 scc_vn_restore_ssa_info ();
6028 free_scc_vn ();
6030 return todo;
6033 } // anon namespace
6035 gimple_opt_pass *
6036 make_pass_fre (gcc::context *ctxt)
6038 return new pass_fre (ctxt);