PR rtl-optimization/82913
[official-gcc.git] / gcc / tree-ssa-sccvn.c
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2017 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimplify.h"
45 #include "flags.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "dumpfile.h"
55 #include "cfgloop.h"
56 #include "params.h"
57 #include "tree-ssa-propagate.h"
58 #include "tree-cfg.h"
59 #include "domwalk.h"
60 #include "gimple-iterator.h"
61 #include "gimple-match.h"
62 #include "stringpool.h"
63 #include "attribs.h"
64 #include "tree-pass.h"
65 #include "statistics.h"
66 #include "langhooks.h"
67 #include "ipa-utils.h"
68 #include "dbgcnt.h"
69 #include "tree-cfgcleanup.h"
70 #include "tree-ssa-loop.h"
71 #include "tree-scalar-evolution.h"
72 #include "tree-ssa-sccvn.h"
74 /* This algorithm is based on the SCC algorithm presented by Keith
75 Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
76 (http://citeseer.ist.psu.edu/41805.html). In
77 straight line code, it is equivalent to a regular hash based value
78 numbering that is performed in reverse postorder.
80 For code with cycles, there are two alternatives, both of which
81 require keeping the hashtables separate from the actual list of
82 value numbers for SSA names.
84 1. Iterate value numbering in an RPO walk of the blocks, removing
85 all the entries from the hashtable after each iteration (but
86 keeping the SSA name->value number mapping between iterations).
87 Iterate until it does not change.
89 2. Perform value numbering as part of an SCC walk on the SSA graph,
90 iterating only the cycles in the SSA graph until they do not change
91 (using a separate, optimistic hashtable for value numbering the SCC
92 operands).
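For example, given the loop cycle

  # i_1 = PHI <0(preheader), i_3(latch)>
  i_3 = i_1 + 0;

i_1 and i_3 form an SCC in the SSA graph.  Starting from the optimistic
assumption that the not-yet-visited i_3 is VN_TOP, the PHI values to 0,
i_3 then simplifies to 0 as well, and one more iteration over the SCC
confirms that nothing changes.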
94 The second is not just faster in practice (because most SSA graph
95 cycles do not involve all the variables in the graph), it also has
96 some nice properties.
98 One of these nice properties is that when we pop an SCC off the
99 stack, we are guaranteed to have processed all the operands coming from
100 *outside of that SCC*, so we do not need to do anything special to
101 ensure they have value numbers.
103 Another nice property is that the SCC walk is done as part of a DFS
104 of the SSA graph, which makes it easy to perform combining and
105 simplifying operations at the same time.
107 The code below is deliberately written in a way that makes it easy
108 to separate the SCC walk from the other work it does.
110 In order to propagate constants through the code, we track which
111 expressions contain constants, and use those while folding. In
112 theory, we could also track expressions whose value numbers are
113 replaced, in case we end up folding based on expression
114 identities.
116 In order to value number memory, we assign value numbers to vuses.
117 This enables us to note that, for example, stores to the same
118 address of the same value from the same starting memory states are
119 equivalent.
120 TODO:
122 1. We can iterate only the changing portions of the SCCs, but
123 I have not seen an SCC big enough for this to be a win.
124 2. If you differentiate between phi nodes for loops and phi nodes
125 for if-then-else, you can properly consider phi nodes in different
126 blocks for equivalence.
127 3. We could value number vuses in more cases, particularly, whole
128 structure copies.  */
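/* If non-NULL, *LAST_VUSE_PTR is updated with the last VUSE visited
   during a reference lookup walk.  VN_WALK_KIND and DEFAULT_VN_WALK_KIND
   select how reference lookups may walk and translate through the VUSE
   chain.  CONST_PARMS holds the SSA versions of parameter default
   definitions known to point to readonly memory.  */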
132 static tree *last_vuse_ptr;
133 static vn_lookup_kind vn_walk_kind;
134 static vn_lookup_kind default_vn_walk_kind;
135 bitmap const_parms;
137 /* vn_nary_op hashtable helpers. */
139 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
141 typedef vn_nary_op_s *compare_type;
142 static inline hashval_t hash (const vn_nary_op_s *);
143 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
146 /* Return the computed hashcode for nary operation P1. */
148 inline hashval_t
149 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
151 return vno1->hashcode;
154 /* Compare nary operations P1 and P2 and return true if they are
155 equivalent. */
157 inline bool
158 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
160 return vn_nary_op_eq (vno1, vno2);
163 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
164 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
167 /* vn_phi hashtable helpers. */
169 static int
170 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
172 struct vn_phi_hasher : pointer_hash <vn_phi_s>
174 static inline hashval_t hash (const vn_phi_s *);
175 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
176 static inline void remove (vn_phi_s *);
179 /* Return the computed hashcode for phi operation P1. */
181 inline hashval_t
182 vn_phi_hasher::hash (const vn_phi_s *vp1)
184 return vp1->hashcode;
187 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
189 inline bool
190 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
192 return vn_phi_eq (vp1, vp2);
195 /* Free a phi operation structure VP. */
197 inline void
198 vn_phi_hasher::remove (vn_phi_s *phi)
200 phi->phiargs.release ();
203 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
204 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
207 /* Compare two reference operands P1 and P2 for equality. Return true if
208 they are equal, and false otherwise. */
210 static int
211 vn_reference_op_eq (const void *p1, const void *p2)
213 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
214 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
216 return (vro1->opcode == vro2->opcode
217 /* We do not care for differences in type qualification. */
218 && (vro1->type == vro2->type
219 || (vro1->type && vro2->type
220 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
221 TYPE_MAIN_VARIANT (vro2->type))))
222 && expressions_equal_p (vro1->op0, vro2->op0)
223 && expressions_equal_p (vro1->op1, vro2->op1)
224 && expressions_equal_p (vro1->op2, vro2->op2));
227 /* Free a reference operation structure VP. */
229 static inline void
230 free_reference (vn_reference_s *vr)
232 vr->operands.release ();
236 /* vn_reference hashtable helpers. */
238 struct vn_reference_hasher : pointer_hash <vn_reference_s>
240 static inline hashval_t hash (const vn_reference_s *);
241 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
242 static inline void remove (vn_reference_s *);
245 /* Return the hashcode for a given reference operation P1. */
247 inline hashval_t
248 vn_reference_hasher::hash (const vn_reference_s *vr1)
250 return vr1->hashcode;
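/* Return true if the references V and C are equal.  */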
253 inline bool
254 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
256 return vn_reference_eq (v, c);
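/* Free the reference operation structure V on removal from the hashtable.  */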
259 inline void
260 vn_reference_hasher::remove (vn_reference_s *v)
262 free_reference (v);
265 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
266 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
269 /* The set of hashtables and alloc_pool's for their items. */
271 typedef struct vn_tables_s
273 vn_nary_op_table_type *nary;
274 vn_phi_table_type *phis;
275 vn_reference_table_type *references;
276 struct obstack nary_obstack;
277 object_allocator<vn_phi_s> *phis_pool;
278 object_allocator<vn_reference_s> *references_pool;
279 } *vn_tables_t;
282 /* vn_constant hashtable helpers. */
284 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
286 static inline hashval_t hash (const vn_constant_s *);
287 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
290 /* Hash table hash function for vn_constant_t. */
292 inline hashval_t
293 vn_constant_hasher::hash (const vn_constant_s *vc1)
295 return vc1->hashcode;
298 /* Hash table equality function for vn_constant_t. */
300 inline bool
301 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
303 if (vc1->hashcode != vc2->hashcode)
304 return false;
306 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
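/* Hashtable mapping constants to their value ids and the set of value
   ids that have been handed out for constants.  */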
309 static hash_table<vn_constant_hasher> *constant_to_value_id;
310 static bitmap constant_value_ids;
313 /* Valid hashtables storing information we have proven to be
314 correct. */
316 static vn_tables_t valid_info;
318 /* Optimistic hashtables storing information we are making assumptions about
319 during iterations. */
321 static vn_tables_t optimistic_info;
323 /* Pointer to the set of hashtables that is currently being used.
324 Should always point to either the optimistic_info, or the
325 valid_info. */
327 static vn_tables_t current_info;
330 /* Reverse post order index for each basic block. */
332 static int *rpo_numbers;
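/* Shortcut for the current value number (valnum) of SSA name X.  */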
334 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
336 /* Return the SSA value of the VUSE x, supporting released VDEFs
337 during elimination which will value-number the VDEF to the
338 associated VUSE (but not substitute in the whole lattice). */
340 static inline tree
341 vuse_ssa_val (tree x)
343 if (!x)
344 return NULL_TREE;
348 x = SSA_VAL (x);
350 while (SSA_NAME_IN_FREE_LIST (x));
352 return x;
355 /* This represents the top of the VN lattice, which is the universal
356 value. */
358 tree VN_TOP;
360 /* Unique counter for our value ids. */
362 static unsigned int next_value_id;
364 /* Next DFS number and the stack for strongly connected component
365 detection. */
367 static unsigned int next_dfs_num;
368 static vec<tree> sccstack;
372 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
373 are allocated on an obstack for locality reasons, and to free them
374 without looping over the vec. */
376 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
377 static struct obstack vn_ssa_aux_obstack;
379 /* Return whether there is value numbering information for a given SSA name. */
381 bool
382 has_VN_INFO (tree name)
384 if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
385 return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
386 return false;
389 /* Return the value numbering information for a given SSA name. */
391 vn_ssa_aux_t
392 VN_INFO (tree name)
394 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
395 gcc_checking_assert (res);
396 return res;
399 /* Set the value numbering info for a given SSA name to a given
400 value. */
402 static inline void
403 VN_INFO_SET (tree name, vn_ssa_aux_t value)
405 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
408 /* Initialize the value numbering info for a given SSA name.
409 This should be called just once for every SSA name. */
411 vn_ssa_aux_t
412 VN_INFO_GET (tree name)
414 vn_ssa_aux_t newinfo;
416 gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
417 || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
418 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
419 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
420 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
421 vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
422 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
423 return newinfo;
427 /* Return the vn_kind the expression computed by the stmt should be
428 associated with. */
430 enum vn_kind
431 vn_get_stmt_kind (gimple *stmt)
433 switch (gimple_code (stmt))
435 case GIMPLE_CALL:
436 return VN_REFERENCE;
437 case GIMPLE_PHI:
438 return VN_PHI;
439 case GIMPLE_ASSIGN:
441 enum tree_code code = gimple_assign_rhs_code (stmt);
442 tree rhs1 = gimple_assign_rhs1 (stmt);
443 switch (get_gimple_rhs_class (code))
445 case GIMPLE_UNARY_RHS:
446 case GIMPLE_BINARY_RHS:
447 case GIMPLE_TERNARY_RHS:
448 return VN_NARY;
449 case GIMPLE_SINGLE_RHS:
450 switch (TREE_CODE_CLASS (code))
452 case tcc_reference:
453 /* VOP-less references can go through unary case. */
454 if ((code == REALPART_EXPR
455 || code == IMAGPART_EXPR
456 || code == VIEW_CONVERT_EXPR
457 || code == BIT_FIELD_REF)
458 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
459 return VN_NARY;
461 /* Fallthrough. */
462 case tcc_declaration:
463 return VN_REFERENCE;
465 case tcc_constant:
466 return VN_CONSTANT;
468 default:
469 if (code == ADDR_EXPR)
470 return (is_gimple_min_invariant (rhs1)
471 ? VN_CONSTANT : VN_REFERENCE);
472 else if (code == CONSTRUCTOR)
473 return VN_NARY;
474 return VN_NONE;
476 default:
477 return VN_NONE;
480 default:
481 return VN_NONE;
485 /* Lookup a value id for CONSTANT and return it. If it does not
486 exist returns 0. */
488 unsigned int
489 get_constant_value_id (tree constant)
491 vn_constant_s **slot;
492 struct vn_constant_s vc;
494 vc.hashcode = vn_hash_constant_with_type (constant);
495 vc.constant = constant;
496 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
497 if (slot)
498 return (*slot)->value_id;
499 return 0;
502 /* Lookup a value id for CONSTANT, and if it does not exist, create a
503 new one and return it. If it does exist, return it. */
505 unsigned int
506 get_or_alloc_constant_value_id (tree constant)
508 vn_constant_s **slot;
509 struct vn_constant_s vc;
510 vn_constant_t vcp;
512 vc.hashcode = vn_hash_constant_with_type (constant);
513 vc.constant = constant;
514 slot = constant_to_value_id->find_slot (&vc, INSERT);
515 if (*slot)
516 return (*slot)->value_id;
518 vcp = XNEW (struct vn_constant_s);
519 vcp->hashcode = vc.hashcode;
520 vcp->constant = constant;
521 vcp->value_id = get_next_value_id ();
522 *slot = vcp;
523 bitmap_set_bit (constant_value_ids, vcp->value_id);
524 return vcp->value_id;
527 /* Return true if V is a value id for a constant. */
529 bool
530 value_id_constant_p (unsigned int v)
532 return bitmap_bit_p (constant_value_ids, v);
535 /* Compute the hash for a reference operand VRO1. */
537 static void
538 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
540 hstate.add_int (vro1->opcode);
541 if (vro1->op0)
542 inchash::add_expr (vro1->op0, hstate);
543 if (vro1->op1)
544 inchash::add_expr (vro1->op1, hstate);
545 if (vro1->op2)
546 inchash::add_expr (vro1->op2, hstate);
549 /* Compute a hash for the reference operation VR1 and return it. */
551 static hashval_t
552 vn_reference_compute_hash (const vn_reference_t vr1)
554 inchash::hash hstate;
555 hashval_t result;
556 int i;
557 vn_reference_op_t vro;
558 HOST_WIDE_INT off = -1;
559 bool deref = false;
561 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
563 if (vro->opcode == MEM_REF)
564 deref = true;
565 else if (vro->opcode != ADDR_EXPR)
566 deref = false;
567 if (vro->off != -1)
569 if (off == -1)
570 off = 0;
571 off += vro->off;
573 else
575 if (off != -1
576 && off != 0)
577 hstate.add_int (off);
578 off = -1;
579 if (deref
580 && vro->opcode == ADDR_EXPR)
582 if (vro->op0)
584 tree op = TREE_OPERAND (vro->op0, 0);
585 hstate.add_int (TREE_CODE (op));
586 inchash::add_expr (op, hstate);
589 else
590 vn_reference_op_compute_hash (vro, hstate);
593 result = hstate.end ();
594 /* ??? We would ICE later if we hash instead of adding that in. */
595 if (vr1->vuse)
596 result += SSA_NAME_VERSION (vr1->vuse);
598 return result;
601 /* Return true if reference operations VR1 and VR2 are equivalent. This
602 means they have the same set of operands and vuses. */
604 bool
605 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
607 unsigned i, j;
609 /* Early out if this is not a hash collision. */
610 if (vr1->hashcode != vr2->hashcode)
611 return false;
613 /* The VOP needs to be the same. */
614 if (vr1->vuse != vr2->vuse)
615 return false;
617 /* If the operands are the same we are done. */
618 if (vr1->operands == vr2->operands)
619 return true;
621 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
622 return false;
624 if (INTEGRAL_TYPE_P (vr1->type)
625 && INTEGRAL_TYPE_P (vr2->type))
627 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
628 return false;
630 else if (INTEGRAL_TYPE_P (vr1->type)
631 && (TYPE_PRECISION (vr1->type)
632 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
633 return false;
634 else if (INTEGRAL_TYPE_P (vr2->type)
635 && (TYPE_PRECISION (vr2->type)
636 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
637 return false;
639 i = 0;
640 j = 0;
643 HOST_WIDE_INT off1 = 0, off2 = 0;
644 vn_reference_op_t vro1, vro2;
645 vn_reference_op_s tem1, tem2;
646 bool deref1 = false, deref2 = false;
647 for (; vr1->operands.iterate (i, &vro1); i++)
649 if (vro1->opcode == MEM_REF)
650 deref1 = true;
651 /* Do not look through a storage order barrier. */
652 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
653 return false;
654 if (vro1->off == -1)
655 break;
656 off1 += vro1->off;
658 for (; vr2->operands.iterate (j, &vro2); j++)
660 if (vro2->opcode == MEM_REF)
661 deref2 = true;
662 /* Do not look through a storage order barrier. */
663 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
664 return false;
665 if (vro2->off == -1)
666 break;
667 off2 += vro2->off;
669 if (off1 != off2)
670 return false;
671 if (deref1 && vro1->opcode == ADDR_EXPR)
673 memset (&tem1, 0, sizeof (tem1));
674 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
675 tem1.type = TREE_TYPE (tem1.op0);
676 tem1.opcode = TREE_CODE (tem1.op0);
677 vro1 = &tem1;
678 deref1 = false;
680 if (deref2 && vro2->opcode == ADDR_EXPR)
682 memset (&tem2, 0, sizeof (tem2));
683 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
684 tem2.type = TREE_TYPE (tem2.op0);
685 tem2.opcode = TREE_CODE (tem2.op0);
686 vro2 = &tem2;
687 deref2 = false;
689 if (deref1 != deref2)
690 return false;
691 if (!vn_reference_op_eq (vro1, vro2))
692 return false;
693 ++j;
694 ++i;
696 while (vr1->operands.length () != i
697 || vr2->operands.length () != j);
699 return true;
702 /* Copy the operations present in load/store REF into RESULT, a vector of
703 vn_reference_op_s's. */
705 static void
706 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
708 if (TREE_CODE (ref) == TARGET_MEM_REF)
710 vn_reference_op_s temp;
712 result->reserve (3);
714 memset (&temp, 0, sizeof (temp));
715 temp.type = TREE_TYPE (ref);
716 temp.opcode = TREE_CODE (ref);
717 temp.op0 = TMR_INDEX (ref);
718 temp.op1 = TMR_STEP (ref);
719 temp.op2 = TMR_OFFSET (ref);
720 temp.off = -1;
721 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
722 temp.base = MR_DEPENDENCE_BASE (ref);
723 result->quick_push (temp);
725 memset (&temp, 0, sizeof (temp));
726 temp.type = NULL_TREE;
727 temp.opcode = ERROR_MARK;
728 temp.op0 = TMR_INDEX2 (ref);
729 temp.off = -1;
730 result->quick_push (temp);
732 memset (&temp, 0, sizeof (temp));
733 temp.type = NULL_TREE;
734 temp.opcode = TREE_CODE (TMR_BASE (ref));
735 temp.op0 = TMR_BASE (ref);
736 temp.off = -1;
737 result->quick_push (temp);
738 return;
741 /* For non-calls, store the information that makes up the address. */
742 tree orig = ref;
743 while (ref)
745 vn_reference_op_s temp;
747 memset (&temp, 0, sizeof (temp));
748 temp.type = TREE_TYPE (ref);
749 temp.opcode = TREE_CODE (ref);
750 temp.off = -1;
752 switch (temp.opcode)
754 case MODIFY_EXPR:
755 temp.op0 = TREE_OPERAND (ref, 1);
756 break;
757 case WITH_SIZE_EXPR:
758 temp.op0 = TREE_OPERAND (ref, 1);
759 temp.off = 0;
760 break;
761 case MEM_REF:
762 /* The base address gets its own vn_reference_op_s structure. */
763 temp.op0 = TREE_OPERAND (ref, 1);
765 offset_int off = mem_ref_offset (ref);
766 if (wi::fits_shwi_p (off))
767 temp.off = off.to_shwi ();
769 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
770 temp.base = MR_DEPENDENCE_BASE (ref);
771 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
772 break;
773 case BIT_FIELD_REF:
774 /* Record bits, position and storage order. */
775 temp.op0 = TREE_OPERAND (ref, 1);
776 temp.op1 = TREE_OPERAND (ref, 2);
777 if (tree_fits_shwi_p (TREE_OPERAND (ref, 2)))
779 HOST_WIDE_INT off = tree_to_shwi (TREE_OPERAND (ref, 2));
780 if (off % BITS_PER_UNIT == 0)
781 temp.off = off / BITS_PER_UNIT;
783 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
784 break;
785 case COMPONENT_REF:
786 /* The field decl is enough to unambiguously specify the field,
787 a matching type is not necessary and a mismatching type
788 is always a spurious difference. */
789 temp.type = NULL_TREE;
790 temp.op0 = TREE_OPERAND (ref, 1);
791 temp.op1 = TREE_OPERAND (ref, 2);
793 tree this_offset = component_ref_field_offset (ref);
794 if (this_offset
795 && TREE_CODE (this_offset) == INTEGER_CST)
797 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
798 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
800 offset_int off
801 = (wi::to_offset (this_offset)
802 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
803 if (wi::fits_shwi_p (off)
804 /* Prohibit value-numbering zero offset components
805 of addresses the same before the pass folding
806 __builtin_object_size had a chance to run
807 (checking cfun->after_inlining does the
808 trick here). */
809 && (TREE_CODE (orig) != ADDR_EXPR
810 || off != 0
811 || cfun->after_inlining))
812 temp.off = off.to_shwi ();
816 break;
817 case ARRAY_RANGE_REF:
818 case ARRAY_REF:
820 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
821 /* Record index as operand. */
822 temp.op0 = TREE_OPERAND (ref, 1);
823 /* Always record lower bounds and element size. */
824 temp.op1 = array_ref_low_bound (ref);
825 /* But record element size in units of the type alignment. */
826 temp.op2 = TREE_OPERAND (ref, 3);
827 temp.align = eltype->type_common.align;
828 if (! temp.op2)
829 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
830 size_int (TYPE_ALIGN_UNIT (eltype)));
831 if (TREE_CODE (temp.op0) == INTEGER_CST
832 && TREE_CODE (temp.op1) == INTEGER_CST
833 && TREE_CODE (temp.op2) == INTEGER_CST)
835 offset_int off = ((wi::to_offset (temp.op0)
836 - wi::to_offset (temp.op1))
837 * wi::to_offset (temp.op2)
838 * vn_ref_op_align_unit (&temp));
839 if (wi::fits_shwi_p (off))
840 temp.off = off.to_shwi();
843 break;
844 case VAR_DECL:
845 if (DECL_HARD_REGISTER (ref))
847 temp.op0 = ref;
848 break;
850 /* Fallthru. */
851 case PARM_DECL:
852 case CONST_DECL:
853 case RESULT_DECL:
854 /* Canonicalize decls to MEM[&decl] which is what we end up with
855 when valueizing MEM[ptr] with ptr = &decl. */
856 temp.opcode = MEM_REF;
857 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
858 temp.off = 0;
859 result->safe_push (temp);
860 temp.opcode = ADDR_EXPR;
861 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
862 temp.type = TREE_TYPE (temp.op0);
863 temp.off = -1;
864 break;
865 case STRING_CST:
866 case INTEGER_CST:
867 case COMPLEX_CST:
868 case VECTOR_CST:
869 case REAL_CST:
870 case FIXED_CST:
871 case CONSTRUCTOR:
872 case SSA_NAME:
873 temp.op0 = ref;
874 break;
875 case ADDR_EXPR:
876 if (is_gimple_min_invariant (ref))
878 temp.op0 = ref;
879 break;
881 break;
882 /* These are only interesting for their operands, their
883 existence, and their type. They will never be the last
884 ref in the chain of references (i.e. they require an
885 operand), so we don't have to put anything
886 for op* as it will be handled by the iteration.  */
887 case REALPART_EXPR:
888 temp.off = 0;
889 break;
890 case VIEW_CONVERT_EXPR:
891 temp.off = 0;
892 temp.reverse = storage_order_barrier_p (ref);
893 break;
894 case IMAGPART_EXPR:
895 /* This is only interesting for its constant offset. */
896 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
897 break;
898 default:
899 gcc_unreachable ();
901 result->safe_push (temp);
903 if (REFERENCE_CLASS_P (ref)
904 || TREE_CODE (ref) == MODIFY_EXPR
905 || TREE_CODE (ref) == WITH_SIZE_EXPR
906 || (TREE_CODE (ref) == ADDR_EXPR
907 && !is_gimple_min_invariant (ref)))
908 ref = TREE_OPERAND (ref, 0);
909 else
910 ref = NULL_TREE;
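/* For example, a load from a.f with A a plain VAR_DECL decomposes into
   the operand vector { COMPONENT_REF<f>, MEM_REF<0>, ADDR_EXPR<&a> },
   the same form we get from valueizing MEM[ptr].f with ptr = &a.  */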
914 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
915 operands in *OPS, the reference alias set SET and the reference type TYPE.
916 Return true if something useful was produced. */
918 bool
919 ao_ref_init_from_vn_reference (ao_ref *ref,
920 alias_set_type set, tree type,
921 vec<vn_reference_op_s> ops)
923 vn_reference_op_t op;
924 unsigned i;
925 tree base = NULL_TREE;
926 tree *op0_p = &base;
927 offset_int offset = 0;
928 offset_int max_size;
929 offset_int size = -1;
930 tree size_tree = NULL_TREE;
931 alias_set_type base_alias_set = -1;
933 /* First get the final access size from just the outermost expression. */
934 op = &ops[0];
935 if (op->opcode == COMPONENT_REF)
936 size_tree = DECL_SIZE (op->op0);
937 else if (op->opcode == BIT_FIELD_REF)
938 size_tree = op->op0;
939 else
941 machine_mode mode = TYPE_MODE (type);
942 if (mode == BLKmode)
943 size_tree = TYPE_SIZE (type);
944 else
945 size = int (GET_MODE_BITSIZE (mode));
947 if (size_tree != NULL_TREE
948 && TREE_CODE (size_tree) == INTEGER_CST)
949 size = wi::to_offset (size_tree);
951 /* Initially, maxsize is the same as the accessed element size.
952 In the following it will only grow (or become -1). */
953 max_size = size;
955 /* Compute cumulative bit-offset for nested component-refs and array-refs,
956 and find the ultimate containing object. */
957 FOR_EACH_VEC_ELT (ops, i, op)
959 switch (op->opcode)
961 /* These may be in the reference ops, but we cannot do anything
962 sensible with them here. */
963 case ADDR_EXPR:
964 /* Apart from ADDR_EXPR arguments to MEM_REF. */
965 if (base != NULL_TREE
966 && TREE_CODE (base) == MEM_REF
967 && op->op0
968 && DECL_P (TREE_OPERAND (op->op0, 0)))
970 vn_reference_op_t pop = &ops[i-1];
971 base = TREE_OPERAND (op->op0, 0);
972 if (pop->off == -1)
974 max_size = -1;
975 offset = 0;
977 else
978 offset += pop->off * BITS_PER_UNIT;
979 op0_p = NULL;
980 break;
982 /* Fallthru. */
983 case CALL_EXPR:
984 return false;
986 /* Record the base objects. */
987 case MEM_REF:
988 base_alias_set = get_deref_alias_set (op->op0);
989 *op0_p = build2 (MEM_REF, op->type,
990 NULL_TREE, op->op0);
991 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
992 MR_DEPENDENCE_BASE (*op0_p) = op->base;
993 op0_p = &TREE_OPERAND (*op0_p, 0);
994 break;
996 case VAR_DECL:
997 case PARM_DECL:
998 case RESULT_DECL:
999 case SSA_NAME:
1000 *op0_p = op->op0;
1001 op0_p = NULL;
1002 break;
1004 /* And now the usual component-reference style ops. */
1005 case BIT_FIELD_REF:
1006 offset += wi::to_offset (op->op1);
1007 break;
1009 case COMPONENT_REF:
1011 tree field = op->op0;
1012 /* We do not have a complete COMPONENT_REF tree here so we
1013 cannot use component_ref_field_offset. Do the interesting
1014 parts manually. */
1015 tree this_offset = DECL_FIELD_OFFSET (field);
1017 if (op->op1 || TREE_CODE (this_offset) != INTEGER_CST)
1018 max_size = -1;
1019 else
1021 offset_int woffset = (wi::to_offset (this_offset)
1022 << LOG2_BITS_PER_UNIT);
1023 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1024 offset += woffset;
1026 break;
1029 case ARRAY_RANGE_REF:
1030 case ARRAY_REF:
1031 /* We recorded the lower bound and the element size. */
1032 if (TREE_CODE (op->op0) != INTEGER_CST
1033 || TREE_CODE (op->op1) != INTEGER_CST
1034 || TREE_CODE (op->op2) != INTEGER_CST)
1035 max_size = -1;
1036 else
1038 offset_int woffset
1039 = wi::sext (wi::to_offset (op->op0) - wi::to_offset (op->op1),
1040 TYPE_PRECISION (TREE_TYPE (op->op0)));
1041 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1042 woffset <<= LOG2_BITS_PER_UNIT;
1043 offset += woffset;
1045 break;
1047 case REALPART_EXPR:
1048 break;
1050 case IMAGPART_EXPR:
1051 offset += size;
1052 break;
1054 case VIEW_CONVERT_EXPR:
1055 break;
1057 case STRING_CST:
1058 case INTEGER_CST:
1059 case COMPLEX_CST:
1060 case VECTOR_CST:
1061 case REAL_CST:
1062 case CONSTRUCTOR:
1063 case CONST_DECL:
1064 return false;
1066 default:
1067 return false;
1071 if (base == NULL_TREE)
1072 return false;
1074 ref->ref = NULL_TREE;
1075 ref->base = base;
1076 ref->ref_alias_set = set;
1077 if (base_alias_set != -1)
1078 ref->base_alias_set = base_alias_set;
1079 else
1080 ref->base_alias_set = get_alias_set (base);
1081 /* We discount volatiles from value-numbering elsewhere. */
1082 ref->volatile_p = false;
1084 if (!wi::fits_shwi_p (size) || wi::neg_p (size))
1086 ref->offset = 0;
1087 ref->size = -1;
1088 ref->max_size = -1;
1089 return true;
1092 ref->size = size.to_shwi ();
1094 if (!wi::fits_shwi_p (offset))
1096 ref->offset = 0;
1097 ref->max_size = -1;
1098 return true;
1101 ref->offset = offset.to_shwi ();
1103 if (!wi::fits_shwi_p (max_size) || wi::neg_p (max_size))
1104 ref->max_size = -1;
1105 else
1106 ref->max_size = max_size.to_shwi ();
1108 return true;
1111 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1112 vn_reference_op_s's. */
1114 static void
1115 copy_reference_ops_from_call (gcall *call,
1116 vec<vn_reference_op_s> *result)
1118 vn_reference_op_s temp;
1119 unsigned i;
1120 tree lhs = gimple_call_lhs (call);
1121 int lr;
1123 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1124 different. By adding the lhs here in the vector, we ensure that the
1125 hashcode is different, guaranteeing a different value number. */
1126 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1128 memset (&temp, 0, sizeof (temp));
1129 temp.opcode = MODIFY_EXPR;
1130 temp.type = TREE_TYPE (lhs);
1131 temp.op0 = lhs;
1132 temp.off = -1;
1133 result->safe_push (temp);
1136 /* Copy the type, opcode, function, static chain and EH region, if any. */
1137 memset (&temp, 0, sizeof (temp));
1138 temp.type = gimple_call_return_type (call);
1139 temp.opcode = CALL_EXPR;
1140 temp.op0 = gimple_call_fn (call);
1141 temp.op1 = gimple_call_chain (call);
1142 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1143 temp.op2 = size_int (lr);
1144 temp.off = -1;
1145 if (gimple_call_with_bounds_p (call))
1146 temp.with_bounds = 1;
1147 result->safe_push (temp);
1149 /* Copy the call arguments. As they can be references as well,
1150 just chain them together. */
1151 for (i = 0; i < gimple_call_num_args (call); ++i)
1153 tree callarg = gimple_call_arg (call, i);
1154 copy_reference_ops_from_ref (callarg, result);
1158 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1159 *I_P to point to the last element of the replacement. */
1160 static bool
1161 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1162 unsigned int *i_p)
1164 unsigned int i = *i_p;
1165 vn_reference_op_t op = &(*ops)[i];
1166 vn_reference_op_t mem_op = &(*ops)[i - 1];
1167 tree addr_base;
1168 HOST_WIDE_INT addr_offset = 0;
1170 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1171 from .foo.bar to the preceding MEM_REF offset and replace the
1172 address with &OBJ. */
1173 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1174 &addr_offset);
1175 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1176 if (addr_base != TREE_OPERAND (op->op0, 0))
1178 offset_int off = offset_int::from (wi::to_wide (mem_op->op0), SIGNED);
1179 off += addr_offset;
1180 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1181 op->op0 = build_fold_addr_expr (addr_base);
1182 if (tree_fits_shwi_p (mem_op->op0))
1183 mem_op->off = tree_to_shwi (mem_op->op0);
1184 else
1185 mem_op->off = -1;
1186 return true;
1188 return false;
1191 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1192 *I_P to point to the last element of the replacement. */
1193 static bool
1194 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1195 unsigned int *i_p)
1197 unsigned int i = *i_p;
1198 vn_reference_op_t op = &(*ops)[i];
1199 vn_reference_op_t mem_op = &(*ops)[i - 1];
1200 gimple *def_stmt;
1201 enum tree_code code;
1202 offset_int off;
1204 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1205 if (!is_gimple_assign (def_stmt))
1206 return false;
1208 code = gimple_assign_rhs_code (def_stmt);
1209 if (code != ADDR_EXPR
1210 && code != POINTER_PLUS_EXPR)
1211 return false;
1213 off = offset_int::from (wi::to_wide (mem_op->op0), SIGNED);
1215 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1216 from .foo.bar to the preceding MEM_REF offset and replace the
1217 address with &OBJ. */
1218 if (code == ADDR_EXPR)
1220 tree addr, addr_base;
1221 HOST_WIDE_INT addr_offset;
1223 addr = gimple_assign_rhs1 (def_stmt);
1224 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1225 &addr_offset);
1226 /* If that didn't work because the address isn't invariant propagate
1227 the reference tree from the address operation in case the current
1228 dereference isn't offsetted. */
1229 if (!addr_base
1230 && *i_p == ops->length () - 1
1231 && off == 0
1232 /* This makes us disable this transform for PRE where the
1233 reference ops might be also used for code insertion which
1234 is invalid. */
1235 && default_vn_walk_kind == VN_WALKREWRITE)
1237 auto_vec<vn_reference_op_s, 32> tem;
1238 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1239 /* Make sure to preserve TBAA info. The only objects not
1240 wrapped in MEM_REFs that can have their address taken are
1241 STRING_CSTs. */
1242 if (tem.length () >= 2
1243 && tem[tem.length () - 2].opcode == MEM_REF)
1245 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1246 new_mem_op->op0
1247 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1248 wi::to_wide (new_mem_op->op0));
1250 else
1251 gcc_assert (tem.last ().opcode == STRING_CST);
1252 ops->pop ();
1253 ops->pop ();
1254 ops->safe_splice (tem);
1255 --*i_p;
1256 return true;
1258 if (!addr_base
1259 || TREE_CODE (addr_base) != MEM_REF)
1260 return false;
1262 off += addr_offset;
1263 off += mem_ref_offset (addr_base);
1264 op->op0 = TREE_OPERAND (addr_base, 0);
1266 else
1268 tree ptr, ptroff;
1269 ptr = gimple_assign_rhs1 (def_stmt);
1270 ptroff = gimple_assign_rhs2 (def_stmt);
1271 if (TREE_CODE (ptr) != SSA_NAME
1272 || TREE_CODE (ptroff) != INTEGER_CST)
1273 return false;
1275 off += wi::to_offset (ptroff);
1276 op->op0 = ptr;
1279 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1280 if (tree_fits_shwi_p (mem_op->op0))
1281 mem_op->off = tree_to_shwi (mem_op->op0);
1282 else
1283 mem_op->off = -1;
1284 if (TREE_CODE (op->op0) == SSA_NAME)
1285 op->op0 = SSA_VAL (op->op0);
1286 if (TREE_CODE (op->op0) != SSA_NAME)
1287 op->opcode = TREE_CODE (op->op0);
1289 /* And recurse. */
1290 if (TREE_CODE (op->op0) == SSA_NAME)
1291 vn_reference_maybe_forwprop_address (ops, i_p);
1292 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1293 vn_reference_fold_indirect (ops, i_p);
1294 return true;
1297 /* Optimize the reference REF to a constant if possible or return
1298 NULL_TREE if not. */
1300 tree
1301 fully_constant_vn_reference_p (vn_reference_t ref)
1303 vec<vn_reference_op_s> operands = ref->operands;
1304 vn_reference_op_t op;
1306 /* Try to simplify the translated expression if it is
1307 a call to a builtin function with at most two arguments. */
1308 op = &operands[0];
1309 if (op->opcode == CALL_EXPR
1310 && TREE_CODE (op->op0) == ADDR_EXPR
1311 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1312 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1313 && operands.length () >= 2
1314 && operands.length () <= 3)
1316 vn_reference_op_t arg0, arg1 = NULL;
1317 bool anyconst = false;
1318 arg0 = &operands[1];
1319 if (operands.length () > 2)
1320 arg1 = &operands[2];
1321 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1322 || (arg0->opcode == ADDR_EXPR
1323 && is_gimple_min_invariant (arg0->op0)))
1324 anyconst = true;
1325 if (arg1
1326 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1327 || (arg1->opcode == ADDR_EXPR
1328 && is_gimple_min_invariant (arg1->op0))))
1329 anyconst = true;
1330 if (anyconst)
1332 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1333 arg1 ? 2 : 1,
1334 arg0->op0,
1335 arg1 ? arg1->op0 : NULL);
1336 if (folded
1337 && TREE_CODE (folded) == NOP_EXPR)
1338 folded = TREE_OPERAND (folded, 0);
1339 if (folded
1340 && is_gimple_min_invariant (folded))
1341 return folded;
1345 /* Simplify reads from constants or constant initializers. */
1346 else if (BITS_PER_UNIT == 8
1347 && is_gimple_reg_type (ref->type)
1348 && (!INTEGRAL_TYPE_P (ref->type)
1349 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1351 HOST_WIDE_INT off = 0;
1352 HOST_WIDE_INT size;
1353 if (INTEGRAL_TYPE_P (ref->type))
1354 size = TYPE_PRECISION (ref->type);
1355 else
1356 size = tree_to_shwi (TYPE_SIZE (ref->type));
1357 if (size % BITS_PER_UNIT != 0
1358 || size > MAX_BITSIZE_MODE_ANY_MODE)
1359 return NULL_TREE;
1360 size /= BITS_PER_UNIT;
1361 unsigned i;
1362 for (i = 0; i < operands.length (); ++i)
1364 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1366 ++i;
1367 break;
1369 if (operands[i].off == -1)
1370 return NULL_TREE;
1371 off += operands[i].off;
1372 if (operands[i].opcode == MEM_REF)
1374 ++i;
1375 break;
1378 vn_reference_op_t base = &operands[--i];
1379 tree ctor = error_mark_node;
1380 tree decl = NULL_TREE;
1381 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1382 ctor = base->op0;
1383 else if (base->opcode == MEM_REF
1384 && base[1].opcode == ADDR_EXPR
1385 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1386 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
1388 decl = TREE_OPERAND (base[1].op0, 0);
1389 ctor = ctor_for_folding (decl);
1391 if (ctor == NULL_TREE)
1392 return build_zero_cst (ref->type);
1393 else if (ctor != error_mark_node)
1395 if (decl)
1397 tree res = fold_ctor_reference (ref->type, ctor,
1398 off * BITS_PER_UNIT,
1399 size * BITS_PER_UNIT, decl);
1400 if (res)
1402 STRIP_USELESS_TYPE_CONVERSION (res);
1403 if (is_gimple_min_invariant (res))
1404 return res;
1407 else
1409 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1410 int len = native_encode_expr (ctor, buf, size, off);
1411 if (len > 0)
1412 return native_interpret_expr (ref->type, buf, len);
1417 return NULL_TREE;
1420 /* Return true if OPS contain a storage order barrier. */
1422 static bool
1423 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1425 vn_reference_op_t op;
1426 unsigned i;
1428 FOR_EACH_VEC_ELT (ops, i, op)
1429 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1430 return true;
1432 return false;
1435 /* Transform any SSA_NAMEs in a vector of vn_reference_op_s
1436 structures into their value numbers. This is done in-place, and
1437 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1438 whether any operands were valueized. */
1440 static vec<vn_reference_op_s>
1441 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1443 vn_reference_op_t vro;
1444 unsigned int i;
1446 *valueized_anything = false;
1448 FOR_EACH_VEC_ELT (orig, i, vro)
1450 if (vro->opcode == SSA_NAME
1451 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1453 tree tem = SSA_VAL (vro->op0);
1454 if (tem != vro->op0)
1456 *valueized_anything = true;
1457 vro->op0 = tem;
1459 /* If it transforms from an SSA_NAME to a constant, update
1460 the opcode. */
1461 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1462 vro->opcode = TREE_CODE (vro->op0);
1464 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1466 tree tem = SSA_VAL (vro->op1);
1467 if (tem != vro->op1)
1469 *valueized_anything = true;
1470 vro->op1 = tem;
1473 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1475 tree tem = SSA_VAL (vro->op2);
1476 if (tem != vro->op2)
1478 *valueized_anything = true;
1479 vro->op2 = tem;
1482 /* If it transforms from an SSA_NAME to an address, fold with
1483 a preceding indirect reference. */
1484 if (i > 0
1485 && vro->op0
1486 && TREE_CODE (vro->op0) == ADDR_EXPR
1487 && orig[i - 1].opcode == MEM_REF)
1489 if (vn_reference_fold_indirect (&orig, &i))
1490 *valueized_anything = true;
1492 else if (i > 0
1493 && vro->opcode == SSA_NAME
1494 && orig[i - 1].opcode == MEM_REF)
1496 if (vn_reference_maybe_forwprop_address (&orig, &i))
1497 *valueized_anything = true;
1499 /* If it transforms a non-constant ARRAY_REF into a constant
1500 one, adjust the constant offset. */
1501 else if (vro->opcode == ARRAY_REF
1502 && vro->off == -1
1503 && TREE_CODE (vro->op0) == INTEGER_CST
1504 && TREE_CODE (vro->op1) == INTEGER_CST
1505 && TREE_CODE (vro->op2) == INTEGER_CST)
1507 offset_int off = ((wi::to_offset (vro->op0)
1508 - wi::to_offset (vro->op1))
1509 * wi::to_offset (vro->op2)
1510 * vn_ref_op_align_unit (vro));
1511 if (wi::fits_shwi_p (off))
1512 vro->off = off.to_shwi ();
1516 return orig;
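/* Like valueize_refs_1, but for callers that do not care whether
   anything was valueized.  */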
1519 static vec<vn_reference_op_s>
1520 valueize_refs (vec<vn_reference_op_s> orig)
1522 bool tem;
1523 return valueize_refs_1 (orig, &tem);
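/* Shared operand vector, reused by the valueize_shared_* routines below
   to avoid repeated allocations.  */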
1526 static vec<vn_reference_op_s> shared_lookup_references;
1528 /* Create a vector of vn_reference_op_s structures from REF, a
1529 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1530 this function. *VALUEIZED_ANYTHING will specify whether any
1531 operands were valueized. */
1533 static vec<vn_reference_op_s>
1534 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1536 if (!ref)
1537 return vNULL;
1538 shared_lookup_references.truncate (0);
1539 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1540 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1541 valueized_anything);
1542 return shared_lookup_references;
1545 /* Create a vector of vn_reference_op_s structures from CALL, a
1546 call statement. The vector is shared among all callers of
1547 this function. */
1549 static vec<vn_reference_op_s>
1550 valueize_shared_reference_ops_from_call (gcall *call)
1552 if (!call)
1553 return vNULL;
1554 shared_lookup_references.truncate (0);
1555 copy_reference_ops_from_call (call, &shared_lookup_references);
1556 shared_lookup_references = valueize_refs (shared_lookup_references);
1557 return shared_lookup_references;
1560 /* Lookup a SCCVN reference operation VR in the current hash table.
1561 Returns the resulting value number if it exists in the hash table,
1562 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1563 vn_reference_t stored in the hashtable if something is found. */
1565 static tree
1566 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1568 vn_reference_s **slot;
1569 hashval_t hash;
1571 hash = vr->hashcode;
1572 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1573 if (!slot && current_info == optimistic_info)
1574 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1575 if (slot)
1577 if (vnresult)
1578 *vnresult = (vn_reference_t)*slot;
1579 return ((vn_reference_t)*slot)->result;
1582 return NULL_TREE;
1585 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1586 with the current VUSE and performs the expression lookup. */
1588 static void *
1589 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1590 unsigned int cnt, void *vr_)
1592 vn_reference_t vr = (vn_reference_t)vr_;
1593 vn_reference_s **slot;
1594 hashval_t hash;
1596 /* This bounds the stmt walks we perform on reference lookups
1597 to O(1) instead of O(N) where N is the number of dominating
1598 stores. */
1599 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1600 return (void *)-1;
1602 if (last_vuse_ptr)
1603 *last_vuse_ptr = vuse;
1605 /* Fixup vuse and hash. */
1606 if (vr->vuse)
1607 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1608 vr->vuse = vuse_ssa_val (vuse);
1609 if (vr->vuse)
1610 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1612 hash = vr->hashcode;
1613 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1614 if (!slot && current_info == optimistic_info)
1615 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1616 if (slot)
1617 return *slot;
1619 return NULL;
1622 /* Lookup an existing or insert a new vn_reference entry into the
1623 value table for the VUSE, SET, TYPE, OPERANDS reference which
1624 has the value VALUE which is either a constant or an SSA name. */
1626 static vn_reference_t
1627 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1628 alias_set_type set,
1629 tree type,
1630 vec<vn_reference_op_s,
1631 va_heap> operands,
1632 tree value)
1634 vn_reference_s vr1;
1635 vn_reference_t result;
1636 unsigned value_id;
1637 vr1.vuse = vuse;
1638 vr1.operands = operands;
1639 vr1.type = type;
1640 vr1.set = set;
1641 vr1.hashcode = vn_reference_compute_hash (&vr1);
1642 if (vn_reference_lookup_1 (&vr1, &result))
1643 return result;
1644 if (TREE_CODE (value) == SSA_NAME)
1645 value_id = VN_INFO (value)->value_id;
1646 else
1647 value_id = get_or_alloc_constant_value_id (value);
1648 return vn_reference_insert_pieces (vuse, set, type,
1649 operands.copy (), value, value_id);
1652 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *stmt, tree result);
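/* Down-counter limiting successful vn_lookup_simplify_result lookups
   when it is installed as mprts_hook; see the PR80887 note below.  */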
1653 static unsigned mprts_hook_cnt;
1655 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
1657 static tree
1658 vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops_)
1660 if (!rcode.is_tree_code ())
1661 return NULL_TREE;
1662 tree *ops = ops_;
1663 unsigned int length = TREE_CODE_LENGTH ((tree_code) rcode);
1664 if (rcode == CONSTRUCTOR
1665 /* ??? We're arriving here with SCCVN's view, decomposed CONSTRUCTOR
1666 and GIMPLEs / match-and-simplifies, CONSTRUCTOR as GENERIC tree. */
1667 && TREE_CODE (ops_[0]) == CONSTRUCTOR)
1669 length = CONSTRUCTOR_NELTS (ops_[0]);
1670 ops = XALLOCAVEC (tree, length);
1671 for (unsigned i = 0; i < length; ++i)
1672 ops[i] = CONSTRUCTOR_ELT (ops_[0], i)->value;
1674 vn_nary_op_t vnresult = NULL;
1675 tree res = vn_nary_op_lookup_pieces (length, (tree_code) rcode,
1676 type, ops, &vnresult);
1677 /* We can end up endlessly recursing simplifications if the lookup above
1678 presents us with a def-use chain that mirrors the original simplification.
1679 See PR80887 for an example. Limit successful lookup artificially
1680 to 10 times if we are called as mprts_hook. */
1681 if (res
1682 && mprts_hook
1683 && --mprts_hook_cnt == 0)
1685 if (dump_file && (dump_flags & TDF_DETAILS))
1686 fprintf (dump_file, "Resetting mprts_hook after too many "
1687 "invocations.\n");
1688 mprts_hook = NULL;
1690 return res;
1693 /* Return a value-number for RCODE OPS... either by looking up an existing
1694 value-number for the simplified result or by inserting the operation if
1695 INSERT is true. */
1697 static tree
1698 vn_nary_build_or_lookup_1 (code_helper rcode, tree type, tree *ops,
1699 bool insert)
1701 tree result = NULL_TREE;
1702 /* We will be creating a value number for
1703 RCODE (OPS...).
1704 So first simplify and lookup this expression to see if it
1705 is already available. */
1706 mprts_hook = vn_lookup_simplify_result;
1707 mprts_hook_cnt = 9;
1708 bool res = false;
1709 switch (TREE_CODE_LENGTH ((tree_code) rcode))
1711 case 1:
1712 res = gimple_resimplify1 (NULL, &rcode, type, ops, vn_valueize);
1713 break;
1714 case 2:
1715 res = gimple_resimplify2 (NULL, &rcode, type, ops, vn_valueize);
1716 break;
1717 case 3:
1718 res = gimple_resimplify3 (NULL, &rcode, type, ops, vn_valueize);
1719 break;
1721 mprts_hook = NULL;
1722 gimple *new_stmt = NULL;
1723 if (res
1724 && gimple_simplified_result_is_gimple_val (rcode, ops))
1725 /* The expression is already available. */
1726 result = ops[0];
1727 else
1729 tree val = vn_lookup_simplify_result (rcode, type, ops);
1730 if (!val && insert)
1732 gimple_seq stmts = NULL;
1733 result = maybe_push_res_to_seq (rcode, type, ops, &stmts);
1734 if (result)
1736 gcc_assert (gimple_seq_singleton_p (stmts));
1737 new_stmt = gimple_seq_first_stmt (stmts);
1740 else
1741 /* The expression is already available. */
1742 result = val;
1744 if (new_stmt)
1746 /* The expression is not yet available, value-number lhs to
1747 the new SSA_NAME we created. */
1748 /* Initialize value-number information properly. */
1749 VN_INFO_GET (result)->valnum = result;
1750 VN_INFO (result)->value_id = get_next_value_id ();
1751 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1752 new_stmt);
1753 VN_INFO (result)->needs_insertion = true;
1754 /* ??? PRE phi-translation inserts NARYs without corresponding
1755 SSA name result. Re-use those but set their result according
1756 to the stmt we just built. */
1757 vn_nary_op_t nary = NULL;
1758 vn_nary_op_lookup_stmt (new_stmt, &nary);
1759 if (nary)
1761 gcc_assert (nary->result == NULL_TREE);
1762 nary->result = gimple_assign_lhs (new_stmt);
1764 /* As all "inserted" statements are singleton SCCs, insert
1765 to the valid table. This is strictly needed to
1766 avoid re-generating new value SSA_NAMEs for the same
1767 expression during SCC iteration over and over (the
1768 optimistic table gets cleared after each iteration).
1769 We do not need to insert into the optimistic table, as
1770 lookups there will fall back to the valid table. */
1771 else if (current_info == optimistic_info)
1773 current_info = valid_info;
1774 vn_nary_op_insert_stmt (new_stmt, result);
1775 current_info = optimistic_info;
1777 else
1778 vn_nary_op_insert_stmt (new_stmt, result);
1779 if (dump_file && (dump_flags & TDF_DETAILS))
1781 fprintf (dump_file, "Inserting name ");
1782 print_generic_expr (dump_file, result);
1783 fprintf (dump_file, " for expression ");
1784 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1785 fprintf (dump_file, "\n");
1788 return result;
1791 /* Return a value-number for RCODE OPS... either by looking up an existing
1792 value-number for the simplified result or by inserting the operation. */
1794 static tree
1795 vn_nary_build_or_lookup (code_helper rcode, tree type, tree *ops)
1797 return vn_nary_build_or_lookup_1 (rcode, type, ops, true);
1800 /* Try to simplify the expression RCODE OPS... of type TYPE and return
1801 its value if present. */
1803 tree
1804 vn_nary_simplify (vn_nary_op_t nary)
1806 if (nary->length > 3)
1807 return NULL_TREE;
1808 tree ops[3];
1809 memcpy (ops, nary->op, sizeof (tree) * nary->length);
1810 return vn_nary_build_or_lookup_1 (nary->opcode, nary->type, ops, false);
1814 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1815 from the statement defining VUSE and if not successful tries to
1816 translate *REFP and VR_ through an aggregate copy at the definition
1817 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1818 of *REF and *VR. If only disambiguation was performed then
1819 *DISAMBIGUATE_ONLY is set to true. */
1821 static void *
1822 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1823 bool *disambiguate_only)
1825 vn_reference_t vr = (vn_reference_t)vr_;
1826 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1827 tree base = ao_ref_base (ref);
1828 HOST_WIDE_INT offset, maxsize;
1829 static vec<vn_reference_op_s> lhs_ops;
1830 ao_ref lhs_ref;
1831 bool lhs_ref_ok = false;
1833 /* If the reference is based on a parameter that was determined as
1834 pointing to readonly memory it doesn't change. */
1835 if (TREE_CODE (base) == MEM_REF
1836 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1837 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1838 && bitmap_bit_p (const_parms,
1839 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1841 *disambiguate_only = true;
1842 return NULL;
1845 /* First try to disambiguate after value-replacing in the definition's LHS. */
1846 if (is_gimple_assign (def_stmt))
1848 tree lhs = gimple_assign_lhs (def_stmt);
1849 bool valueized_anything = false;
1850 /* Avoid re-allocation overhead. */
1851 lhs_ops.truncate (0);
1852 copy_reference_ops_from_ref (lhs, &lhs_ops);
1853 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1854 if (valueized_anything)
1856 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1857 get_alias_set (lhs),
1858 TREE_TYPE (lhs), lhs_ops);
1859 if (lhs_ref_ok
1860 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1862 *disambiguate_only = true;
1863 return NULL;
1866 else
1868 ao_ref_init (&lhs_ref, lhs);
1869 lhs_ref_ok = true;
1872 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1873 && gimple_call_num_args (def_stmt) <= 4)
1875 /* For builtin calls, valueize their arguments and call the
1876 alias oracle again. Valueization may improve points-to
1877 info of pointers and constify size and position arguments.
1878 Originally this was motivated by PR61034 which has
1879 conditional calls to free falsely clobbering ref because
1880 of imprecise points-to info of the argument. */
1881 tree oldargs[4];
1882 bool valueized_anything = false;
1883 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1885 oldargs[i] = gimple_call_arg (def_stmt, i);
1886 tree val = vn_valueize (oldargs[i]);
1887 if (val != oldargs[i])
1889 gimple_call_set_arg (def_stmt, i, val);
1890 valueized_anything = true;
1893 if (valueized_anything)
1895 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1896 ref);
1897 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1898 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1899 if (!res)
1901 *disambiguate_only = true;
1902 return NULL;
1907 if (*disambiguate_only)
1908 return (void *)-1;
1910 offset = ref->offset;
1911 maxsize = ref->max_size;
1913 /* If we cannot constrain the size of the reference we cannot
1914 test if anything kills it. */
1915 if (maxsize == -1)
1916 return (void *)-1;
1918 /* We can't deduce anything useful from clobbers. */
1919 if (gimple_clobber_p (def_stmt))
1920 return (void *)-1;
1922 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1923 from that definition.
1924 1) Memset. */
1925 if (is_gimple_reg_type (vr->type)
1926 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1927 && integer_zerop (gimple_call_arg (def_stmt, 1))
1928 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1929 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1931 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1932 tree base2;
1933 HOST_WIDE_INT offset2, size2, maxsize2;
1934 bool reverse;
1935 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1936 &reverse);
1937 size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
1938 if ((unsigned HOST_WIDE_INT)size2 / 8
1939 == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
1940 && maxsize2 != -1
1941 && operand_equal_p (base, base2, 0)
1942 && offset2 <= offset
1943 && offset2 + size2 >= offset + maxsize)
1945 tree val = build_zero_cst (vr->type);
1946 return vn_reference_lookup_or_insert_for_pieces
1947 (vuse, vr->set, vr->type, vr->operands, val);
1951 /* 2) Assignment from an empty CONSTRUCTOR. */
1952 else if (is_gimple_reg_type (vr->type)
1953 && gimple_assign_single_p (def_stmt)
1954 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1955 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1957 tree base2;
1958 HOST_WIDE_INT offset2, size2, maxsize2;
1959 bool reverse;
1960 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1961 &offset2, &size2, &maxsize2, &reverse);
1962 if (maxsize2 != -1
1963 && operand_equal_p (base, base2, 0)
1964 && offset2 <= offset
1965 && offset2 + size2 >= offset + maxsize)
1967 tree val = build_zero_cst (vr->type);
1968 return vn_reference_lookup_or_insert_for_pieces
1969 (vuse, vr->set, vr->type, vr->operands, val);
1973 /* 3) Assignment from a constant. We can use fold's native encode/interpret
1974 routines to extract the assigned bits. */
1975 else if (ref->size == maxsize
1976 && is_gimple_reg_type (vr->type)
1977 && !contains_storage_order_barrier_p (vr->operands)
1978 && gimple_assign_single_p (def_stmt)
1979 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1980 && maxsize % BITS_PER_UNIT == 0
1981 && offset % BITS_PER_UNIT == 0
1982 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
1983 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
1984 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
1986 tree base2;
1987 HOST_WIDE_INT offset2, size2, maxsize2;
1988 bool reverse;
1989 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1990 &offset2, &size2, &maxsize2, &reverse);
1991 if (!reverse
1992 && maxsize2 != -1
1993 && maxsize2 == size2
1994 && size2 % BITS_PER_UNIT == 0
1995 && offset2 % BITS_PER_UNIT == 0
1996 && operand_equal_p (base, base2, 0)
1997 && offset2 <= offset
1998 && offset2 + size2 >= offset + maxsize)
2000 /* We support up to 512-bit values (for V8DFmode). */
2001 unsigned char buffer[64];
2002 int len;
2004 tree rhs = gimple_assign_rhs1 (def_stmt);
2005 if (TREE_CODE (rhs) == SSA_NAME)
2006 rhs = SSA_VAL (rhs);
2007	      len = native_encode_expr (rhs,
2008 buffer, sizeof (buffer));
2009 if (len > 0)
2011 tree type = vr->type;
2012 /* Make sure to interpret in a type that has a range
2013 covering the whole access size. */
2014 if (INTEGRAL_TYPE_P (vr->type)
2015 && ref->size != TYPE_PRECISION (vr->type))
2016 type = build_nonstandard_integer_type (ref->size,
2017 TYPE_UNSIGNED (type));
2018 tree val = native_interpret_expr (type,
2019 buffer
2020 + ((offset - offset2)
2021 / BITS_PER_UNIT),
2022 ref->size / BITS_PER_UNIT);
2023	      /* If we chop off bits because the type's precision doesn't
2024		 match the memory access size, this is OK when optimizing
2025		 reads but not when called from the DSE code during
2026		 elimination.  */
2027 if (val
2028 && type != vr->type)
2030 if (! int_fits_type_p (val, vr->type))
2031 val = NULL_TREE;
2032 else
2033 val = fold_convert (vr->type, val);
2036 if (val)
2037 return vn_reference_lookup_or_insert_for_pieces
2038 (vuse, vr->set, vr->type, vr->operands, val);
2043   /* 4) Assignment from an SSA name whose definition we may be able
2044 to access pieces from. */
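	  /* Illustrative only (SSA names hypothetical): if a register value
	     was stored as a whole and a piece of it is read back,

	       vec = val_1;
	       ... = vec[1];

	     the read can be expressed as BIT_FIELD_REF <val_1, 32, 32>
	     for a 32-bit element at bit offset 32; the exact offsets are
	     only illustrative.  */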
2045 else if (ref->size == maxsize
2046 && is_gimple_reg_type (vr->type)
2047 && !contains_storage_order_barrier_p (vr->operands)
2048 && gimple_assign_single_p (def_stmt)
2049 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2051 tree base2;
2052 HOST_WIDE_INT offset2, size2, maxsize2;
2053 bool reverse;
2054 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2055 &offset2, &size2, &maxsize2,
2056 &reverse);
2057 if (!reverse
2058 && maxsize2 != -1
2059 && maxsize2 == size2
2060 && operand_equal_p (base, base2, 0)
2061 && offset2 <= offset
2062 && offset2 + size2 >= offset + maxsize
2063 /* ??? We can't handle bitfield precision extracts without
2064 either using an alternate type for the BIT_FIELD_REF and
2065 then doing a conversion or possibly adjusting the offset
2066 according to endianness. */
2067 && (! INTEGRAL_TYPE_P (vr->type)
2068 || ref->size == TYPE_PRECISION (vr->type))
2069 && ref->size % BITS_PER_UNIT == 0)
2071 code_helper rcode = BIT_FIELD_REF;
2072 tree ops[3];
2073 ops[0] = SSA_VAL (gimple_assign_rhs1 (def_stmt));
2074 ops[1] = bitsize_int (ref->size);
2075 ops[2] = bitsize_int (offset - offset2);
2076 tree val = vn_nary_build_or_lookup (rcode, vr->type, ops);
2077 if (val
2078 && (TREE_CODE (val) != SSA_NAME
2079 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2081 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2082 (vuse, vr->set, vr->type, vr->operands, val);
2083 return res;
2088 /* 5) For aggregate copies translate the reference through them if
2089 the copy kills ref. */
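	  /* Illustrative only: for an aggregate copy

	       b = a;
	       ... = b.x;

	     the read of b.x is rewritten into a lookup of a.x and the
	     walk continues with the translated reference.  */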
2090 else if (vn_walk_kind == VN_WALKREWRITE
2091 && gimple_assign_single_p (def_stmt)
2092 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2093 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2094 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2096 tree base2;
2097 HOST_WIDE_INT maxsize2;
2098 int i, j, k;
2099 auto_vec<vn_reference_op_s> rhs;
2100 vn_reference_op_t vro;
2101 ao_ref r;
2103 if (!lhs_ref_ok)
2104 return (void *)-1;
2106 /* See if the assignment kills REF. */
2107 base2 = ao_ref_base (&lhs_ref);
2108 maxsize2 = lhs_ref.max_size;
2109 if (maxsize2 == -1
2110 || (base != base2
2111 && (TREE_CODE (base) != MEM_REF
2112 || TREE_CODE (base2) != MEM_REF
2113 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2114 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2115 TREE_OPERAND (base2, 1))))
2116 || !stmt_kills_ref_p (def_stmt, ref))
2117 return (void *)-1;
2119 /* Find the common base of ref and the lhs. lhs_ops already
2120 contains valueized operands for the lhs. */
2121 i = vr->operands.length () - 1;
2122 j = lhs_ops.length () - 1;
2123 while (j >= 0 && i >= 0
2124 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2126 i--;
2127 j--;
2130 /* ??? The innermost op should always be a MEM_REF and we already
2131 checked that the assignment to the lhs kills vr. Thus for
2132 aggregate copies using char[] types the vn_reference_op_eq
2133 may fail when comparing types for compatibility. But we really
2134 don't care here - further lookups with the rewritten operands
2135 will simply fail if we messed up types too badly. */
2136 HOST_WIDE_INT extra_off = 0;
2137 if (j == 0 && i >= 0
2138 && lhs_ops[0].opcode == MEM_REF
2139 && lhs_ops[0].off != -1)
2141 if (lhs_ops[0].off == vr->operands[i].off)
2142 i--, j--;
2143 else if (vr->operands[i].opcode == MEM_REF
2144 && vr->operands[i].off != -1)
2146 extra_off = vr->operands[i].off - lhs_ops[0].off;
2147 i--, j--;
2151 /* i now points to the first additional op.
2152 ??? LHS may not be completely contained in VR, one or more
2153 VIEW_CONVERT_EXPRs could be in its way. We could at least
2154 try handling outermost VIEW_CONVERT_EXPRs. */
2155 if (j != -1)
2156 return (void *)-1;
2158 /* Punt if the additional ops contain a storage order barrier. */
2159 for (k = i; k >= 0; k--)
2161 vro = &vr->operands[k];
2162 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2163 return (void *)-1;
2166 /* Now re-write REF to be based on the rhs of the assignment. */
2167 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2169 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2170 if (extra_off != 0)
2172 if (rhs.length () < 2
2173 || rhs[0].opcode != MEM_REF
2174 || rhs[0].off == -1)
2175 return (void *)-1;
2176 rhs[0].off += extra_off;
2177 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
2178 build_int_cst (TREE_TYPE (rhs[0].op0),
2179 extra_off));
2182	  /* We need to prepend vr->operands[0..i] to rhs.  */
2183 vec<vn_reference_op_s> old = vr->operands;
2184 if (i + 1 + rhs.length () > vr->operands.length ())
2185 vr->operands.safe_grow (i + 1 + rhs.length ());
2186 else
2187 vr->operands.truncate (i + 1 + rhs.length ());
2188 FOR_EACH_VEC_ELT (rhs, j, vro)
2189 vr->operands[i + 1 + j] = *vro;
2190 vr->operands = valueize_refs (vr->operands);
2191 if (old == shared_lookup_references)
2192 shared_lookup_references = vr->operands;
2193 vr->hashcode = vn_reference_compute_hash (vr);
2195 /* Try folding the new reference to a constant. */
2196 tree val = fully_constant_vn_reference_p (vr);
2197 if (val)
2198 return vn_reference_lookup_or_insert_for_pieces
2199 (vuse, vr->set, vr->type, vr->operands, val);
2201 /* Adjust *ref from the new operands. */
2202 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2203 return (void *)-1;
2204 /* This can happen with bitfields. */
2205 if (ref->size != r.size)
2206 return (void *)-1;
2207 *ref = r;
2209 /* Do not update last seen VUSE after translating. */
2210 last_vuse_ptr = NULL;
2212 /* Keep looking for the adjusted *REF / VR pair. */
2213 return NULL;
2216 /* 6) For memcpy copies translate the reference through them if
2217 the copy kills ref. */
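	  /* Illustrative only: similar to the aggregate copy case above, for

	       memcpy (&b, &a, sizeof (b));
	       ... = b.x;

	     the read is rewritten to a MEM_REF based on &a at the
	     corresponding offset and the walk continues.  */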
2218 else if (vn_walk_kind == VN_WALKREWRITE
2219 && is_gimple_reg_type (vr->type)
2220 /* ??? Handle BCOPY as well. */
2221 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2222 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2223 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2224 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2225 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2226 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2227 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2228 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
2230 tree lhs, rhs;
2231 ao_ref r;
2232 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
2233 vn_reference_op_s op;
2234 HOST_WIDE_INT at;
2236 /* Only handle non-variable, addressable refs. */
2237 if (ref->size != maxsize
2238 || offset % BITS_PER_UNIT != 0
2239 || ref->size % BITS_PER_UNIT != 0)
2240 return (void *)-1;
2242 /* Extract a pointer base and an offset for the destination. */
2243 lhs = gimple_call_arg (def_stmt, 0);
2244 lhs_offset = 0;
2245 if (TREE_CODE (lhs) == SSA_NAME)
2247 lhs = SSA_VAL (lhs);
2248 if (TREE_CODE (lhs) == SSA_NAME)
2250 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2251 if (gimple_assign_single_p (def_stmt)
2252 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2253 lhs = gimple_assign_rhs1 (def_stmt);
2256 if (TREE_CODE (lhs) == ADDR_EXPR)
2258 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2259 &lhs_offset);
2260 if (!tem)
2261 return (void *)-1;
2262 if (TREE_CODE (tem) == MEM_REF
2263 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2265 lhs = TREE_OPERAND (tem, 0);
2266 if (TREE_CODE (lhs) == SSA_NAME)
2267 lhs = SSA_VAL (lhs);
2268 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2270 else if (DECL_P (tem))
2271 lhs = build_fold_addr_expr (tem);
2272 else
2273 return (void *)-1;
2275 if (TREE_CODE (lhs) != SSA_NAME
2276 && TREE_CODE (lhs) != ADDR_EXPR)
2277 return (void *)-1;
2279 /* Extract a pointer base and an offset for the source. */
2280 rhs = gimple_call_arg (def_stmt, 1);
2281 rhs_offset = 0;
2282 if (TREE_CODE (rhs) == SSA_NAME)
2283 rhs = SSA_VAL (rhs);
2284 if (TREE_CODE (rhs) == ADDR_EXPR)
2286 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2287 &rhs_offset);
2288 if (!tem)
2289 return (void *)-1;
2290 if (TREE_CODE (tem) == MEM_REF
2291 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2293 rhs = TREE_OPERAND (tem, 0);
2294 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2296 else if (DECL_P (tem))
2297 rhs = build_fold_addr_expr (tem);
2298 else
2299 return (void *)-1;
2301 if (TREE_CODE (rhs) != SSA_NAME
2302 && TREE_CODE (rhs) != ADDR_EXPR)
2303 return (void *)-1;
2305 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2307 /* The bases of the destination and the references have to agree. */
2308 if ((TREE_CODE (base) != MEM_REF
2309 && !DECL_P (base))
2310 || (TREE_CODE (base) == MEM_REF
2311 && (TREE_OPERAND (base, 0) != lhs
2312 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
2313 || (DECL_P (base)
2314 && (TREE_CODE (lhs) != ADDR_EXPR
2315 || TREE_OPERAND (lhs, 0) != base)))
2316 return (void *)-1;
2318 at = offset / BITS_PER_UNIT;
2319 if (TREE_CODE (base) == MEM_REF)
2320 at += tree_to_uhwi (TREE_OPERAND (base, 1));
2321 /* If the access is completely outside of the memcpy destination
2322 area there is no aliasing. */
2323 if (lhs_offset >= at + maxsize / BITS_PER_UNIT
2324 || lhs_offset + copy_size <= at)
2325 return NULL;
2326 /* And the access has to be contained within the memcpy destination. */
2327 if (lhs_offset > at
2328 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2329 return (void *)-1;
2331 /* Make room for 2 operands in the new reference. */
2332 if (vr->operands.length () < 2)
2334 vec<vn_reference_op_s> old = vr->operands;
2335 vr->operands.safe_grow_cleared (2);
2336 if (old == shared_lookup_references)
2337 shared_lookup_references = vr->operands;
2339 else
2340 vr->operands.truncate (2);
2342 /* The looked-through reference is a simple MEM_REF. */
2343 memset (&op, 0, sizeof (op));
2344 op.type = vr->type;
2345 op.opcode = MEM_REF;
2346 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2347 op.off = at - lhs_offset + rhs_offset;
2348 vr->operands[0] = op;
2349 op.type = TREE_TYPE (rhs);
2350 op.opcode = TREE_CODE (rhs);
2351 op.op0 = rhs;
2352 op.off = -1;
2353 vr->operands[1] = op;
2354 vr->hashcode = vn_reference_compute_hash (vr);
2356 /* Try folding the new reference to a constant. */
2357 tree val = fully_constant_vn_reference_p (vr);
2358 if (val)
2359 return vn_reference_lookup_or_insert_for_pieces
2360 (vuse, vr->set, vr->type, vr->operands, val);
2362 /* Adjust *ref from the new operands. */
2363 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2364 return (void *)-1;
2365 /* This can happen with bitfields. */
2366 if (ref->size != r.size)
2367 return (void *)-1;
2368 *ref = r;
2370 /* Do not update last seen VUSE after translating. */
2371 last_vuse_ptr = NULL;
2373 /* Keep looking for the adjusted *REF / VR pair. */
2374 return NULL;
2377 /* Bail out and stop walking. */
2378 return (void *)-1;
2381 /* Return a reference op vector from OP that can be used for
2382 vn_reference_lookup_pieces. The caller is responsible for releasing
2383 the vector. */
2385 vec<vn_reference_op_s>
2386 vn_reference_operands_for_lookup (tree op)
2388 bool valueized;
2389 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2392 /* Lookup a reference operation by its parts, in the current hash table.
2393 Returns the resulting value number if it exists in the hash table,
2394 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2395 vn_reference_t stored in the hashtable if something is found. */
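/* A hypothetical caller sketch (not code from this file) combining the
   two entry points above:

     vec<vn_reference_op_s> ops = vn_reference_operands_for_lookup (op);
     vn_reference_t res;
     tree val = vn_reference_lookup_pieces (gimple_vuse (stmt),
					    get_alias_set (op),
					    TREE_TYPE (op), ops, &res,
					    VN_WALK);
     ops.release ();

   The operand vector must be released by the caller as noted above.  */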
2397 tree
2398 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2399 vec<vn_reference_op_s> operands,
2400 vn_reference_t *vnresult, vn_lookup_kind kind)
2402 struct vn_reference_s vr1;
2403 vn_reference_t tmp;
2404 tree cst;
2406 if (!vnresult)
2407 vnresult = &tmp;
2408 *vnresult = NULL;
2410 vr1.vuse = vuse_ssa_val (vuse);
2411 shared_lookup_references.truncate (0);
2412 shared_lookup_references.safe_grow (operands.length ());
2413 memcpy (shared_lookup_references.address (),
2414 operands.address (),
2415 sizeof (vn_reference_op_s)
2416 * operands.length ());
2417 vr1.operands = operands = shared_lookup_references
2418 = valueize_refs (shared_lookup_references);
2419 vr1.type = type;
2420 vr1.set = set;
2421 vr1.hashcode = vn_reference_compute_hash (&vr1);
2422 if ((cst = fully_constant_vn_reference_p (&vr1)))
2423 return cst;
2425 vn_reference_lookup_1 (&vr1, vnresult);
2426 if (!*vnresult
2427 && kind != VN_NOWALK
2428 && vr1.vuse)
2430 ao_ref r;
2431 vn_walk_kind = kind;
2432 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2433 *vnresult =
2434 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2435 vn_reference_lookup_2,
2436 vn_reference_lookup_3,
2437 vuse_ssa_val, &vr1);
2438 gcc_checking_assert (vr1.operands == shared_lookup_references);
2441 if (*vnresult)
2442 return (*vnresult)->result;
2444 return NULL_TREE;
2447 /* Lookup OP in the current hash table, and return the resulting value
2448 number if it exists in the hash table. Return NULL_TREE if it does
2449 not exist in the hash table or if the result field of the structure
2450    was NULL.  VNRESULT will be filled in with the vn_reference_t
2451 stored in the hashtable if one exists. When TBAA_P is false assume
2452 we are looking up a store and treat it as having alias-set zero. */
2454 tree
2455 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2456 vn_reference_t *vnresult, bool tbaa_p)
2458 vec<vn_reference_op_s> operands;
2459 struct vn_reference_s vr1;
2460 tree cst;
2461   bool valueized_anything;
2463 if (vnresult)
2464 *vnresult = NULL;
2466 vr1.vuse = vuse_ssa_val (vuse);
2467 vr1.operands = operands
2468     = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2469 vr1.type = TREE_TYPE (op);
2470 vr1.set = tbaa_p ? get_alias_set (op) : 0;
2471 vr1.hashcode = vn_reference_compute_hash (&vr1);
2472 if ((cst = fully_constant_vn_reference_p (&vr1)))
2473 return cst;
2475 if (kind != VN_NOWALK
2476 && vr1.vuse)
2478 vn_reference_t wvnresult;
2479 ao_ref r;
2480 /* Make sure to use a valueized reference if we valueized anything.
2481 Otherwise preserve the full reference for advanced TBAA. */
2482   if (!valueized_anything
2483 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2484 vr1.operands))
2485 ao_ref_init (&r, op);
2486 if (! tbaa_p)
2487 r.ref_alias_set = r.base_alias_set = 0;
2488 vn_walk_kind = kind;
2489 wvnresult =
2490 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2491 vn_reference_lookup_2,
2492 vn_reference_lookup_3,
2493 vuse_ssa_val, &vr1);
2494 gcc_checking_assert (vr1.operands == shared_lookup_references);
2495 if (wvnresult)
2497 if (vnresult)
2498 *vnresult = wvnresult;
2499 return wvnresult->result;
2502 return NULL_TREE;
2505 return vn_reference_lookup_1 (&vr1, vnresult);
2508 /* Lookup CALL in the current hash table and return the entry in
2509 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2511 void
2512 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2513 vn_reference_t vr)
2515 if (vnresult)
2516 *vnresult = NULL;
2518 tree vuse = gimple_vuse (call);
2520 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2521 vr->operands = valueize_shared_reference_ops_from_call (call);
2522 vr->type = gimple_expr_type (call);
2523 vr->set = 0;
2524 vr->hashcode = vn_reference_compute_hash (vr);
2525 vn_reference_lookup_1 (vr, vnresult);
2528 /* Insert OP into the current hash table with a value number of
2529 RESULT, and return the resulting reference structure we created. */
2531 static vn_reference_t
2532 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2534 vn_reference_s **slot;
2535 vn_reference_t vr1;
2536 bool tem;
2538 vr1 = current_info->references_pool->allocate ();
2539 if (TREE_CODE (result) == SSA_NAME)
2540 vr1->value_id = VN_INFO (result)->value_id;
2541 else
2542 vr1->value_id = get_or_alloc_constant_value_id (result);
2543 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2544 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2545 vr1->type = TREE_TYPE (op);
2546 vr1->set = get_alias_set (op);
2547 vr1->hashcode = vn_reference_compute_hash (vr1);
2548 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2549 vr1->result_vdef = vdef;
2551 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2552 INSERT);
2554   /* Because we look up stores using vuses, and value number failures
2555 using the vdefs (see visit_reference_op_store for how and why),
2556 it's possible that on failure we may try to insert an already
2557 inserted store. This is not wrong, there is no ssa name for a
2558 store that we could use as a differentiator anyway. Thus, unlike
2559 the other lookup functions, you cannot gcc_assert (!*slot)
2560 here. */
2562 /* But free the old slot in case of a collision. */
2563 if (*slot)
2564 free_reference (*slot);
2566 *slot = vr1;
2567 return vr1;
2570 /* Insert a reference by its pieces into the current hash table with
2571 a value number of RESULT. Return the resulting reference
2572 structure we created. */
2574 vn_reference_t
2575 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2576 vec<vn_reference_op_s> operands,
2577 tree result, unsigned int value_id)
2580 vn_reference_s **slot;
2581 vn_reference_t vr1;
2583 vr1 = current_info->references_pool->allocate ();
2584 vr1->value_id = value_id;
2585 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2586 vr1->operands = valueize_refs (operands);
2587 vr1->type = type;
2588 vr1->set = set;
2589 vr1->hashcode = vn_reference_compute_hash (vr1);
2590 if (result && TREE_CODE (result) == SSA_NAME)
2591 result = SSA_VAL (result);
2592 vr1->result = result;
2594 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2595 INSERT);
2597 /* At this point we should have all the things inserted that we have
2598 seen before, and we should never try inserting something that
2599 already exists. */
2600 gcc_assert (!*slot);
2601 if (*slot)
2602 free_reference (*slot);
2604 *slot = vr1;
2605 return vr1;
2608 /* Compute and return the hash value for nary operation VNO1.  */
2610 static hashval_t
2611 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2613 inchash::hash hstate;
2614 unsigned i;
2616 for (i = 0; i < vno1->length; ++i)
2617 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2618 vno1->op[i] = SSA_VAL (vno1->op[i]);
2620 if (((vno1->length == 2
2621 && commutative_tree_code (vno1->opcode))
2622 || (vno1->length == 3
2623 && commutative_ternary_tree_code (vno1->opcode)))
2624 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2625 std::swap (vno1->op[0], vno1->op[1]);
2626 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2627 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2629 std::swap (vno1->op[0], vno1->op[1]);
2630 vno1->opcode = swap_tree_comparison (vno1->opcode);
2633 hstate.add_int (vno1->opcode);
2634 for (i = 0; i < vno1->length; ++i)
2635 inchash::add_expr (vno1->op[i], hstate);
2637 return hstate.end ();
2640 /* Compare nary operations VNO1 and VNO2 and return true if they are
2641 equivalent. */
2643 bool
2644 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2646 unsigned i;
2648 if (vno1->hashcode != vno2->hashcode)
2649 return false;
2651 if (vno1->length != vno2->length)
2652 return false;
2654 if (vno1->opcode != vno2->opcode
2655 || !types_compatible_p (vno1->type, vno2->type))
2656 return false;
2658 for (i = 0; i < vno1->length; ++i)
2659 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2660 return false;
2662   /* BIT_INSERT_EXPR has an implicit operand as the type precision
2663 of op1. Need to check to make sure they are the same. */
2664 if (vno1->opcode == BIT_INSERT_EXPR
2665 && TREE_CODE (vno1->op[1]) == INTEGER_CST
2666 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
2667 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
2668 return false;
2670 return true;
2673 /* Initialize VNO from the pieces provided. */
2675 static void
2676 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2677 enum tree_code code, tree type, tree *ops)
2679 vno->opcode = code;
2680 vno->length = length;
2681 vno->type = type;
2682 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2685 /* Initialize VNO from OP. */
2687 static void
2688 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2690 unsigned i;
2692 vno->opcode = TREE_CODE (op);
2693 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2694 vno->type = TREE_TYPE (op);
2695 for (i = 0; i < vno->length; ++i)
2696 vno->op[i] = TREE_OPERAND (op, i);
2699 /* Return the number of operands for a vn_nary ops structure from STMT. */
2701 static unsigned int
2702 vn_nary_length_from_stmt (gimple *stmt)
2704 switch (gimple_assign_rhs_code (stmt))
2706 case REALPART_EXPR:
2707 case IMAGPART_EXPR:
2708 case VIEW_CONVERT_EXPR:
2709 return 1;
2711 case BIT_FIELD_REF:
2712 return 3;
2714 case CONSTRUCTOR:
2715 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2717 default:
2718 return gimple_num_ops (stmt) - 1;
2722 /* Initialize VNO from STMT. */
2724 static void
2725 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2727 unsigned i;
2729 vno->opcode = gimple_assign_rhs_code (stmt);
2730 vno->type = gimple_expr_type (stmt);
2731 switch (vno->opcode)
2733 case REALPART_EXPR:
2734 case IMAGPART_EXPR:
2735 case VIEW_CONVERT_EXPR:
2736 vno->length = 1;
2737 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2738 break;
2740 case BIT_FIELD_REF:
2741 vno->length = 3;
2742 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2743 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2744 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2745 break;
2747 case CONSTRUCTOR:
2748 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2749 for (i = 0; i < vno->length; ++i)
2750 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2751 break;
2753 default:
2754 gcc_checking_assert (!gimple_assign_single_p (stmt));
2755 vno->length = gimple_num_ops (stmt) - 1;
2756 for (i = 0; i < vno->length; ++i)
2757 vno->op[i] = gimple_op (stmt, i + 1);
2761 /* Compute the hashcode for VNO and look for it in the hash table;
2762 return the resulting value number if it exists in the hash table.
2763 Return NULL_TREE if it does not exist in the hash table or if the
2764 result field of the operation is NULL. VNRESULT will contain the
2765 vn_nary_op_t from the hashtable if it exists. */
2767 static tree
2768 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2770 vn_nary_op_s **slot;
2772 if (vnresult)
2773 *vnresult = NULL;
2775 vno->hashcode = vn_nary_op_compute_hash (vno);
2776 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2777 NO_INSERT);
2778 if (!slot && current_info == optimistic_info)
2779 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2780 NO_INSERT);
2781 if (!slot)
2782 return NULL_TREE;
2783 if (vnresult)
2784 *vnresult = *slot;
2785 return (*slot)->result;
2788 /* Lookup a n-ary operation by its pieces and return the resulting value
2789 number if it exists in the hash table. Return NULL_TREE if it does
2790 not exist in the hash table or if the result field of the operation
2791 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2792 if it exists. */
2794 tree
2795 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2796 tree type, tree *ops, vn_nary_op_t *vnresult)
2798 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2799 sizeof_vn_nary_op (length));
2800 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2801 return vn_nary_op_lookup_1 (vno1, vnresult);
2804 /* Lookup OP in the current hash table, and return the resulting value
2805 number if it exists in the hash table. Return NULL_TREE if it does
2806 not exist in the hash table or if the result field of the operation
2807 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2808 if it exists. */
2810 tree
2811 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2813 vn_nary_op_t vno1
2814 = XALLOCAVAR (struct vn_nary_op_s,
2815 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2816 init_vn_nary_op_from_op (vno1, op);
2817 return vn_nary_op_lookup_1 (vno1, vnresult);
2820 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2821 value number if it exists in the hash table. Return NULL_TREE if
2822 it does not exist in the hash table. VNRESULT will contain the
2823 vn_nary_op_t from the hashtable if it exists. */
2825 tree
2826 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2828 vn_nary_op_t vno1
2829 = XALLOCAVAR (struct vn_nary_op_s,
2830 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2831 init_vn_nary_op_from_stmt (vno1, stmt);
2832 return vn_nary_op_lookup_1 (vno1, vnresult);
2835 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2837 static vn_nary_op_t
2838 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2840 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2843 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2844 obstack. */
2846 static vn_nary_op_t
2847 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2849 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2850 &current_info->nary_obstack);
2852 vno1->value_id = value_id;
2853 vno1->length = length;
2854 vno1->result = result;
2856 return vno1;
2859 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2860 VNO->HASHCODE first. */
2862 static vn_nary_op_t
2863 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2864 bool compute_hash)
2866 vn_nary_op_s **slot;
2868 if (compute_hash)
2869 vno->hashcode = vn_nary_op_compute_hash (vno);
2871 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2872   /* While we do not want to insert things twice, it's awkward to
2873 avoid it in the case where visit_nary_op pattern-matches stuff
2874 and ends up simplifying the replacement to itself. We then
2875 get two inserts, one from visit_nary_op and one from
2876 vn_nary_build_or_lookup.
2877 So allow inserts with the same value number. */
2878 if (*slot && (*slot)->result == vno->result)
2879 return *slot;
2881 gcc_assert (!*slot);
2883 *slot = vno;
2884 return vno;
2887 /* Insert an n-ary operation into the current hash table using its
2888 pieces. Return the vn_nary_op_t structure we created and put in
2889 the hashtable. */
2891 vn_nary_op_t
2892 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2893 tree type, tree *ops,
2894 tree result, unsigned int value_id)
2896 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2897 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2898 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2901 /* Insert OP into the current hash table with a value number of
2902 RESULT. Return the vn_nary_op_t structure we created and put in
2903 the hashtable. */
2905 vn_nary_op_t
2906 vn_nary_op_insert (tree op, tree result)
2908 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2909 vn_nary_op_t vno1;
2911 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2912 init_vn_nary_op_from_op (vno1, op);
2913 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2916 /* Insert the rhs of STMT into the current hash table with a value number of
2917 RESULT. */
2919 static vn_nary_op_t
2920 vn_nary_op_insert_stmt (gimple *stmt, tree result)
2922 vn_nary_op_t vno1
2923 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2924 result, VN_INFO (result)->value_id);
2925 init_vn_nary_op_from_stmt (vno1, stmt);
2926 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2929 /* Compute a hashcode for PHI operation VP1 and return it. */
2931 static inline hashval_t
2932 vn_phi_compute_hash (vn_phi_t vp1)
2934 inchash::hash hstate (vp1->phiargs.length () > 2
2935 ? vp1->block->index : vp1->phiargs.length ());
2936 tree phi1op;
2937 tree type;
2938 edge e;
2939 edge_iterator ei;
2941 /* If all PHI arguments are constants we need to distinguish
2942 the PHI node via its type. */
2943 type = vp1->type;
2944 hstate.merge_hash (vn_hash_type (type));
2946 FOR_EACH_EDGE (e, ei, vp1->block->preds)
2948       /* Don't hash backedge values; they need to be handled as VN_TOP
2949 for optimistic value-numbering. */
2950 if (e->flags & EDGE_DFS_BACK)
2951 continue;
2953 phi1op = vp1->phiargs[e->dest_idx];
2954 if (phi1op == VN_TOP)
2955 continue;
2956 inchash::add_expr (phi1op, hstate);
2959 return hstate.end ();
2963 /* Return true if COND1 and COND2 represent the same condition, set
2964 *INVERTED_P if one needs to be inverted to make it the same as
2965 the other. */
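/* For example, with integer operands "a < b" and "b > a" represent the
   same condition with the operands swapped and *INVERTED_P left false,
   while "a < b" and "a >= b" compare equal with *INVERTED_P set.  */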
2967 static bool
2968 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
2969 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
2971 enum tree_code code1 = gimple_cond_code (cond1);
2972 enum tree_code code2 = gimple_cond_code (cond2);
2974 *inverted_p = false;
2975 if (code1 == code2)
2977 else if (code1 == swap_tree_comparison (code2))
2978 std::swap (lhs2, rhs2);
2979 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
2980 *inverted_p = true;
2981 else if (code1 == invert_tree_comparison
2982 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
2984 std::swap (lhs2, rhs2);
2985 *inverted_p = true;
2987 else
2988 return false;
2990 return ((expressions_equal_p (lhs1, lhs2)
2991 && expressions_equal_p (rhs1, rhs2))
2992 || (commutative_tree_code (code1)
2993 && expressions_equal_p (lhs1, rhs2)
2994 && expressions_equal_p (rhs1, lhs2)));
2997 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2999 static int
3000 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
3002 if (vp1->hashcode != vp2->hashcode)
3003 return false;
3005 if (vp1->block != vp2->block)
3007 if (vp1->phiargs.length () != vp2->phiargs.length ())
3008 return false;
3010 switch (vp1->phiargs.length ())
3012 case 1:
3013 /* Single-arg PHIs are just copies. */
3014 break;
3016 case 2:
3018 /* Rule out backedges into the PHI. */
3019 if (vp1->block->loop_father->header == vp1->block
3020 || vp2->block->loop_father->header == vp2->block)
3021 return false;
3023 /* If the PHI nodes do not have compatible types
3024 they are not the same. */
3025 if (!types_compatible_p (vp1->type, vp2->type))
3026 return false;
3028 basic_block idom1
3029 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3030 basic_block idom2
3031 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
3032	/* If the immediate dominators end in switch stmts, multiple
3033	   values may end up in the same PHI arg via intermediate
3034 CFG merges. */
3035 if (EDGE_COUNT (idom1->succs) != 2
3036 || EDGE_COUNT (idom2->succs) != 2)
3037 return false;
3039 /* Verify the controlling stmt is the same. */
3040 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3041 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3042 if (! last1 || ! last2)
3043 return false;
3044 bool inverted_p;
3045 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3046 last2, vp2->cclhs, vp2->ccrhs,
3047 &inverted_p))
3048 return false;
3050 /* Get at true/false controlled edges into the PHI. */
3051 edge te1, te2, fe1, fe2;
3052 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3053 &te1, &fe1)
3054 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3055 &te2, &fe2))
3056 return false;
3058       /* Swap edges if the second condition is the inverse of the
3059 first. */
3060 if (inverted_p)
3061 std::swap (te2, fe2);
3063 /* ??? Handle VN_TOP specially. */
3064 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3065 vp2->phiargs[te2->dest_idx])
3066 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3067 vp2->phiargs[fe2->dest_idx]))
3068 return false;
3070 return true;
3073 default:
3074 return false;
3078 /* If the PHI nodes do not have compatible types
3079 they are not the same. */
3080 if (!types_compatible_p (vp1->type, vp2->type))
3081 return false;
3083   /* Any phi in the same block will have its arguments in the
3084 same edge order, because of how we store phi nodes. */
3085 int i;
3086 tree phi1op;
3087 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
3089 tree phi2op = vp2->phiargs[i];
3090 if (phi1op == VN_TOP || phi2op == VN_TOP)
3091 continue;
3092 if (!expressions_equal_p (phi1op, phi2op))
3093 return false;
3096 return true;
3099 static vec<tree> shared_lookup_phiargs;
3101 /* Lookup PHI in the current hash table, and return the resulting
3102 value number if it exists in the hash table. Return NULL_TREE if
3103 it does not exist in the hash table. */
3105 static tree
3106 vn_phi_lookup (gimple *phi)
3108 vn_phi_s **slot;
3109 struct vn_phi_s vp1;
3110 edge e;
3111 edge_iterator ei;
3113 shared_lookup_phiargs.truncate (0);
3114 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
3116 /* Canonicalize the SSA_NAME's to their value number. */
3117 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3119 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3120 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3121 shared_lookup_phiargs[e->dest_idx] = def;
3123 vp1.type = TREE_TYPE (gimple_phi_result (phi));
3124 vp1.phiargs = shared_lookup_phiargs;
3125 vp1.block = gimple_bb (phi);
3126 /* Extract values of the controlling condition. */
3127 vp1.cclhs = NULL_TREE;
3128 vp1.ccrhs = NULL_TREE;
3129 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1.block);
3130 if (EDGE_COUNT (idom1->succs) == 2)
3131 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3133 vp1.cclhs = vn_valueize (gimple_cond_lhs (last1));
3134 vp1.ccrhs = vn_valueize (gimple_cond_rhs (last1));
3136 vp1.hashcode = vn_phi_compute_hash (&vp1);
3137 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3138 NO_INSERT);
3139 if (!slot && current_info == optimistic_info)
3140 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3141 NO_INSERT);
3142 if (!slot)
3143 return NULL_TREE;
3144 return (*slot)->result;
3147 /* Insert PHI into the current hash table with a value number of
3148 RESULT. */
3150 static vn_phi_t
3151 vn_phi_insert (gimple *phi, tree result)
3153 vn_phi_s **slot;
3154 vn_phi_t vp1 = current_info->phis_pool->allocate ();
3155 vec<tree> args = vNULL;
3156 edge e;
3157 edge_iterator ei;
3159 args.safe_grow (gimple_phi_num_args (phi));
3161 /* Canonicalize the SSA_NAME's to their value number. */
3162 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3164 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3165 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3166 args[e->dest_idx] = def;
3168 vp1->value_id = VN_INFO (result)->value_id;
3169 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3170 vp1->phiargs = args;
3171 vp1->block = gimple_bb (phi);
3172 /* Extract values of the controlling condition. */
3173 vp1->cclhs = NULL_TREE;
3174 vp1->ccrhs = NULL_TREE;
3175 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3176 if (EDGE_COUNT (idom1->succs) == 2)
3177 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3179 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3180 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3182 vp1->result = result;
3183 vp1->hashcode = vn_phi_compute_hash (vp1);
3185 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3187 /* Because we iterate over phi operations more than once, it's
3188      possible the slot might already exist here, hence no assert.  */
3189 *slot = vp1;
3190 return vp1;
3194 /* Print set of components in strongly connected component SCC to OUT. */
3196 static void
3197 print_scc (FILE *out, vec<tree> scc)
3199 tree var;
3200 unsigned int i;
3202 fprintf (out, "SCC consists of %u:", scc.length ());
3203 FOR_EACH_VEC_ELT (scc, i, var)
3205 fprintf (out, " ");
3206 print_generic_expr (out, var);
3208 fprintf (out, "\n");
3211 /* Return true if BB1 is dominated by BB2 taking into account edges
3212 that are not executable. */
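/* For example, when BB1 has several predecessors but only one incoming
   edge is marked EDGE_EXECUTABLE, we step to that single executable
   predecessor and redo the dominance query there; similarly we may step
   through a single executable successor of BB2.  Only one such step is
   taken in each direction.  */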
3214 static bool
3215 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3217 edge_iterator ei;
3218 edge e;
3220 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3221 return true;
3223   /* Before iterating we'd like to know if there exists an
3224      (executable) path from bb2 to bb1 at all; if not we can
3225 directly return false. For now simply iterate once. */
3227 /* Iterate to the single executable bb1 predecessor. */
3228 if (EDGE_COUNT (bb1->preds) > 1)
3230 edge prede = NULL;
3231 FOR_EACH_EDGE (e, ei, bb1->preds)
3232 if (e->flags & EDGE_EXECUTABLE)
3234 if (prede)
3236 prede = NULL;
3237 break;
3239 prede = e;
3241 if (prede)
3243 bb1 = prede->src;
3245 /* Re-do the dominance check with changed bb1. */
3246 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3247 return true;
3251 /* Iterate to the single executable bb2 successor. */
3252 edge succe = NULL;
3253 FOR_EACH_EDGE (e, ei, bb2->succs)
3254 if (e->flags & EDGE_EXECUTABLE)
3256 if (succe)
3258 succe = NULL;
3259 break;
3261 succe = e;
3263 if (succe)
3265 /* Verify the reached block is only reached through succe.
3266 If there is only one edge we can spare us the dominator
3267 check and iterate directly. */
3268 if (EDGE_COUNT (succe->dest->preds) > 1)
3270 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3271 if (e != succe
3272 && (e->flags & EDGE_EXECUTABLE))
3274 succe = NULL;
3275 break;
3278 if (succe)
3280 bb2 = succe->dest;
3282 /* Re-do the dominance check with changed bb2. */
3283 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3284 return true;
3288 /* We could now iterate updating bb1 / bb2. */
3289 return false;
3292 /* Set the value number of FROM to TO, return true if it has changed
3293 as a result. */
3295 static inline bool
3296 set_ssa_val_to (tree from, tree to)
3298 tree currval = SSA_VAL (from);
3299 HOST_WIDE_INT toff, coff;
3301   /* The only things we allow as value numbers are ssa_names
3302 and invariants. So assert that here. We don't allow VN_TOP
3303 as visiting a stmt should produce a value-number other than
3304 that.
3305 ??? Still VN_TOP can happen for unreachable code, so force
3306 it to varying in that case. Not all code is prepared to
3307 get VN_TOP on valueization. */
3308 if (to == VN_TOP)
3310 if (dump_file && (dump_flags & TDF_DETAILS))
3311 fprintf (dump_file, "Forcing value number to varying on "
3312 "receiving VN_TOP\n");
3313 to = from;
3316 gcc_assert (to != NULL_TREE
3317 && ((TREE_CODE (to) == SSA_NAME
3318 && (to == from || SSA_VAL (to) == to))
3319 || is_gimple_min_invariant (to)));
3321 if (from != to)
3323 if (currval == from)
3325 if (dump_file && (dump_flags & TDF_DETAILS))
3327 fprintf (dump_file, "Not changing value number of ");
3328 print_generic_expr (dump_file, from);
3329 fprintf (dump_file, " from VARYING to ");
3330 print_generic_expr (dump_file, to);
3331 fprintf (dump_file, "\n");
3333 return false;
3335 else if (currval != VN_TOP
3336 && ! is_gimple_min_invariant (currval)
3337 && is_gimple_min_invariant (to))
3339 if (dump_file && (dump_flags & TDF_DETAILS))
3341 fprintf (dump_file, "Forcing VARYING instead of changing "
3342 "value number of ");
3343 print_generic_expr (dump_file, from);
3344 fprintf (dump_file, " from ");
3345 print_generic_expr (dump_file, currval);
3346 fprintf (dump_file, " (non-constant) to ");
3347 print_generic_expr (dump_file, to);
3348 fprintf (dump_file, " (constant)\n");
3350 to = from;
3352 else if (TREE_CODE (to) == SSA_NAME
3353 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3354 to = from;
3357 if (dump_file && (dump_flags & TDF_DETAILS))
3359 fprintf (dump_file, "Setting value number of ");
3360 print_generic_expr (dump_file, from);
3361 fprintf (dump_file, " to ");
3362 print_generic_expr (dump_file, to);
3365 if (currval != to
3366 && !operand_equal_p (currval, to, 0)
3367 /* Different undefined SSA names are not actually different. See
3368          PR82320 for a testcase where we'd otherwise not terminate iteration.  */
3369 && !(TREE_CODE (currval) == SSA_NAME
3370 && TREE_CODE (to) == SSA_NAME
3371 && ssa_undefined_value_p (currval, false)
3372 && ssa_undefined_value_p (to, false))
3373 /* ??? For addresses involving volatile objects or types operand_equal_p
3374 does not reliably detect ADDR_EXPRs as equal. We know we are only
3375 getting invariant gimple addresses here, so can use
3376 get_addr_base_and_unit_offset to do this comparison. */
3377 && !(TREE_CODE (currval) == ADDR_EXPR
3378 && TREE_CODE (to) == ADDR_EXPR
3379 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3380 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3381 && coff == toff))
3383 if (dump_file && (dump_flags & TDF_DETAILS))
3384 fprintf (dump_file, " (changed)\n");
3386 /* If we equate two SSA names we have to make the side-band info
3387 of the leader conservative (and remember whatever original value
3388 was present). */
3389 if (TREE_CODE (to) == SSA_NAME)
3391 if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3392 && SSA_NAME_RANGE_INFO (to))
3394 if (SSA_NAME_IS_DEFAULT_DEF (to)
3395 || dominated_by_p_w_unex
3396 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3397 gimple_bb (SSA_NAME_DEF_STMT (to))))
3398 /* Keep the info from the dominator. */
3400 else
3402 /* Save old info. */
3403 if (! VN_INFO (to)->info.range_info)
3405 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3406 VN_INFO (to)->range_info_anti_range_p
3407 = SSA_NAME_ANTI_RANGE_P (to);
3409 /* Rather than allocating memory and unioning the info
3410 just clear it. */
3411 if (dump_file && (dump_flags & TDF_DETAILS))
3413 fprintf (dump_file, "clearing range info of ");
3414 print_generic_expr (dump_file, to);
3415 fprintf (dump_file, "\n");
3417 SSA_NAME_RANGE_INFO (to) = NULL;
3420 else if (POINTER_TYPE_P (TREE_TYPE (to))
3421 && SSA_NAME_PTR_INFO (to))
3423 if (SSA_NAME_IS_DEFAULT_DEF (to)
3424 || dominated_by_p_w_unex
3425 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3426 gimple_bb (SSA_NAME_DEF_STMT (to))))
3427 /* Keep the info from the dominator. */
3429 else if (! SSA_NAME_PTR_INFO (from)
3430 /* Handle the case of trivially equivalent info. */
3431 || memcmp (SSA_NAME_PTR_INFO (to),
3432 SSA_NAME_PTR_INFO (from),
3433 sizeof (ptr_info_def)) != 0)
3435 /* Save old info. */
3436 if (! VN_INFO (to)->info.ptr_info)
3437 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3438 /* Rather than allocating memory and unioning the info
3439 just clear it. */
3440 if (dump_file && (dump_flags & TDF_DETAILS))
3442 fprintf (dump_file, "clearing points-to info of ");
3443 print_generic_expr (dump_file, to);
3444 fprintf (dump_file, "\n");
3446 SSA_NAME_PTR_INFO (to) = NULL;
3451 VN_INFO (from)->valnum = to;
3452 return true;
3454 if (dump_file && (dump_flags & TDF_DETAILS))
3455 fprintf (dump_file, "\n");
3456 return false;
3459 /* Mark as processed all the definitions in the defining stmt of USE, or
3460 the USE itself. */
3462 static void
3463 mark_use_processed (tree use)
3465 ssa_op_iter iter;
3466 def_operand_p defp;
3467 gimple *stmt = SSA_NAME_DEF_STMT (use);
3469 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3471 VN_INFO (use)->use_processed = true;
3472 return;
3475 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3477 tree def = DEF_FROM_PTR (defp);
3479 VN_INFO (def)->use_processed = true;
3483 /* Set the value number of all definitions in STMT to themselves.
3484 Return true if a value number changed. */
3486 static bool
3487 defs_to_varying (gimple *stmt)
3489 bool changed = false;
3490 ssa_op_iter iter;
3491 def_operand_p defp;
3493 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3495 tree def = DEF_FROM_PTR (defp);
3496 changed |= set_ssa_val_to (def, def);
3498 return changed;
3501 /* Visit a copy between LHS and RHS, return true if the value number
3502 changed. */
3504 static bool
3505 visit_copy (tree lhs, tree rhs)
3507 /* Valueize. */
3508 rhs = SSA_VAL (rhs);
3510 return set_ssa_val_to (lhs, rhs);
3513 /* Lookup a value for OP in type WIDE_TYPE, i.e. a value of WIDE_TYPE
3514    that is the same as OP when narrowed to the type of OP.  */
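/* E.g. for a short OP and an int WIDE_TYPE this either finds an already
   available (int) OP, or, when OP was defined as OP = (short) tem_1 from
   an int tem_1, returns tem_1 directly; integer constants are simply
   extended.  (tem_1 is a hypothetical SSA name.)  */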
3516 static tree
3517 valueized_wider_op (tree wide_type, tree op)
3519 if (TREE_CODE (op) == SSA_NAME)
3520 op = SSA_VAL (op);
3522 /* Either we have the op widened available. */
3523 tree ops[3] = {};
3524 ops[0] = op;
3525 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
3526 wide_type, ops, NULL);
3527 if (tem)
3528 return tem;
3530 /* Or the op is truncated from some existing value. */
3531 if (TREE_CODE (op) == SSA_NAME)
3533 gimple *def = SSA_NAME_DEF_STMT (op);
3534 if (is_gimple_assign (def)
3535 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3537 tem = gimple_assign_rhs1 (def);
3538 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
3540 if (TREE_CODE (tem) == SSA_NAME)
3541 tem = SSA_VAL (tem);
3542 return tem;
3547 /* For constants simply extend it. */
3548 if (TREE_CODE (op) == INTEGER_CST)
3549 return wide_int_to_tree (wide_type, wi::to_wide (op));
3551 return NULL_TREE;
3554 /* Visit a nary operator RHS, value number it, and return true if the
3555 value number of LHS has changed as a result. */
3557 static bool
3558 visit_nary_op (tree lhs, gassign *stmt)
3560 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3561 if (result)
3562 return set_ssa_val_to (lhs, result);
3564 /* Do some special pattern matching for redundancies of operations
3565 in different types. */
3566 enum tree_code code = gimple_assign_rhs_code (stmt);
3567 tree type = TREE_TYPE (lhs);
3568 tree rhs1 = gimple_assign_rhs1 (stmt);
3569 switch (code)
3571 CASE_CONVERT:
3572 /* Match arithmetic done in a different type where we can easily
3573 substitute the result from some earlier sign-changed or widened
3574 operation. */
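	  /* Illustrative only (SSA names hypothetical):

	       t1_1 = (int) a_2;        unsigned short a_2, b_3
	       t2_4 = (int) b_3;
	       w_5 = t1_1 + t2_4;
	       n_6 = a_2 + b_3;         done in unsigned short
	       l_7 = (int) n_6;

	     Here l_7 can be value numbered to w_5 & 0xffff, reusing the
	     already available wider addition.  */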
3575 if (INTEGRAL_TYPE_P (type)
3576 && TREE_CODE (rhs1) == SSA_NAME
3577 /* We only handle sign-changes or zero-extension -> & mask. */
3578 && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
3579 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
3580 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
3582 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
3583 if (def
3584 && (gimple_assign_rhs_code (def) == PLUS_EXPR
3585 || gimple_assign_rhs_code (def) == MINUS_EXPR
3586 || gimple_assign_rhs_code (def) == MULT_EXPR))
3588 tree ops[3] = {};
3589 /* Either we have the op widened available. */
3590 ops[0] = valueized_wider_op (type,
3591 gimple_assign_rhs1 (def));
3592 if (ops[0])
3593 ops[1] = valueized_wider_op (type,
3594 gimple_assign_rhs2 (def));
3595 if (ops[0] && ops[1])
3597 ops[0] = vn_nary_op_lookup_pieces
3598 (2, gimple_assign_rhs_code (def), type, ops, NULL);
3599	      /* We have the wider operation available.  */
3600 if (ops[0])
3602 unsigned lhs_prec = TYPE_PRECISION (type);
3603 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
3604 if (lhs_prec == rhs_prec)
3606 ops[1] = NULL_TREE;
3607 result = vn_nary_build_or_lookup (NOP_EXPR,
3608 type, ops);
3609 if (result)
3611 bool changed = set_ssa_val_to (lhs, result);
3612 vn_nary_op_insert_stmt (stmt, result);
3613 return changed;
3616 else
3618 ops[1] = wide_int_to_tree (type,
3619 wi::mask (rhs_prec, false,
3620 lhs_prec));
3621 result = vn_nary_build_or_lookup (BIT_AND_EXPR,
3622 TREE_TYPE (lhs),
3623 ops);
3624 if (result)
3626 bool changed = set_ssa_val_to (lhs, result);
3627 vn_nary_op_insert_stmt (stmt, result);
3628 return changed;
3635 default:;
3638 bool changed = set_ssa_val_to (lhs, lhs);
3639 vn_nary_op_insert_stmt (stmt, lhs);
3640 return changed;
3643 /* Visit a call STMT storing into LHS. Return true if the value number
3644 of the LHS has changed as a result. */
3646 static bool
3647 visit_reference_op_call (tree lhs, gcall *stmt)
3649 bool changed = false;
3650 struct vn_reference_s vr1;
3651 vn_reference_t vnresult = NULL;
3652 tree vdef = gimple_vdef (stmt);
3654 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3655 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3656 lhs = NULL_TREE;
3658 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3659 if (vnresult)
3661 if (vnresult->result_vdef && vdef)
3662 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3663 else if (vdef)
3664 /* If the call was discovered to be pure or const reflect
3665 that as far as possible. */
3666 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
3668 if (!vnresult->result && lhs)
3669 vnresult->result = lhs;
3671 if (vnresult->result && lhs)
3672 changed |= set_ssa_val_to (lhs, vnresult->result);
3674 else
3676 vn_reference_t vr2;
3677 vn_reference_s **slot;
3678 tree vdef_val = vdef;
3679 if (vdef)
3681	  /* If we value numbered an indirect call's function to
3682	     one not clobbering memory, value number its VDEF to its
3683 VUSE. */
3684 tree fn = gimple_call_fn (stmt);
3685 if (fn && TREE_CODE (fn) == SSA_NAME)
3687 fn = SSA_VAL (fn);
3688 if (TREE_CODE (fn) == ADDR_EXPR
3689 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3690 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
3691 & (ECF_CONST | ECF_PURE)))
3692 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
3694 changed |= set_ssa_val_to (vdef, vdef_val);
3696 if (lhs)
3697 changed |= set_ssa_val_to (lhs, lhs);
3698 vr2 = current_info->references_pool->allocate ();
3699 vr2->vuse = vr1.vuse;
3700 /* As we are not walking the virtual operand chain we know the
3701 shared_lookup_references are still original so we can re-use
3702 them here. */
3703 vr2->operands = vr1.operands.copy ();
3704 vr2->type = vr1.type;
3705 vr2->set = vr1.set;
3706 vr2->hashcode = vr1.hashcode;
3707 vr2->result = lhs;
3708 vr2->result_vdef = vdef_val;
3709 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3710 INSERT);
3711 gcc_assert (!*slot);
3712 *slot = vr2;
3715 return changed;
3718 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3719 and return true if the value number of the LHS has changed as a result. */
3721 static bool
3722 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3724 bool changed = false;
3725 tree last_vuse;
3726 tree result;
3728 last_vuse = gimple_vuse (stmt);
3729 last_vuse_ptr = &last_vuse;
3730 result = vn_reference_lookup (op, gimple_vuse (stmt),
3731 default_vn_walk_kind, NULL, true);
3732 last_vuse_ptr = NULL;
3734 /* We handle type-punning through unions by value-numbering based
3735 on offset and size of the access. Be prepared to handle a
3736 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
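      /* Illustrative only: for

	   union { int i; float f; } u;
	   u.i = x_1;
	   ... = u.f;

	 the lookup finds x_1 via matching offset and size and the result
	 is used as VIEW_CONVERT_EXPR <float> (x_1).  */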
3737 if (result
3738 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3740 /* We will be setting the value number of lhs to the value number
3741 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3742 So first simplify and lookup this expression to see if it
3743 is already available. */
3744 code_helper rcode = VIEW_CONVERT_EXPR;
3745 tree ops[3] = { result };
3746 result = vn_nary_build_or_lookup (rcode, TREE_TYPE (op), ops);
3749 if (result)
3750 changed = set_ssa_val_to (lhs, result);
3751 else
3753 changed = set_ssa_val_to (lhs, lhs);
3754 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3757 return changed;
3761 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3762 and return true if the value number of the LHS has changed as a result. */
3764 static bool
3765 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3767 bool changed = false;
3768 vn_reference_t vnresult = NULL;
3769 tree assign;
3770 bool resultsame = false;
3771 tree vuse = gimple_vuse (stmt);
3772 tree vdef = gimple_vdef (stmt);
3774 if (TREE_CODE (op) == SSA_NAME)
3775 op = SSA_VAL (op);
3777   /* First we want to look up using the *vuses* from the store and see
3778      if the last store to this location with the same address
3779 had the same value.
3781 The vuses represent the memory state before the store. If the
3782      memory state, address, and value of the store are the same as the
3783 last store to this location, then this store will produce the
3784 same memory state as that store.
3786 In this case the vdef versions for this store are value numbered to those
3787 vuse versions, since they represent the same memory state after
3788 this store.
3790 Otherwise, the vdefs for the store are used when inserting into
3791 the table, since the store generates a new memory state. */
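  /* Illustrative only (SSA names hypothetical): storing the same value
     twice with no intervening virtual definition,

       # .MEM_2 = VDEF <.MEM_1>
       *p_3 = x_4;
       ...
       # .MEM_5 = VDEF <.MEM_2>
       *p_3 = x_4;

     lets us value number .MEM_5 to .MEM_2, exposing the second store
     as redundant.  */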
3793 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
3794 if (vnresult
3795 && vnresult->result)
3797 tree result = vnresult->result;
3798 if (TREE_CODE (result) == SSA_NAME)
3799 result = SSA_VAL (result);
3800 resultsame = expressions_equal_p (result, op);
3801 if (resultsame)
3803 /* If the TBAA state isn't compatible for downstream reads
3804 we cannot value-number the VDEFs the same. */
3805 alias_set_type set = get_alias_set (lhs);
3806 if (vnresult->set != set
3807 && ! alias_set_subset_of (set, vnresult->set))
3808 resultsame = false;
3812 if (!resultsame)
3814 /* Only perform the following when being called from PRE
3815 which embeds tail merging. */
3816 if (default_vn_walk_kind == VN_WALK)
3818 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3819 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3820 if (vnresult)
3822 VN_INFO (vdef)->use_processed = true;
3823 return set_ssa_val_to (vdef, vnresult->result_vdef);
3827 if (dump_file && (dump_flags & TDF_DETAILS))
3829 fprintf (dump_file, "No store match\n");
3830 fprintf (dump_file, "Value numbering store ");
3831 print_generic_expr (dump_file, lhs);
3832 fprintf (dump_file, " to ");
3833 print_generic_expr (dump_file, op);
3834 fprintf (dump_file, "\n");
3836 /* Have to set value numbers before insert, since insert is
3837 going to valueize the references in-place. */
3838 if (vdef)
3839 changed |= set_ssa_val_to (vdef, vdef);
3841 /* Do not insert structure copies into the tables. */
3842 if (is_gimple_min_invariant (op)
3843 || is_gimple_reg (op))
3844 vn_reference_insert (lhs, op, vdef, NULL);
3846 /* Only perform the following when being called from PRE
3847 which embeds tail merging. */
3848 if (default_vn_walk_kind == VN_WALK)
3850 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3851 vn_reference_insert (assign, lhs, vuse, vdef);
3854 else
3856 /* We had a match, so value number the vdef to have the value
3857 number of the vuse it came from. */
3859 if (dump_file && (dump_flags & TDF_DETAILS))
3860 fprintf (dump_file, "Store matched earlier value, "
3861 "value numbering store vdefs to matching vuses.\n");
3863 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3866 return changed;
3869 /* Visit and value number PHI, return true if the value number
3870 changed. */
3872 static bool
3873 visit_phi (gimple *phi)
3875 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
3876 unsigned n_executable = 0;
3877 bool allsame = true;
3878 edge_iterator ei;
3879 edge e;
3881 /* TODO: We could check for this in init_sccvn, and replace this
3882 with a gcc_assert. */
3883 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3884 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3886 /* See if all non-TOP arguments have the same value. TOP is
3887 equivalent to everything, so we can ignore it. */
3888 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3889 if (e->flags & EDGE_EXECUTABLE)
3891 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3893 ++n_executable;
3894 if (TREE_CODE (def) == SSA_NAME)
3895 def = SSA_VAL (def);
3896 if (def == VN_TOP)
3898 /* Ignore undefined defs for sameval but record one. */
3899 else if (TREE_CODE (def) == SSA_NAME
3900 && ssa_undefined_value_p (def, false))
3901 seen_undef = def;
3902 else if (sameval == VN_TOP)
3903 sameval = def;
3904 else if (!expressions_equal_p (def, sameval))
3906 allsame = false;
3907 break;
3912 /* If none of the edges was executable keep the value-number at VN_TOP,
3913 if only a single edge is executable, use its value.
3914 if (n_executable <= 1)
3915 result = seen_undef ? seen_undef : sameval;
3916 /* If we saw only undefined values and VN_TOP use one of the
3917 undefined values. */
3918 else if (sameval == VN_TOP)
3919 result = seen_undef ? seen_undef : sameval;
3920 /* First see if it is equivalent to a phi node in this block. We prefer
3921 this as it allows IV elimination - see PRs 66502 and 67167. */
3922 else if ((result = vn_phi_lookup (phi)))
3924 /* If all values are the same use that, unless we've seen undefined
3925 values as well and the value isn't constant.
3926 CCP/copyprop have the same restriction to not remove uninit warnings. */
3927 else if (allsame
3928 && (! seen_undef || is_gimple_min_invariant (sameval)))
3929 result = sameval;
3930 else
3932 result = PHI_RESULT (phi);
3933 /* Only insert PHIs that are varying, for constant value numbers
3934 we mess up equivalences otherwise as we are only comparing
3935 the immediate controlling predicates. */
3936 vn_phi_insert (phi, result);
3939 return set_ssa_val_to (PHI_RESULT (phi), result);
3942 /* Try to simplify RHS using equivalences and constant folding. */
3944 static tree
3945 try_to_simplify (gassign *stmt)
3947 enum tree_code code = gimple_assign_rhs_code (stmt);
3948 tree tem;
3950 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
3951 in this case, there is no point in doing extra work. */
3952 if (code == SSA_NAME)
3953 return NULL_TREE;
3955 /* First try constant folding based on our current lattice. */
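  /* vn_lookup_simplify_result lets the match-and-simplify machinery look
     up intermediate results in our value-number lattice; the counter makes
     the hook disable itself after too many invocations.  */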
3956 mprts_hook = vn_lookup_simplify_result;
3957 mprts_hook_cnt = 9;
3958 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3959 mprts_hook = NULL;
3960 if (tem
3961 && (TREE_CODE (tem) == SSA_NAME
3962 || is_gimple_min_invariant (tem)))
3963 return tem;
3965 return NULL_TREE;
3968 /* Visit and value number USE, return true if the value number
3969 changed. */
3971 static bool
3972 visit_use (tree use)
3974 bool changed = false;
3975 gimple *stmt = SSA_NAME_DEF_STMT (use);
3977 mark_use_processed (use);
3979 gcc_assert (!SSA_NAME_IN_FREE_LIST (use)
3980 && !SSA_NAME_IS_DEFAULT_DEF (use));
3982 if (dump_file && (dump_flags & TDF_DETAILS))
3984 fprintf (dump_file, "Value numbering ");
3985 print_generic_expr (dump_file, use);
3986 fprintf (dump_file, " stmt = ");
3987 print_gimple_stmt (dump_file, stmt, 0);
3990 if (gimple_code (stmt) == GIMPLE_PHI)
3991 changed = visit_phi (stmt);
3992 else if (gimple_has_volatile_ops (stmt))
3993 changed = defs_to_varying (stmt);
3994 else if (gassign *ass = dyn_cast <gassign *> (stmt))
3996 enum tree_code code = gimple_assign_rhs_code (ass);
3997 tree lhs = gimple_assign_lhs (ass);
3998 tree rhs1 = gimple_assign_rhs1 (ass);
3999 tree simplified;
4001 /* Shortcut for copies. Simplifying copies is pointless,
4002 since we copy the expression and value they represent. */
4003 if (code == SSA_NAME
4004 && TREE_CODE (lhs) == SSA_NAME)
4006 changed = visit_copy (lhs, rhs1);
4007 goto done;
4009 simplified = try_to_simplify (ass);
4010 if (simplified)
4012 if (dump_file && (dump_flags & TDF_DETAILS))
4014 fprintf (dump_file, "RHS ");
4015 print_gimple_expr (dump_file, ass, 0);
4016 fprintf (dump_file, " simplified to ");
4017 print_generic_expr (dump_file, simplified);
4018 fprintf (dump_file, "\n");
4021 /* Setting value numbers to constants will occasionally
4022 screw up phi congruence because constants are not
4023 uniquely associated with a single ssa name that can be
4024 looked up. */
4025 if (simplified
4026 && is_gimple_min_invariant (simplified)
4027 && TREE_CODE (lhs) == SSA_NAME)
4029 changed = set_ssa_val_to (lhs, simplified);
4030 goto done;
4032 else if (simplified
4033 && TREE_CODE (simplified) == SSA_NAME
4034 && TREE_CODE (lhs) == SSA_NAME)
4036 changed = visit_copy (lhs, simplified);
4037 goto done;
4040 if ((TREE_CODE (lhs) == SSA_NAME
4041 /* We can substitute SSA_NAMEs that are live over
4042 abnormal edges with their constant value. */
4043 && !(gimple_assign_copy_p (ass)
4044 && is_gimple_min_invariant (rhs1))
4045 && !(simplified
4046 && is_gimple_min_invariant (simplified))
4047 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4048 /* Stores or copies from SSA_NAMEs that are live over
4049 abnormal edges are a problem. */
4050 || (code == SSA_NAME
4051 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4052 changed = defs_to_varying (ass);
4053 else if (REFERENCE_CLASS_P (lhs)
4054 || DECL_P (lhs))
4055 changed = visit_reference_op_store (lhs, rhs1, ass);
4056 else if (TREE_CODE (lhs) == SSA_NAME)
4058 if ((gimple_assign_copy_p (ass)
4059 && is_gimple_min_invariant (rhs1))
4060 || (simplified
4061 && is_gimple_min_invariant (simplified)))
4063 if (simplified)
4064 changed = set_ssa_val_to (lhs, simplified);
4065 else
4066 changed = set_ssa_val_to (lhs, rhs1);
4068 else
4070 /* Visit the original statement. */
4071 switch (vn_get_stmt_kind (ass))
4073 case VN_NARY:
4074 changed = visit_nary_op (lhs, ass);
4075 break;
4076 case VN_REFERENCE:
4077 changed = visit_reference_op_load (lhs, rhs1, ass);
4078 break;
4079 default:
4080 changed = defs_to_varying (ass);
4081 break;
4085 else
4086 changed = defs_to_varying (ass);
4088 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4090 tree lhs = gimple_call_lhs (call_stmt);
4091 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4093 /* Try constant folding based on our current lattice. */
4094 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4095 vn_valueize);
4096 if (simplified)
4098 if (dump_file && (dump_flags & TDF_DETAILS))
4100 fprintf (dump_file, "call ");
4101 print_gimple_expr (dump_file, call_stmt, 0);
4102 fprintf (dump_file, " simplified to ");
4103 print_generic_expr (dump_file, simplified);
4104 fprintf (dump_file, "\n");
4107 /* Setting value numbers to constants will occasionally
4108 screw up phi congruence because constants are not
4109 uniquely associated with a single ssa name that can be
4110 looked up. */
4111 if (simplified
4112 && is_gimple_min_invariant (simplified))
4114 changed = set_ssa_val_to (lhs, simplified);
4115 if (gimple_vdef (call_stmt))
4116 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4117 SSA_VAL (gimple_vuse (call_stmt)));
4118 goto done;
4120 else if (simplified
4121 && TREE_CODE (simplified) == SSA_NAME)
4123 changed = visit_copy (lhs, simplified);
4124 if (gimple_vdef (call_stmt))
4125 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4126 SSA_VAL (gimple_vuse (call_stmt)));
4127 goto done;
4129 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4131 changed = defs_to_varying (call_stmt);
4132 goto done;
4136 /* Pick up flags from a devirtualization target. */
4137 tree fn = gimple_call_fn (stmt);
4138 int extra_fnflags = 0;
4139 if (fn && TREE_CODE (fn) == SSA_NAME)
4141 fn = SSA_VAL (fn);
4142 if (TREE_CODE (fn) == ADDR_EXPR
4143 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4144 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4146 if (!gimple_call_internal_p (call_stmt)
4147 && (/* Calls to the same function with the same vuse
4148 and the same operands do not necessarily return the same
4149 value, unless they're pure or const. */
4150 ((gimple_call_flags (call_stmt) | extra_fnflags)
4151 & (ECF_PURE | ECF_CONST))
4152 /* If calls have a vdef, subsequent calls won't have
4153 the same incoming vuse. So, if 2 calls with vdef have the
4154 same vuse, we know they're not subsequent.
4155 We can value number 2 such non-subsequent calls to the same
4156 function with the same vuse and the same operands the same,
4157 because there is no code in the program that can
4158 compare the 2 values... */
4159 || (gimple_vdef (call_stmt)
4160 /* ... unless the call returns a pointer which does
4161 not alias with anything else. In which case the
4162 information that the values are distinct is encoded
4163 in the IL. */
4164 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4165 /* Only perform the following when being called from PRE
4166 which embeds tail merging. */
4167 && default_vn_walk_kind == VN_WALK)))
4168 changed = visit_reference_op_call (lhs, call_stmt);
4169 else
4170 changed = defs_to_varying (call_stmt);
4172 else
4173 changed = defs_to_varying (stmt);
4174 done:
4175 return changed;
4178 /* Compare two operands by reverse postorder index */
4180 static int
4181 compare_ops (const void *pa, const void *pb)
4183 const tree opa = *((const tree *)pa);
4184 const tree opb = *((const tree *)pb);
4185 gimple *opstmta = SSA_NAME_DEF_STMT (opa);
4186 gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
4187 basic_block bba;
4188 basic_block bbb;
4190 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
4191 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4192 else if (gimple_nop_p (opstmta))
4193 return -1;
4194 else if (gimple_nop_p (opstmtb))
4195 return 1;
4197 bba = gimple_bb (opstmta);
4198 bbb = gimple_bb (opstmtb);
4200 if (!bba && !bbb)
4201 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4202 else if (!bba)
4203 return -1;
4204 else if (!bbb)
4205 return 1;
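  /* Within the same block PHI definitions sort first; other statements
     are ordered by the UIDs assigned in init_scc_vn, with the SSA name
     version as the final tie-breaker.  */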
4207 if (bba == bbb)
4209 if (gimple_code (opstmta) == GIMPLE_PHI
4210 && gimple_code (opstmtb) == GIMPLE_PHI)
4211 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4212 else if (gimple_code (opstmta) == GIMPLE_PHI)
4213 return -1;
4214 else if (gimple_code (opstmtb) == GIMPLE_PHI)
4215 return 1;
4216 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
4217 return gimple_uid (opstmta) - gimple_uid (opstmtb);
4218 else
4219 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4221 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
4224 /* Sort an array containing members of a strongly connected component
4225 SCC so that the members are ordered by RPO number.
4226 This means that when the sort is complete, iterating through the
4227 array will give you the members in RPO order. */
4229 static void
4230 sort_scc (vec<tree> scc)
4232 scc.qsort (compare_ops);
4235 /* Insert the no longer used nary ONARY into the hash INFO. */
4237 static void
4238 copy_nary (vn_nary_op_t onary, vn_tables_t info)
4240 size_t size = sizeof_vn_nary_op (onary->length);
4241 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
4242 &info->nary_obstack);
4243 memcpy (nary, onary, size);
4244 vn_nary_op_insert_into (nary, info->nary, false);
4247 /* Insert the no longer used phi OPHI into the hash INFO. */
4249 static void
4250 copy_phi (vn_phi_t ophi, vn_tables_t info)
4252 vn_phi_t phi = info->phis_pool->allocate ();
4253 vn_phi_s **slot;
4254 memcpy (phi, ophi, sizeof (*phi));
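  /* The new entry took over OPHI's phiargs vector, so give OPHI a fresh
     empty one.  */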
4255 ophi->phiargs.create (0);
4256 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
4257 gcc_assert (!*slot);
4258 *slot = phi;
4261 /* Insert the no longer used reference OREF into the hash INFO. */
4263 static void
4264 copy_reference (vn_reference_t oref, vn_tables_t info)
4266 vn_reference_t ref;
4267 vn_reference_s **slot;
4268 ref = info->references_pool->allocate ();
4269 memcpy (ref, oref, sizeof (*ref));
4270 oref->operands.create (0);
4271 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
4272 if (*slot)
4273 free_reference (*slot);
4274 *slot = ref;
4277 /* Process a strongly connected component in the SSA graph. */
4279 static void
4280 process_scc (vec<tree> scc)
4282 tree var;
4283 unsigned int i;
4284 unsigned int iterations = 0;
4285 bool changed = true;
4286 vn_nary_op_iterator_type hin;
4287 vn_phi_iterator_type hip;
4288 vn_reference_iterator_type hir;
4289 vn_nary_op_t nary;
4290 vn_phi_t phi;
4291 vn_reference_t ref;
4293 /* If the SCC has a single member, just visit it. */
4294 if (scc.length () == 1)
4296 tree use = scc[0];
4297 if (VN_INFO (use)->use_processed)
4298 return;
4299 /* We need to make sure it doesn't form a cycle itself, which can
4300 happen for self-referential PHI nodes. In that case we would
4301 end up inserting an expression with VN_TOP operands into the
4302 valid table which makes us derive bogus equivalences later.
4303 The cheapest way to check this is to assume it for all PHI nodes. */
4304 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
4305 /* Fallthru to iteration. */ ;
4306 else
4308 visit_use (use);
4309 return;
4313 if (dump_file && (dump_flags & TDF_DETAILS))
4314 print_scc (dump_file, scc);
4316 /* Iterate over the SCC with the optimistic table until it stops
4317 changing. */
4318 current_info = optimistic_info;
4319 while (changed)
4321 changed = false;
4322 iterations++;
4323 if (dump_file && (dump_flags & TDF_DETAILS))
4324 fprintf (dump_file, "Starting iteration %d\n", iterations);
4325 /* As we are value-numbering optimistically we have to
4326 clear the expression tables and the simplified expressions
4327 in each iteration until we converge. */
4328 optimistic_info->nary->empty ();
4329 optimistic_info->phis->empty ();
4330 optimistic_info->references->empty ();
4331 obstack_free (&optimistic_info->nary_obstack, NULL);
4332 gcc_obstack_init (&optimistic_info->nary_obstack);
4333 optimistic_info->phis_pool->release ();
4334 optimistic_info->references_pool->release ();
4335 FOR_EACH_VEC_ELT (scc, i, var)
4336 gcc_assert (!VN_INFO (var)->needs_insertion
4337 && VN_INFO (var)->expr == NULL);
4338 FOR_EACH_VEC_ELT (scc, i, var)
4339 changed |= visit_use (var);
4342 if (dump_file && (dump_flags & TDF_DETAILS))
4343 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4344 statistics_histogram_event (cfun, "SCC iterations", iterations);
4346 /* Finally, copy the contents of the no longer used optimistic
4347 table to the valid table. */
4348 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
4349 copy_nary (nary, valid_info);
4350 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
4351 copy_phi (phi, valid_info);
4352 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
4353 ref, vn_reference_t, hir)
4354 copy_reference (ref, valid_info);
4356 current_info = valid_info;
4360 /* Pop the components of the found SCC for NAME off the SCC stack
4361 and process them. If the SCC exceeds the size limit its members
4362 are dropped to varying instead of being iterated. */
4364 static void
4365 extract_and_process_scc_for_name (tree name)
4367 auto_vec<tree> scc;
4368 tree x;
4370 /* Found an SCC, pop the components off the SCC stack and
4371 process them. */
4374 x = sccstack.pop ();
4376 VN_INFO (x)->on_sccstack = false;
4377 scc.safe_push (x);
4378 } while (x != name);
4380 /* Drop all defs in the SCC to varying in case a SCC turns out to be
4381 incredibly large.
4382 ??? Just switch to a non-optimistic mode that avoids any iteration. */
4383 if (scc.length () > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4385 if (dump_file)
4387 print_scc (dump_file, scc);
4388 fprintf (dump_file, "WARNING: Giving up value-numbering SCC due to "
4389 "size %u exceeding %u\n", scc.length (),
4390 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4392 tree var;
4393 unsigned i;
4394 FOR_EACH_VEC_ELT (scc, i, var)
4396 gimple *def = SSA_NAME_DEF_STMT (var);
4397 mark_use_processed (var);
4398 if (SSA_NAME_IS_DEFAULT_DEF (var)
4399 || gimple_code (def) == GIMPLE_PHI)
4400 set_ssa_val_to (var, var);
4401 else
4402 defs_to_varying (def);
4404 return;
4407 if (scc.length () > 1)
4408 sort_scc (scc);
4410 process_scc (scc);
4413 /* Depth first search on NAME to discover and process SCC's in the SSA
4414 graph.
4415 Execution of this algorithm relies on the fact that the SCC's are
4416 popped off the stack in topological order. */
4420 static void
4421 DFS (tree name)
4423 auto_vec<ssa_op_iter> itervec;
4424 auto_vec<tree> namevec;
4425 use_operand_p usep = NULL;
4426 gimple *defstmt;
4427 tree use;
4428 ssa_op_iter iter;
4430 start_over:
4431 /* SCC info */
4432 VN_INFO (name)->dfsnum = next_dfs_num++;
4433 VN_INFO (name)->visited = true;
4434 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4436 sccstack.safe_push (name);
4437 VN_INFO (name)->on_sccstack = true;
4438 defstmt = SSA_NAME_DEF_STMT (name);
4440 /* Recursively DFS on our operands, looking for SCC's. */
4441 if (!gimple_nop_p (defstmt))
4443 /* Push a new iterator. */
4444 if (gphi *phi = dyn_cast <gphi *> (defstmt))
4445 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4446 else
4447 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4449 else
4450 clear_and_done_ssa_iter (&iter);
4452 while (1)
4454 /* If we are done processing uses of a name, go up the stack
4455 of iterators and process SCCs as we found them. */
4456 if (op_iter_done (&iter))
4458 /* See if we found an SCC. */
4459 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4460 extract_and_process_scc_for_name (name);
4462 /* Check if we are done. */
4463 if (namevec.is_empty ())
4464 return;
4466 /* Restore the last use walker and continue walking there. */
4467 use = name;
4468 name = namevec.pop ();
4469 memcpy (&iter, &itervec.last (),
4470 sizeof (ssa_op_iter));
4471 itervec.pop ();
4472 goto continue_walking;
4475 use = USE_FROM_PTR (usep);
4477 /* Since we handle phi nodes, we will sometimes get
4478 invariants in the use expression. */
4479 if (TREE_CODE (use) == SSA_NAME)
4481 if (! (VN_INFO (use)->visited))
4483 /* Recurse by pushing the current use walking state on
4484 the stack and starting over. */
4485 itervec.safe_push (iter);
4486 namevec.safe_push (name);
4487 name = use;
4488 goto start_over;
4490 continue_walking:
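      /* Tarjan-style low-link update: merge USE's low value (relevant when
	 we just returned from recursing into it) and, for uses still on the
	 SCC stack that were discovered earlier, USE's DFS number.  */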
4491 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4492 VN_INFO (use)->low);
4494 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4495 && VN_INFO (use)->on_sccstack)
4497 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4498 VN_INFO (name)->low);
4502 usep = op_iter_next_use (&iter);
4506 /* Allocate a value number table. */
4508 static void
4509 allocate_vn_table (vn_tables_t table)
4511 table->phis = new vn_phi_table_type (23);
4512 table->nary = new vn_nary_op_table_type (23);
4513 table->references = new vn_reference_table_type (23);
4515 gcc_obstack_init (&table->nary_obstack);
4516 table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4517 table->references_pool = new object_allocator<vn_reference_s>
4518 ("VN references");
4521 /* Free a value number table. */
4523 static void
4524 free_vn_table (vn_tables_t table)
4526 delete table->phis;
4527 table->phis = NULL;
4528 delete table->nary;
4529 table->nary = NULL;
4530 delete table->references;
4531 table->references = NULL;
4532 obstack_free (&table->nary_obstack, NULL);
4533 delete table->phis_pool;
4534 delete table->references_pool;
4537 static void
4538 init_scc_vn (void)
4540 int j;
4541 int *rpo_numbers_temp;
4543 calculate_dominance_info (CDI_DOMINATORS);
4544 mark_dfs_back_edges ();
4546 sccstack.create (0);
4547 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4549 constant_value_ids = BITMAP_ALLOC (NULL);
4551 next_dfs_num = 1;
4552 next_value_id = 1;
4554 vn_ssa_aux_table.create (num_ssa_names + 1);
4555 /* The create call only preallocates the space; it does not grow the
4556 vector to the right size, so do that explicitly. */
4557 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4558 gcc_obstack_init (&vn_ssa_aux_obstack);
4560 shared_lookup_phiargs.create (0);
4561 shared_lookup_references.create (0);
4562 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4563 rpo_numbers_temp =
4564 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4565 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4567 /* rpo_numbers_temp holds the RPO ordering: rpo_numbers_temp[i] = bb
4568 means that the i'th block in RPO order is bb. We want to map block
4569 indices to RPO numbers, so invert this array into rpo_numbers. */
4570 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4571 rpo_numbers[rpo_numbers_temp[j]] = j;
4573 XDELETE (rpo_numbers_temp);
4575 VN_TOP = build_decl (UNKNOWN_LOCATION, VAR_DECL,
4576 get_identifier ("VN_TOP"), error_mark_node);
4578 renumber_gimple_stmt_uids ();
4580 /* Create the valid and optimistic value numbering tables. */
4581 valid_info = XCNEW (struct vn_tables_s);
4582 allocate_vn_table (valid_info);
4583 optimistic_info = XCNEW (struct vn_tables_s);
4584 allocate_vn_table (optimistic_info);
4585 current_info = valid_info;
4587 /* Create the VN_INFO structures, and initialize value numbers to
4588 TOP or VARYING for parameters. */
4589 size_t i;
4590 tree name;
4592 FOR_EACH_SSA_NAME (i, name, cfun)
4594 VN_INFO_GET (name)->valnum = VN_TOP;
4595 VN_INFO (name)->needs_insertion = false;
4596 VN_INFO (name)->expr = NULL;
4597 VN_INFO (name)->value_id = 0;
4599 if (!SSA_NAME_IS_DEFAULT_DEF (name))
4600 continue;
4602 switch (TREE_CODE (SSA_NAME_VAR (name)))
4604 case VAR_DECL:
4605 /* All undefined vars are VARYING. */
4606 VN_INFO (name)->valnum = name;
4607 VN_INFO (name)->visited = true;
4608 break;
4610 case PARM_DECL:
4611 /* Parameters are VARYING but we can record a condition
4612 if we know it is a non-NULL pointer. */
4613 VN_INFO (name)->visited = true;
4614 VN_INFO (name)->valnum = name;
4615 if (POINTER_TYPE_P (TREE_TYPE (name))
4616 && nonnull_arg_p (SSA_NAME_VAR (name)))
4618 tree ops[2];
4619 ops[0] = name;
4620 ops[1] = build_int_cst (TREE_TYPE (name), 0);
4621 vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4622 boolean_true_node, 0);
4623 if (dump_file && (dump_flags & TDF_DETAILS))
4625 fprintf (dump_file, "Recording ");
4626 print_generic_expr (dump_file, name, TDF_SLIM);
4627 fprintf (dump_file, " != 0\n");
4630 break;
4632 case RESULT_DECL:
4633 /* If the result is passed by invisible reference the default
4634 def is initialized, otherwise it's uninitialized. Either
4635 way treat it as varying. */
4636 VN_INFO (name)->visited = true;
4637 VN_INFO (name)->valnum = name;
4638 break;
4640 default:
4641 gcc_unreachable ();
4646 /* Restore SSA info that has been reset on value leaders. */
4648 void
4649 scc_vn_restore_ssa_info (void)
4651 unsigned i;
4652 tree name;
4654 FOR_EACH_SSA_NAME (i, name, cfun)
4656 if (has_VN_INFO (name))
4658 if (VN_INFO (name)->needs_insertion)
4660 else if (POINTER_TYPE_P (TREE_TYPE (name))
4661 && VN_INFO (name)->info.ptr_info)
4662 SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4663 else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4664 && VN_INFO (name)->info.range_info)
4666 SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4667 SSA_NAME_ANTI_RANGE_P (name)
4668 = VN_INFO (name)->range_info_anti_range_p;
4674 void
4675 free_scc_vn (void)
4677 size_t i;
4678 tree name;
4680 delete constant_to_value_id;
4681 constant_to_value_id = NULL;
4682 BITMAP_FREE (constant_value_ids);
4683 shared_lookup_phiargs.release ();
4684 shared_lookup_references.release ();
4685 XDELETEVEC (rpo_numbers);
4687 FOR_EACH_SSA_NAME (i, name, cfun)
4689 if (has_VN_INFO (name)
4690 && VN_INFO (name)->needs_insertion)
4691 release_ssa_name (name);
4693 obstack_free (&vn_ssa_aux_obstack, NULL);
4694 vn_ssa_aux_table.release ();
4696 sccstack.release ();
4697 free_vn_table (valid_info);
4698 XDELETE (valid_info);
4699 free_vn_table (optimistic_info);
4700 XDELETE (optimistic_info);
4702 BITMAP_FREE (const_parms);
4705 /* Set *ID according to RESULT. */
4707 static void
4708 set_value_id_for_result (tree result, unsigned int *id)
4710 if (result && TREE_CODE (result) == SSA_NAME)
4711 *id = VN_INFO (result)->value_id;
4712 else if (result && is_gimple_min_invariant (result))
4713 *id = get_or_alloc_constant_value_id (result);
4714 else
4715 *id = get_next_value_id ();
4718 /* Set the value ids in the valid hash tables. */
4720 static void
4721 set_hashtable_value_ids (void)
4723 vn_nary_op_iterator_type hin;
4724 vn_phi_iterator_type hip;
4725 vn_reference_iterator_type hir;
4726 vn_nary_op_t vno;
4727 vn_reference_t vr;
4728 vn_phi_t vp;
4730 /* Now set the value ids of the things we had put in the hash
4731 table. */
4733 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4734 set_value_id_for_result (vno->result, &vno->value_id);
4736 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4737 set_value_id_for_result (vp->result, &vp->value_id);
4739 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4740 hir)
4741 set_value_id_for_result (vr->result, &vr->value_id);
4744 class sccvn_dom_walker : public dom_walker
4746 public:
4747 sccvn_dom_walker ()
4748 : dom_walker (CDI_DOMINATORS, true), cond_stack (0) {}
4750 virtual edge before_dom_children (basic_block);
4751 virtual void after_dom_children (basic_block);
4753 void record_cond (basic_block,
4754 enum tree_code code, tree lhs, tree rhs, bool value);
4755 void record_conds (basic_block,
4756 enum tree_code code, tree lhs, tree rhs, bool value);
4758 auto_vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4759 cond_stack;
4762 /* Record a temporary condition for the BB and its dominated blocks. */
4764 void
4765 sccvn_dom_walker::record_cond (basic_block bb,
4766 enum tree_code code, tree lhs, tree rhs,
4767 bool value)
4769 tree ops[2] = { lhs, rhs };
4770 vn_nary_op_t old = NULL;
4771 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4772 current_info->nary->remove_elt_with_hash (old, old->hashcode);
4773 vn_nary_op_t cond
4774 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4775 value
4776 ? boolean_true_node
4777 : boolean_false_node, 0);
4778 if (dump_file && (dump_flags & TDF_DETAILS))
4780 fprintf (dump_file, "Recording temporarily ");
4781 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4782 fprintf (dump_file, " %s ", get_tree_code_name (code));
4783 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4784 fprintf (dump_file, " == %s%s\n",
4785 value ? "true" : "false",
4786 old ? " (old entry saved)" : "");
4788 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4791 /* Record temporary conditions for the BB and its dominated blocks
4792 according to LHS CODE RHS == VALUE and its dominated conditions. */
4794 void
4795 sccvn_dom_walker::record_conds (basic_block bb,
4796 enum tree_code code, tree lhs, tree rhs,
4797 bool value)
4799 /* Record the original condition. */
4800 record_cond (bb, code, lhs, rhs, value);
4802 if (!value)
4803 return;
4805 /* Record dominated conditions if the condition is true. Note that
4806 the inversion is already recorded. */
4807 switch (code)
4809 case LT_EXPR:
4810 case GT_EXPR:
4811 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4812 record_cond (bb, NE_EXPR, lhs, rhs, true);
4813 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4814 break;
4816 case EQ_EXPR:
4817 record_cond (bb, LE_EXPR, lhs, rhs, true);
4818 record_cond (bb, GE_EXPR, lhs, rhs, true);
4819 record_cond (bb, LT_EXPR, lhs, rhs, false);
4820 record_cond (bb, GT_EXPR, lhs, rhs, false);
4821 break;
4823 default:
4824 break;
4828 /* Restore expressions and values derived from conditionals. */
4830 void
4831 sccvn_dom_walker::after_dom_children (basic_block bb)
4833 while (!cond_stack.is_empty ()
4834 && cond_stack.last ().first == bb)
4836 vn_nary_op_t cond = cond_stack.last ().second.first;
4837 vn_nary_op_t old = cond_stack.last ().second.second;
4838 current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4839 if (old)
4840 vn_nary_op_insert_into (old, current_info->nary, false);
4841 cond_stack.pop ();
4845 /* Value number all statements in BB. */
4847 edge
4848 sccvn_dom_walker::before_dom_children (basic_block bb)
4850 if (dump_file && (dump_flags & TDF_DETAILS))
4851 fprintf (dump_file, "Visiting BB %d\n", bb->index);
4853 /* If we have a single predecessor record the equivalence from a
4854 possible condition on the predecessor edge. */
4855 edge pred_e = single_pred_edge_ignoring_loop_edges (bb, false);
4856 if (pred_e)
4858 /* Check if there are multiple executable successor edges in
4859 the source block. Otherwise there is no additional info
4860 to be recorded. */
4861 edge_iterator ei;
4862 edge e2;
4863 FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4864 if (e2 != pred_e
4865 && e2->flags & EDGE_EXECUTABLE)
4866 break;
4867 if (e2 && (e2->flags & EDGE_EXECUTABLE))
4869 gimple *stmt = last_stmt (pred_e->src);
4870 if (stmt
4871 && gimple_code (stmt) == GIMPLE_COND)
4873 enum tree_code code = gimple_cond_code (stmt);
4874 tree lhs = gimple_cond_lhs (stmt);
4875 tree rhs = gimple_cond_rhs (stmt);
4876 record_conds (bb, code, lhs, rhs,
4877 (pred_e->flags & EDGE_TRUE_VALUE) != 0);
4878 code = invert_tree_comparison (code, HONOR_NANS (lhs));
4879 if (code != ERROR_MARK)
4880 record_conds (bb, code, lhs, rhs,
4881 (pred_e->flags & EDGE_TRUE_VALUE) == 0);
4886 /* Value-number all defs in the basic-block. */
4887 for (gphi_iterator gsi = gsi_start_phis (bb);
4888 !gsi_end_p (gsi); gsi_next (&gsi))
4890 gphi *phi = gsi.phi ();
4891 tree res = PHI_RESULT (phi);
4892 if (!VN_INFO (res)->visited)
4893 DFS (res);
4895 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4896 !gsi_end_p (gsi); gsi_next (&gsi))
4898 ssa_op_iter i;
4899 tree op;
4900 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4901 if (!VN_INFO (op)->visited)
4902 DFS (op);
4905 /* Finally look at the last stmt. */
4906 gimple *stmt = last_stmt (bb);
4907 if (!stmt)
4908 return NULL;
4910 enum gimple_code code = gimple_code (stmt);
4911 if (code != GIMPLE_COND
4912 && code != GIMPLE_SWITCH
4913 && code != GIMPLE_GOTO)
4914 return NULL;
4916 if (dump_file && (dump_flags & TDF_DETAILS))
4918 fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
4919 print_gimple_stmt (dump_file, stmt, 0);
4922 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4923 if value-numbering can prove they are not reachable. Handling
4924 computed gotos is also possible. */
4925 tree val;
4926 switch (code)
4928 case GIMPLE_COND:
4930 tree lhs = vn_valueize (gimple_cond_lhs (stmt));
4931 tree rhs = vn_valueize (gimple_cond_rhs (stmt));
4932 val = gimple_simplify (gimple_cond_code (stmt),
4933 boolean_type_node, lhs, rhs,
4934 NULL, vn_valueize);
4935 /* If that didn't simplify to a constant see if we have recorded
4936 temporary expressions from taken edges. */
4937 if (!val || TREE_CODE (val) != INTEGER_CST)
4939 tree ops[2];
4940 ops[0] = lhs;
4941 ops[1] = rhs;
4942 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
4943 boolean_type_node, ops, NULL);
4945 break;
4947 case GIMPLE_SWITCH:
4948 val = gimple_switch_index (as_a <gswitch *> (stmt));
4949 break;
4950 case GIMPLE_GOTO:
4951 val = gimple_goto_dest (stmt);
4952 break;
4953 default:
4954 gcc_unreachable ();
4956 if (!val)
4957 return NULL;
4959 edge taken = find_taken_edge (bb, vn_valueize (val));
4960 if (!taken)
4961 return NULL;
4963 if (dump_file && (dump_flags & TDF_DETAILS))
4964 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4965 "not executable\n", bb->index, bb->index, taken->dest->index);
4967 return taken;
4970 /* Do SCCVN. DEFAULT_VN_WALK_KIND_ specifies how we use the alias
4971 oracle walking during the VN process. */
4974 void
4975 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4977 size_t i;
4979 default_vn_walk_kind = default_vn_walk_kind_;
4981 init_scc_vn ();
4983 /* Collect pointers we know point to readonly memory. */
4984 const_parms = BITMAP_ALLOC (NULL);
4985 tree fnspec = lookup_attribute ("fn spec",
4986 TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
4987 if (fnspec)
4989 fnspec = TREE_VALUE (TREE_VALUE (fnspec));
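      /* Character 0 of the spec string describes the return value, so
	 argument I corresponds to character I of the string.  */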
4990 i = 1;
4991 for (tree arg = DECL_ARGUMENTS (cfun->decl);
4992 arg; arg = DECL_CHAIN (arg), ++i)
4994 if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
4995 break;
4996 if (TREE_STRING_POINTER (fnspec)[i] == 'R'
4997 || TREE_STRING_POINTER (fnspec)[i] == 'r')
4999 tree name = ssa_default_def (cfun, arg);
5000 if (name)
5001 bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
5006 /* Walk all blocks in dominator order, value-numbering stmts
5007 SSA defs and decide whether outgoing edges are not executable. */
5008 sccvn_dom_walker walker;
5009 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5011 /* Initialize the value ids and prune out remaining VN_TOPs
5012 from dead code. */
5013 tree name;
5014 FOR_EACH_SSA_NAME (i, name, cfun)
5016 vn_ssa_aux_t info = VN_INFO (name);
5017 if (!info->visited
5018 || info->valnum == VN_TOP)
5019 info->valnum = name;
5020 if (info->valnum == name)
5021 info->value_id = get_next_value_id ();
5022 else if (is_gimple_min_invariant (info->valnum))
5023 info->value_id = get_or_alloc_constant_value_id (info->valnum);
5026 /* Propagate value ids: give each name the value id of its value leader. */
5027 FOR_EACH_SSA_NAME (i, name, cfun)
5029 vn_ssa_aux_t info = VN_INFO (name);
5030 if (TREE_CODE (info->valnum) == SSA_NAME
5031 && info->valnum != name
5032 && info->value_id != VN_INFO (info->valnum)->value_id)
5033 info->value_id = VN_INFO (info->valnum)->value_id;
5036 set_hashtable_value_ids ();
5038 if (dump_file && (dump_flags & TDF_DETAILS))
5040 fprintf (dump_file, "Value numbers:\n");
5041 FOR_EACH_SSA_NAME (i, name, cfun)
5043 if (VN_INFO (name)->visited
5044 && SSA_VAL (name) != name)
5046 print_generic_expr (dump_file, name);
5047 fprintf (dump_file, " = ");
5048 print_generic_expr (dump_file, SSA_VAL (name));
5049 fprintf (dump_file, "\n");
5055 /* Return the maximum value id we have ever seen. */
5057 unsigned int
5058 get_max_value_id (void)
5060 return next_value_id;
5063 /* Return the next unique value id. */
5065 unsigned int
5066 get_next_value_id (void)
5068 return next_value_id++;
5072 /* Compare two expressions E1 and E2 and return true if they are equal. */
5074 bool
5075 expressions_equal_p (tree e1, tree e2)
5077 /* The obvious case. */
5078 if (e1 == e2)
5079 return true;
5081 /* If either one is VN_TOP consider them equal. */
5082 if (e1 == VN_TOP || e2 == VN_TOP)
5083 return true;
5085 /* If only one of them is null, they cannot be equal. */
5086 if (!e1 || !e2)
5087 return false;
5089 /* Now perform the actual comparison. */
5090 if (TREE_CODE (e1) == TREE_CODE (e2)
5091 && operand_equal_p (e1, e2, OEP_PURE_SAME))
5092 return true;
5094 return false;
5098 /* Return true if the nary operation NARY may trap. This is a copy
5099 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
5101 bool
5102 vn_nary_may_trap (vn_nary_op_t nary)
5104 tree type;
5105 tree rhs2 = NULL_TREE;
5106 bool honor_nans = false;
5107 bool honor_snans = false;
5108 bool fp_operation = false;
5109 bool honor_trapv = false;
5110 bool handled, ret;
5111 unsigned i;
5113 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5114 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5115 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5117 type = nary->type;
5118 fp_operation = FLOAT_TYPE_P (type);
5119 if (fp_operation)
5121 honor_nans = flag_trapping_math && !flag_finite_math_only;
5122 honor_snans = flag_signaling_nans != 0;
5124 else if (INTEGRAL_TYPE_P (type)
5125 && TYPE_OVERFLOW_TRAPS (type))
5126 honor_trapv = true;
5128 if (nary->length >= 2)
5129 rhs2 = nary->op[1];
5130 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5131 honor_trapv,
5132 honor_nans, honor_snans, rhs2,
5133 &handled);
5134 if (handled
5135 && ret)
5136 return true;
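  /* Even if the operation itself cannot trap, one of its operands
     still might.  */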
5138 for (i = 0; i < nary->length; ++i)
5139 if (tree_could_trap_p (nary->op[i]))
5140 return true;
5142 return false;
5146 class eliminate_dom_walker : public dom_walker
5148 public:
5149 eliminate_dom_walker (cdi_direction, bitmap);
5150 ~eliminate_dom_walker ();
5152 virtual edge before_dom_children (basic_block);
5153 virtual void after_dom_children (basic_block);
5155 tree eliminate_avail (tree op);
5156 void eliminate_push_avail (tree op);
5157 tree eliminate_insert (gimple_stmt_iterator *gsi, tree val);
5159 bool do_pre;
5160 unsigned int el_todo;
5161 unsigned int eliminations;
5162 unsigned int insertions;
5164 /* SSA names that had their defs inserted by PRE if do_pre. */
5165 bitmap inserted_exprs;
5167 /* Blocks with statements that have had their EH properties changed. */
5168 bitmap need_eh_cleanup;
5170 /* Blocks with statements that have had their AB properties changed. */
5171 bitmap need_ab_cleanup;
5173 auto_vec<gimple *> to_remove;
5174 auto_vec<gimple *> to_fixup;
5175 auto_vec<tree> avail;
5176 auto_vec<tree> avail_stack;
5179 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
5180 bitmap inserted_exprs_)
5181 : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
5182 el_todo (0), eliminations (0), insertions (0),
5183 inserted_exprs (inserted_exprs_)
5185 need_eh_cleanup = BITMAP_ALLOC (NULL);
5186 need_ab_cleanup = BITMAP_ALLOC (NULL);
5189 eliminate_dom_walker::~eliminate_dom_walker ()
5191 BITMAP_FREE (need_eh_cleanup);
5192 BITMAP_FREE (need_ab_cleanup);
5195 /* Return a leader for OP that is available at the current point of the
5196 eliminate domwalk. */
5198 tree
5199 eliminate_dom_walker::eliminate_avail (tree op)
5201 tree valnum = VN_INFO (op)->valnum;
5202 if (TREE_CODE (valnum) == SSA_NAME)
5204 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5205 return valnum;
5206 if (avail.length () > SSA_NAME_VERSION (valnum))
5207 return avail[SSA_NAME_VERSION (valnum)];
5209 else if (is_gimple_min_invariant (valnum))
5210 return valnum;
5211 return NULL_TREE;
5214 /* At the current point of the eliminate domwalk make OP available. */
5216 void
5217 eliminate_dom_walker::eliminate_push_avail (tree op)
5219 tree valnum = VN_INFO (op)->valnum;
5220 if (TREE_CODE (valnum) == SSA_NAME)
5222 if (avail.length () <= SSA_NAME_VERSION (valnum))
5223 avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
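      /* Save the previous leader for this value (or OP itself if there was
	 none) on the unwind stack so after_dom_children can restore the
	 state when leaving the currently dominated blocks.  */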
5224 tree pushop = op;
5225 if (avail[SSA_NAME_VERSION (valnum)])
5226 pushop = avail[SSA_NAME_VERSION (valnum)];
5227 avail_stack.safe_push (pushop);
5228 avail[SSA_NAME_VERSION (valnum)] = op;
5232 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
5233 the leader for the expression if insertion was successful. */
5235 tree
5236 eliminate_dom_walker::eliminate_insert (gimple_stmt_iterator *gsi, tree val)
5238 /* We can insert a sequence with a single assignment only. */
5239 gimple_seq stmts = VN_INFO (val)->expr;
5240 if (!gimple_seq_singleton_p (stmts))
5241 return NULL_TREE;
5242 gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
5243 if (!stmt
5244 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
5245 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
5246 && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
5247 && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
5248 || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
5249 return NULL_TREE;
5251 tree op = gimple_assign_rhs1 (stmt);
5252 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
5253 || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5254 op = TREE_OPERAND (op, 0);
5255 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
5256 if (!leader)
5257 return NULL_TREE;
5259 tree res;
5260 stmts = NULL;
5261 if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5262 res = gimple_build (&stmts, BIT_FIELD_REF,
5263 TREE_TYPE (val), leader,
5264 TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
5265 TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
5266 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
5267 res = gimple_build (&stmts, BIT_AND_EXPR,
5268 TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
5269 else
5270 res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
5271 TREE_TYPE (val), leader);
5272 if (TREE_CODE (res) != SSA_NAME
5273 || SSA_NAME_IS_DEFAULT_DEF (res)
5274 || gimple_bb (SSA_NAME_DEF_STMT (res)))
5276 gimple_seq_discard (stmts);
5278 /* During propagation we have to treat SSA info conservatively
5279 and thus we can end up simplifying the inserted expression
5280 at elimination time to something not defined in stmts. */
5281 /* But then this is a redundancy we failed to detect. Which means
5282 res now has two values. That doesn't play well with how
5283 we track availability here, so give up. */
5284 if (dump_file && (dump_flags & TDF_DETAILS))
5286 if (TREE_CODE (res) == SSA_NAME)
5287 res = eliminate_avail (res);
5288 if (res)
5290 fprintf (dump_file, "Failed to insert expression for value ");
5291 print_generic_expr (dump_file, val);
5292 fprintf (dump_file, " which is really fully redundant to ");
5293 print_generic_expr (dump_file, res);
5294 fprintf (dump_file, "\n");
5298 return NULL_TREE;
5300 else
5302 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
5303 VN_INFO_GET (res)->valnum = val;
5306 insertions++;
5307 if (dump_file && (dump_flags & TDF_DETAILS))
5309 fprintf (dump_file, "Inserted ");
5310 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
5313 return res;
5318 /* Perform elimination for the basic-block B during the domwalk. */
5320 edge
5321 eliminate_dom_walker::before_dom_children (basic_block b)
5323 /* Mark new bb. */
5324 avail_stack.safe_push (NULL_TREE);
5326 /* Skip unreachable blocks marked unreachable during the SCCVN domwalk. */
5327 edge_iterator ei;
5328 edge e;
5329 FOR_EACH_EDGE (e, ei, b->preds)
5330 if (e->flags & EDGE_EXECUTABLE)
5331 break;
5332 if (! e)
5333 return NULL;
5335 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
5337 gphi *phi = gsi.phi ();
5338 tree res = PHI_RESULT (phi);
5340 if (virtual_operand_p (res))
5342 gsi_next (&gsi);
5343 continue;
5346 tree sprime = eliminate_avail (res);
5347 if (sprime
5348 && sprime != res)
5350 if (dump_file && (dump_flags & TDF_DETAILS))
5352 fprintf (dump_file, "Replaced redundant PHI node defining ");
5353 print_generic_expr (dump_file, res);
5354 fprintf (dump_file, " with ");
5355 print_generic_expr (dump_file, sprime);
5356 fprintf (dump_file, "\n");
5359 /* If we inserted this PHI node ourself, it's not an elimination. */
5360 if (! inserted_exprs
5361 || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
5362 eliminations++;
5364 /* If we will propagate into all uses don't bother to do
5365 anything. */
5366 if (may_propagate_copy (res, sprime))
5368 /* Mark the PHI for removal. */
5369 to_remove.safe_push (phi);
5370 gsi_next (&gsi);
5371 continue;
5374 remove_phi_node (&gsi, false);
5376 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
5377 sprime = fold_convert (TREE_TYPE (res), sprime);
5378 gimple *stmt = gimple_build_assign (res, sprime);
5379 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
5380 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
5381 continue;
5384 eliminate_push_avail (res);
5385 gsi_next (&gsi);
5388 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
5389 !gsi_end_p (gsi);
5390 gsi_next (&gsi))
5392 tree sprime = NULL_TREE;
5393 gimple *stmt = gsi_stmt (gsi);
5394 tree lhs = gimple_get_lhs (stmt);
5395 if (lhs && TREE_CODE (lhs) == SSA_NAME
5396 && !gimple_has_volatile_ops (stmt)
5397 /* See PR43491. Do not replace a global register variable when
5398 it is the RHS of an assignment. Do replace local register
5399 variables since gcc does not guarantee a local variable will
5400 be allocated in a register.
5401 ??? The fix isn't effective here. This should instead
5402 be ensured by not value-numbering them the same but treating
5403 them like volatiles? */
5404 && !(gimple_assign_single_p (stmt)
5405 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
5406 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
5407 && is_global_var (gimple_assign_rhs1 (stmt)))))
5409 sprime = eliminate_avail (lhs);
5410 if (!sprime)
5412 /* If there is no existing usable leader but SCCVN thinks
5413 it has an expression it wants to use as replacement,
5414 insert that. */
5415 tree val = VN_INFO (lhs)->valnum;
5416 if (val != VN_TOP
5417 && TREE_CODE (val) == SSA_NAME
5418 && VN_INFO (val)->needs_insertion
5419 && VN_INFO (val)->expr != NULL
5420 && (sprime = eliminate_insert (&gsi, val)) != NULL_TREE)
5421 eliminate_push_avail (sprime);
5424 /* If this now constitutes a copy, duplicate points-to
5425 and range info appropriately. This is especially
5426 important for inserted code. See tree-ssa-copy.c
5427 for similar code. */
5428 if (sprime
5429 && TREE_CODE (sprime) == SSA_NAME)
5431 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
5432 if (POINTER_TYPE_P (TREE_TYPE (lhs))
5433 && VN_INFO_PTR_INFO (lhs)
5434 && ! VN_INFO_PTR_INFO (sprime))
5436 duplicate_ssa_name_ptr_info (sprime,
5437 VN_INFO_PTR_INFO (lhs));
5438 if (b != sprime_b)
5439 mark_ptr_info_alignment_unknown
5440 (SSA_NAME_PTR_INFO (sprime));
5442 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5443 && VN_INFO_RANGE_INFO (lhs)
5444 && ! VN_INFO_RANGE_INFO (sprime)
5445 && b == sprime_b)
5446 duplicate_ssa_name_range_info (sprime,
5447 VN_INFO_RANGE_TYPE (lhs),
5448 VN_INFO_RANGE_INFO (lhs));
5451 /* Inhibit the use of an inserted PHI on a loop header when
5452 the address of the memory reference is a simple induction
5453 variable. In other cases the vectorizer won't do anything
5454 anyway (either it's loop invariant or a complicated
5455 expression). */
5456 if (sprime
5457 && TREE_CODE (sprime) == SSA_NAME
5458 && do_pre
5459 && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
5460 && loop_outer (b->loop_father)
5461 && has_zero_uses (sprime)
5462 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
5463 && gimple_assign_load_p (stmt))
5465 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
5466 basic_block def_bb = gimple_bb (def_stmt);
5467 if (gimple_code (def_stmt) == GIMPLE_PHI
5468 && def_bb->loop_father->header == def_bb)
5470 loop_p loop = def_bb->loop_father;
5471 ssa_op_iter iter;
5472 tree op;
5473 bool found = false;
5474 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5476 affine_iv iv;
5477 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
5478 if (def_bb
5479 && flow_bb_inside_loop_p (loop, def_bb)
5480 && simple_iv (loop, loop, op, &iv, true))
5482 found = true;
5483 break;
5486 if (found)
5488 if (dump_file && (dump_flags & TDF_DETAILS))
5490 fprintf (dump_file, "Not replacing ");
5491 print_gimple_expr (dump_file, stmt, 0);
5492 fprintf (dump_file, " with ");
5493 print_generic_expr (dump_file, sprime);
5494 fprintf (dump_file, " which would add a loop"
5495 " carried dependence to loop %d\n",
5496 loop->num);
5498 /* Don't keep sprime available. */
5499 sprime = NULL_TREE;
5504 if (sprime)
5506 /* If we can propagate the value computed for LHS into
5507 all uses don't bother doing anything with this stmt. */
5508 if (may_propagate_copy (lhs, sprime))
5510 /* Mark it for removal. */
5511 to_remove.safe_push (stmt);
5513 /* ??? Don't count copy/constant propagations. */
5514 if (gimple_assign_single_p (stmt)
5515 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5516 || gimple_assign_rhs1 (stmt) == sprime))
5517 continue;
5519 if (dump_file && (dump_flags & TDF_DETAILS))
5521 fprintf (dump_file, "Replaced ");
5522 print_gimple_expr (dump_file, stmt, 0);
5523 fprintf (dump_file, " with ");
5524 print_generic_expr (dump_file, sprime);
5525 fprintf (dump_file, " in all uses of ");
5526 print_gimple_stmt (dump_file, stmt, 0);
5529 eliminations++;
5530 continue;
5533 /* If this is an assignment from our leader (which
5534 happens in the case the value-number is a constant)
5535 then there is nothing to do. */
5536 if (gimple_assign_single_p (stmt)
5537 && sprime == gimple_assign_rhs1 (stmt))
5538 continue;
5540 /* Else replace its RHS. */
5541 bool can_make_abnormal_goto
5542 = is_gimple_call (stmt)
5543 && stmt_can_make_abnormal_goto (stmt);
5545 if (dump_file && (dump_flags & TDF_DETAILS))
5547 fprintf (dump_file, "Replaced ");
5548 print_gimple_expr (dump_file, stmt, 0);
5549 fprintf (dump_file, " with ");
5550 print_generic_expr (dump_file, sprime);
5551 fprintf (dump_file, " in ");
5552 print_gimple_stmt (dump_file, stmt, 0);
5555 eliminations++;
5556 gimple *orig_stmt = stmt;
5557 if (!useless_type_conversion_p (TREE_TYPE (lhs),
5558 TREE_TYPE (sprime)))
5559 sprime = fold_convert (TREE_TYPE (lhs), sprime);
5560 tree vdef = gimple_vdef (stmt);
5561 tree vuse = gimple_vuse (stmt);
5562 propagate_tree_value_into_stmt (&gsi, sprime);
5563 stmt = gsi_stmt (gsi);
5564 update_stmt (stmt);
5565 if (vdef != gimple_vdef (stmt))
5566 VN_INFO (vdef)->valnum = vuse;
5568 /* If we removed EH side-effects from the statement, clean
5569 its EH information. */
5570 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
5572 bitmap_set_bit (need_eh_cleanup,
5573 gimple_bb (stmt)->index);
5574 if (dump_file && (dump_flags & TDF_DETAILS))
5575 fprintf (dump_file, " Removed EH side-effects.\n");
5578 /* Likewise for AB side-effects. */
5579 if (can_make_abnormal_goto
5580 && !stmt_can_make_abnormal_goto (stmt))
5582 bitmap_set_bit (need_ab_cleanup,
5583 gimple_bb (stmt)->index);
5584 if (dump_file && (dump_flags & TDF_DETAILS))
5585 fprintf (dump_file, " Removed AB side-effects.\n");
5588 continue;
5592 /* If the statement is a scalar store, see if the expression
5593 has the same value number as its rhs. If so, the store is
5594 dead. */
5595 if (gimple_assign_single_p (stmt)
5596 && !gimple_has_volatile_ops (stmt)
5597 && !is_gimple_reg (gimple_assign_lhs (stmt))
5598 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5599 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
5601 tree val;
5602 tree rhs = gimple_assign_rhs1 (stmt);
5603 vn_reference_t vnresult;
5604 val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
5605 &vnresult, false);
5606 if (TREE_CODE (rhs) == SSA_NAME)
5607 rhs = VN_INFO (rhs)->valnum;
5608 if (val
5609 && operand_equal_p (val, rhs, 0))
5611 /* We can only remove the later store if the former aliases
5612 at least all accesses the later one does or if the store
5613 was to readonly memory storing the same value. */
5614 alias_set_type set = get_alias_set (lhs);
5615 if (! vnresult
5616 || vnresult->set == set
5617 || alias_set_subset_of (set, vnresult->set))
5619 if (dump_file && (dump_flags & TDF_DETAILS))
5621 fprintf (dump_file, "Deleted redundant store ");
5622 print_gimple_stmt (dump_file, stmt, 0);
5625 /* Queue stmt for removal. */
5626 to_remove.safe_push (stmt);
5627 continue;
5632 /* If this is a control statement on which value numbering left some
5633 edges unexecuted, force the condition in a way consistent with
5634 that. */
5635 if (gcond *cond = dyn_cast <gcond *> (stmt))
5637 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
5638 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
5640 if (dump_file && (dump_flags & TDF_DETAILS))
5642 fprintf (dump_file, "Removing unexecutable edge from ");
5643 print_gimple_stmt (dump_file, stmt, 0);
5645 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
5646 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
5647 gimple_cond_make_true (cond);
5648 else
5649 gimple_cond_make_false (cond);
5650 update_stmt (cond);
5651 el_todo |= TODO_cleanup_cfg;
5652 continue;
5656 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
5657 bool was_noreturn = (is_gimple_call (stmt)
5658 && gimple_call_noreturn_p (stmt));
5659 tree vdef = gimple_vdef (stmt);
5660 tree vuse = gimple_vuse (stmt);
5662 /* If we didn't replace the whole stmt (or propagate the result
5663 into all uses), replace all uses on this stmt with their
5664 leaders. */
5665 bool modified = false;
5666 use_operand_p use_p;
5667 ssa_op_iter iter;
5668 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5670 tree use = USE_FROM_PTR (use_p);
5671 /* ??? The call code above leaves stmt operands un-updated. */
5672 if (TREE_CODE (use) != SSA_NAME)
5673 continue;
5674 tree sprime = eliminate_avail (use);
5675 if (sprime && sprime != use
5676 && may_propagate_copy (use, sprime)
5677 /* We substitute into debug stmts to avoid excessive
5678 debug temporaries created by removed stmts, but we need
5679 to avoid doing so for inserted sprimes as we never want
5680 to create debug temporaries for them. */
5681 && (!inserted_exprs
5682 || TREE_CODE (sprime) != SSA_NAME
5683 || !is_gimple_debug (stmt)
5684 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
5686 propagate_value (use_p, sprime);
5687 modified = true;
5691 /* Fold the stmt if modified, this canonicalizes MEM_REFs we propagated
5692 into, which is a requirement for the IPA devirt machinery. */
5693 gimple *old_stmt = stmt;
5694 if (modified)
5696 /* If a formerly non-invariant ADDR_EXPR is turned into an
5697 invariant one it was on a separate stmt. */
5698 if (gimple_assign_single_p (stmt)
5699 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
5700 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
5701 gimple_stmt_iterator prev = gsi;
5702 gsi_prev (&prev);
5703 if (fold_stmt (&gsi))
5705 /* fold_stmt may have created new stmts in between
5706 the previous stmt and the folded stmt. Mark
5707 all defs created there as varying to not confuse
5708 the SCCVN machinery as we're using that even during
5709 elimination. */
5710 if (gsi_end_p (prev))
5711 prev = gsi_start_bb (b);
5712 else
5713 gsi_next (&prev);
5714 if (gsi_stmt (prev) != gsi_stmt (gsi))
5717 tree def;
5718 ssa_op_iter dit;
5719 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
5720 dit, SSA_OP_ALL_DEFS)
5721 /* As existing DEFs may move between stmts
5722 we have to guard VN_INFO_GET. */
5723 if (! has_VN_INFO (def))
5724 VN_INFO_GET (def)->valnum = def;
5725 if (gsi_stmt (prev) == gsi_stmt (gsi))
5726 break;
5727 gsi_next (&prev);
5729 while (1);
5731 stmt = gsi_stmt (gsi);
5732 /* In case we folded the stmt away schedule the NOP for removal. */
5733 if (gimple_nop_p (stmt))
5734 to_remove.safe_push (stmt);
5737 /* Visit indirect calls and turn them into direct calls if
5738 possible using the devirtualization machinery. Do this before
5739 checking for required EH/abnormal/noreturn cleanup as devirt
5740 may expose more of those. */
5741 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5743 tree fn = gimple_call_fn (call_stmt);
5744 if (fn
5745 && flag_devirtualize
5746 && virtual_method_call_p (fn))
5748 tree otr_type = obj_type_ref_class (fn);
5749 unsigned HOST_WIDE_INT otr_tok
5750 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
5751 tree instance;
5752 ipa_polymorphic_call_context context (current_function_decl,
5753 fn, stmt, &instance);
5754 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
5755 otr_type, stmt);
5756 bool final;
5757 vec <cgraph_node *> targets
5758 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
5759 otr_tok, context, &final);
5760 if (dump_file)
5761 dump_possible_polymorphic_call_targets (dump_file,
5762 obj_type_ref_class (fn),
5763 otr_tok, context);
5764 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5766 tree fn;
5767 if (targets.length () == 1)
5768 fn = targets[0]->decl;
5769 else
5770 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5771 if (dump_enabled_p ())
5773 location_t loc = gimple_location (stmt);
5774 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
5775 "converting indirect call to "
5776 "function %s\n",
5777 lang_hooks.decl_printable_name (fn, 2));
5779 gimple_call_set_fndecl (call_stmt, fn);
5780 /* If changing the call to __builtin_unreachable
5781 or similar noreturn function, adjust gimple_call_fntype
5782 too. */
5783 if (gimple_call_noreturn_p (call_stmt)
5784 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
5785 && TYPE_ARG_TYPES (TREE_TYPE (fn))
5786 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
5787 == void_type_node))
5788 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
5789 maybe_remove_unused_call_args (cfun, call_stmt);
5790 modified = true;
5795 if (modified)
5797 /* When changing a call into a noreturn call, cfg cleanup
5798 is needed to fix up the noreturn call. */
5799 if (!was_noreturn
5800 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
5801 to_fixup.safe_push (stmt);
5802 /* When changing a condition or switch into one we know what
5803 edge will be executed, schedule a cfg cleanup. */
5804 if ((gimple_code (stmt) == GIMPLE_COND
5805 && (gimple_cond_true_p (as_a <gcond *> (stmt))
5806 || gimple_cond_false_p (as_a <gcond *> (stmt))))
5807 || (gimple_code (stmt) == GIMPLE_SWITCH
5808 && TREE_CODE (gimple_switch_index
5809 (as_a <gswitch *> (stmt))) == INTEGER_CST))
5810 el_todo |= TODO_cleanup_cfg;
5811 /* If we removed EH side-effects from the statement, clean
5812 its EH information. */
5813 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
5814 {
5815 bitmap_set_bit (need_eh_cleanup,
5816 gimple_bb (stmt)->index);
5817 if (dump_file && (dump_flags & TDF_DETAILS))
5818 fprintf (dump_file, " Removed EH side-effects.\n");
5819 }
5820 /* Likewise for AB side-effects. */
5821 if (can_make_abnormal_goto
5822 && !stmt_can_make_abnormal_goto (stmt))
5823 {
5824 bitmap_set_bit (need_ab_cleanup,
5825 gimple_bb (stmt)->index);
5826 if (dump_file && (dump_flags & TDF_DETAILS))
5827 fprintf (dump_file, " Removed AB side-effects.\n");
5828 }
5829 update_stmt (stmt);
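/* If the statement's virtual definition changed (for example a store was
   folded away), value-number the old VDEF to the incoming VUSE so that
   memory uses of it resolve past this statement.  */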
5830 if (vdef != gimple_vdef (stmt))
5831 VN_INFO (vdef)->valnum = vuse;
5832 }
5834 /* Make new values available - for fully redundant LHS we
5835 continue with the next stmt above and skip this. */
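/* eliminate_push_avail makes each remaining def the current leader for its
   value number, so equal computations in dominated blocks can be replaced
   by it.  */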
5836 def_operand_p defp;
5837 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
5838 eliminate_push_avail (DEF_FROM_PTR (defp));
5839 }
5841 /* Replace destination PHI arguments. */
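/* Only PHI arguments on edges that value numbering found executable are
   considered for propagation.  */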
5842 FOR_EACH_EDGE (e, ei, b->succs)
5843 if (e->flags & EDGE_EXECUTABLE)
5844 for (gphi_iterator gsi = gsi_start_phis (e->dest);
5845 !gsi_end_p (gsi);
5846 gsi_next (&gsi))
5847 {
5848 gphi *phi = gsi.phi ();
5849 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
5850 tree arg = USE_FROM_PTR (use_p);
5851 if (TREE_CODE (arg) != SSA_NAME
5852 || virtual_operand_p (arg))
5853 continue;
5854 tree sprime = eliminate_avail (arg);
5855 if (sprime && may_propagate_copy (arg, sprime))
5856 propagate_value (use_p, sprime);
5857 }
5858 return NULL;
5859 }
5861 /* Make leaders that went out of scope no longer available, restoring any previously available leader. */
5863 void
5864 eliminate_dom_walker::after_dom_children (basic_block)
5865 {
5866 tree entry;
5867 while ((entry = avail_stack.pop ()) != NULL_TREE)
5868 {
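/* Entries pushed by eliminate_push_avail are either the leader itself (when
   its value had no leader yet) or the leader it superseded; a NULL_TREE
   marker, pushed when the block was entered, delimits each block's entries.
   Unwinding to the marker therefore clears the slot or restores the
   previous leader.  */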
5869 tree valnum = VN_INFO (entry)->valnum;
5870 tree old = avail[SSA_NAME_VERSION (valnum)];
5871 if (old == entry)
5872 avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
5873 else
5874 avail[SSA_NAME_VERSION (valnum)] = entry;
5875 }
5876 }
5878 /* Eliminate fully redundant computations. */
5880 unsigned int
5881 vn_eliminate (bitmap inserted_exprs)
5882 {
5883 eliminate_dom_walker el (CDI_DOMINATORS, inserted_exprs);
5884 el.avail.reserve (num_ssa_names);
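/* avail is indexed by the SSA version of a value number, so room for all
   current SSA names is reserved up front.  */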
5886 el.walk (cfun->cfg->x_entry_block_ptr);
5888 /* We cannot remove stmts during BB walk, especially not release SSA
5889 names there as this confuses the VN machinery. The stmts ending
5890 up in to_remove are either stores or simple copies.
5891 Remove stmts in reverse order to make debug stmt creation possible. */
5892 while (!el.to_remove.is_empty ())
5893 {
5894 gimple *stmt = el.to_remove.pop ();
5896 if (dump_file && (dump_flags & TDF_DETAILS))
5897 {
5898 fprintf (dump_file, "Removing dead stmt ");
5899 print_gimple_stmt (dump_file, stmt, 0, 0);
5900 }
5902 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5903 if (gimple_code (stmt) == GIMPLE_PHI)
5904 remove_phi_node (&gsi, true);
5905 else
5906 {
5907 basic_block bb = gimple_bb (stmt);
5908 unlink_stmt_vdef (stmt);
5909 if (gsi_remove (&gsi, true))
5910 bitmap_set_bit (el.need_eh_cleanup, bb->index);
5911 if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
5912 bitmap_set_bit (el.need_ab_cleanup, bb->index);
5913 release_defs (stmt);
5914 }
5916 /* Removing a stmt may expose a forwarder block. */
5917 el.el_todo |= TODO_cleanup_cfg;
5918 }
5920 /* Fixup stmts that became noreturn calls. This may require splitting
5921 blocks and thus isn't possible during the dominator walk. Do this
5922 in reverse order so we don't inadvertently remove a stmt we want to
5923 fix up by visiting a dominating now-noreturn call first. */
5924 while (!el.to_fixup.is_empty ())
5925 {
5926 gimple *stmt = el.to_fixup.pop ();
5928 if (dump_file && (dump_flags & TDF_DETAILS))
5929 {
5930 fprintf (dump_file, "Fixing up noreturn call ");
5931 print_gimple_stmt (dump_file, stmt, 0);
5932 }
5934 if (fixup_noreturn_call (stmt))
5935 el.el_todo |= TODO_cleanup_cfg;
5936 }
5938 bool do_eh_cleanup = !bitmap_empty_p (el.need_eh_cleanup);
5939 bool do_ab_cleanup = !bitmap_empty_p (el.need_ab_cleanup);
5941 if (do_eh_cleanup)
5942 gimple_purge_all_dead_eh_edges (el.need_eh_cleanup);
5944 if (do_ab_cleanup)
5945 gimple_purge_all_dead_abnormal_call_edges (el.need_ab_cleanup);
5947 if (do_eh_cleanup || do_ab_cleanup)
5948 el.el_todo |= TODO_cleanup_cfg;
5950 statistics_counter_event (cfun, "Eliminated", el.eliminations);
5951 statistics_counter_event (cfun, "Insertions", el.insertions);
5953 return el.el_todo;
5954 }
5957 namespace {
5959 const pass_data pass_data_fre =
5960 {
5961 GIMPLE_PASS, /* type */
5962 "fre", /* name */
5963 OPTGROUP_NONE, /* optinfo_flags */
5964 TV_TREE_FRE, /* tv_id */
5965 ( PROP_cfg | PROP_ssa ), /* properties_required */
5966 0, /* properties_provided */
5967 0, /* properties_destroyed */
5968 0, /* todo_flags_start */
5969 0, /* todo_flags_finish */
5970 };
5972 class pass_fre : public gimple_opt_pass
5973 {
5974 public:
5975 pass_fre (gcc::context *ctxt)
5976 : gimple_opt_pass (pass_data_fre, ctxt)
5977 {}
5979 /* opt_pass methods: */
5980 opt_pass * clone () { return new pass_fre (m_ctxt); }
5981 virtual bool gate (function *) { return flag_tree_fre != 0; }
5982 virtual unsigned int execute (function *);
5984 }; // class pass_fre
5986 unsigned int
5987 pass_fre::execute (function *)
5988 {
5989 unsigned int todo = 0;
5991 run_scc_vn (VN_WALKREWRITE);
5993 /* Remove all the redundant expressions. */
5994 todo |= vn_eliminate (NULL);
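/* Plain FRE inserts no new expressions, so no bitmap of inserted SSA names
   is passed here; PRE reuses the same eliminator and supplies its
   insertions.  */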
5996 scc_vn_restore_ssa_info ();
5997 free_scc_vn ();
5999 return todo;
6000 }
6002 } // anon namespace
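/* Entry point for the pass manager; the FRE pass is declared in
   tree-pass.h and scheduled via passes.def.  */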
6004 gimple_opt_pass *
6005 make_pass_fre (gcc::context *ctxt)
6006 {
6007 return new pass_fre (ctxt);