1 /* SCC value numbering for trees
2 Copyright (C) 2006-2017 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimplify.h"
45 #include "flags.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "dumpfile.h"
55 #include "cfgloop.h"
56 #include "params.h"
57 #include "tree-ssa-propagate.h"
58 #include "tree-cfg.h"
59 #include "domwalk.h"
60 #include "gimple-iterator.h"
61 #include "gimple-match.h"
62 #include "stringpool.h"
63 #include "attribs.h"
64 #include "tree-pass.h"
65 #include "statistics.h"
66 #include "langhooks.h"
67 #include "ipa-utils.h"
68 #include "dbgcnt.h"
69 #include "tree-cfgcleanup.h"
70 #include "tree-ssa-loop.h"
71 #include "tree-scalar-evolution.h"
72 #include "tree-ssa-sccvn.h"
74 /* This algorithm is based on the SCC algorithm presented by Keith
75 Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
76 (http://citeseer.ist.psu.edu/41805.html). In
77 straight line code, it is equivalent to a regular hash based value
78 numbering that is performed in reverse postorder.
80 For code with cycles, there are two alternatives, both of which
81 require keeping the hashtables separate from the actual list of
82 value numbers for SSA names.
84 1. Iterate value numbering in an RPO walk of the blocks, removing
85 all the entries from the hashtable after each iteration (but
86 keeping the SSA name->value number mapping between iterations).
87 Iterate until it does not change.
89 2. Perform value numbering as part of an SCC walk on the SSA graph,
90 iterating only the cycles in the SSA graph until they do not change
91 (using a separate, optimistic hashtable for value numbering the SCC
92 operands).
94 The second is not just faster in practice (because most SSA graph
95 cycles do not involve all the variables in the graph), it also has
96 some nice properties.
98 One of these nice properties is that when we pop an SCC off the
99 stack, we are guaranteed to have processed all the operands coming from
100 *outside of that SCC*, so we do not need to do anything special to
101 ensure they have value numbers.
103 Another nice property is that the SCC walk is done as part of a DFS
104 of the SSA graph, which makes it easy to perform combining and
105 simplifying operations at the same time.
107 The code below is deliberately written in a way that makes it easy
108 to separate the SCC walk from the other work it does.
110 In order to propagate constants through the code, we track which
111 expressions contain constants, and use those while folding. In
112 theory, we could also track expressions whose value numbers are
113 replaced, in case we end up folding based on expression
114 identities.
116 In order to value number memory, we assign value numbers to vuses.
117 This enables us to note that, for example, stores of the same
118 value to the same address from the same starting memory state are
119 equivalent.
120 TODO:
122 1. We can iterate only the changing portions of the SCCs, but
123 I have not seen an SCC big enough for this to be a win.
124 2. If you differentiate between phi nodes for loops and phi nodes
125 for if-then-else, you can properly consider phi nodes in different
126 blocks for equivalence.
127 3. We could value number vuses in more cases, particularly, whole
128 structure copies.
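/* An illustrative sketch, not GCC code (all names are hypothetical): the
   straight-line case described above boils down to plain value numbering in
   a single pass: an expression with the same opcode and already-numbered
   operands as an earlier one gets the earlier number.  A real implementation
   hashes the triple instead of scanning linearly.  */
#if 0
struct toy_vn_entry { int opcode; int arg1; int arg2; int value; };

/* Return the value number recorded for (OPCODE, ARG1, ARG2), assigning a
   fresh number if the expression has not been seen yet.  TABLE holds the
   entries; *N is the number of entries currently in use.  */
static int
toy_value_number (struct toy_vn_entry *table, int *n,
                  int opcode, int arg1, int arg2)
{
  for (int i = 0; i < *n; ++i)
    if (table[i].opcode == opcode
        && table[i].arg1 == arg1
        && table[i].arg2 == arg2)
      return table[i].value;        /* Redundant computation: reuse it.  */
  table[*n].opcode = opcode;
  table[*n].arg1 = arg1;
  table[*n].arg2 = arg2;
  table[*n].value = *n + 1;         /* Fresh value number.  */
  return table[(*n)++].value;
}
#endif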
132 static tree *last_vuse_ptr;
133 static vn_lookup_kind vn_walk_kind;
134 static vn_lookup_kind default_vn_walk_kind;
135 bitmap const_parms;
137 /* vn_nary_op hashtable helpers. */
139 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
141 typedef vn_nary_op_s *compare_type;
142 static inline hashval_t hash (const vn_nary_op_s *);
143 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
146 /* Return the computed hashcode for nary operation VNO1. */
148 inline hashval_t
149 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
151 return vno1->hashcode;
154 /* Compare nary operations VNO1 and VNO2 and return true if they are
155 equivalent. */
157 inline bool
158 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
160 return vn_nary_op_eq (vno1, vno2);
163 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
164 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
167 /* vn_phi hashtable helpers. */
169 static int
170 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
172 struct vn_phi_hasher : pointer_hash <vn_phi_s>
174 static inline hashval_t hash (const vn_phi_s *);
175 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
176 static inline void remove (vn_phi_s *);
179 /* Return the computed hashcode for phi operation VP1. */
181 inline hashval_t
182 vn_phi_hasher::hash (const vn_phi_s *vp1)
184 return vp1->hashcode;
187 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
189 inline bool
190 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
192 return vn_phi_eq (vp1, vp2);
195 /* Free a phi operation structure PHI. */
197 inline void
198 vn_phi_hasher::remove (vn_phi_s *phi)
200 phi->phiargs.release ();
203 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
204 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
207 /* Compare two reference operands P1 and P2 for equality. Return true if
208 they are equal, and false otherwise. */
210 static int
211 vn_reference_op_eq (const void *p1, const void *p2)
213 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
214 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
216 return (vro1->opcode == vro2->opcode
217 /* We do not care about differences in type qualification. */
218 && (vro1->type == vro2->type
219 || (vro1->type && vro2->type
220 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
221 TYPE_MAIN_VARIANT (vro2->type))))
222 && expressions_equal_p (vro1->op0, vro2->op0)
223 && expressions_equal_p (vro1->op1, vro2->op1)
224 && expressions_equal_p (vro1->op2, vro2->op2));
227 /* Free a reference operation structure VR. */
229 static inline void
230 free_reference (vn_reference_s *vr)
232 vr->operands.release ();
236 /* vn_reference hashtable helpers. */
238 struct vn_reference_hasher : pointer_hash <vn_reference_s>
240 static inline hashval_t hash (const vn_reference_s *);
241 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
242 static inline void remove (vn_reference_s *);
245 /* Return the hashcode for a given reference operation VR1. */
247 inline hashval_t
248 vn_reference_hasher::hash (const vn_reference_s *vr1)
250 return vr1->hashcode;
253 inline bool
254 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
256 return vn_reference_eq (v, c);
259 inline void
260 vn_reference_hasher::remove (vn_reference_s *v)
262 free_reference (v);
265 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
266 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
269 /* The set of hashtables and alloc_pool's for their items. */
271 typedef struct vn_tables_s
273 vn_nary_op_table_type *nary;
274 vn_phi_table_type *phis;
275 vn_reference_table_type *references;
276 struct obstack nary_obstack;
277 object_allocator<vn_phi_s> *phis_pool;
278 object_allocator<vn_reference_s> *references_pool;
279 } *vn_tables_t;
282 /* vn_constant hashtable helpers. */
284 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
286 static inline hashval_t hash (const vn_constant_s *);
287 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
290 /* Hash table hash function for vn_constant_t. */
292 inline hashval_t
293 vn_constant_hasher::hash (const vn_constant_s *vc1)
295 return vc1->hashcode;
298 /* Hash table equality function for vn_constant_t. */
300 inline bool
301 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
303 if (vc1->hashcode != vc2->hashcode)
304 return false;
306 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
309 static hash_table<vn_constant_hasher> *constant_to_value_id;
310 static bitmap constant_value_ids;
313 /* Valid hashtables storing information we have proven to be
314 correct. */
316 static vn_tables_t valid_info;
318 /* Optimistic hashtables storing information we are making assumptions about
319 during iterations. */
321 static vn_tables_t optimistic_info;
323 /* Pointer to the set of hashtables that is currently being used.
324 Should always point to either the optimistic_info, or the
325 valid_info. */
327 static vn_tables_t current_info;
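/* A hedged sketch of how the tables above interact during SCC iteration;
   the helper below is hypothetical, not a routine from this file.  Cycles
   are first value-numbered against OPTIMISTIC_INFO, which is cleared and
   refilled every round until nothing changes, and only then are the results
   recorded in VALID_INFO:

     current_info = optimistic_info;
     do
       changed = revisit_scc_members (scc);
     while (changed);
     current_info = valid_info;
     revisit_scc_members (scc);
*/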
330 /* Reverse post order index for each basic block. */
332 static int *rpo_numbers;
334 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
336 /* Return the SSA value of the VUSE x, supporting released VDEFs
337 during elimination which will value-number the VDEF to the
338 associated VUSE (but not substitute in the whole lattice). */
340 static inline tree
341 vuse_ssa_val (tree x)
343 if (!x)
344 return NULL_TREE;
348 tree tem = SSA_VAL (x);
349 /* stmt walking can walk over a backedge and reach code we didn't
350 value-number yet. */
351 if (tem == VN_TOP)
352 return x;
353 x = tem;
355 while (SSA_NAME_IN_FREE_LIST (x));
357 return x;
360 /* This represents the top of the VN lattice, which is the universal
361 value. */
363 tree VN_TOP;
365 /* Unique counter for our value ids. */
367 static unsigned int next_value_id;
369 /* Next DFS number and the stack for strongly connected component
370 detection. */
372 static unsigned int next_dfs_num;
373 static vec<tree> sccstack;
377 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
378 are allocated on an obstack for locality reasons, and to free them
379 without looping over the vec. */
381 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
382 static struct obstack vn_ssa_aux_obstack;
384 /* Return whether there is value numbering information for a given SSA name. */
386 bool
387 has_VN_INFO (tree name)
389 if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
390 return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
391 return false;
394 /* Return the value numbering information for a given SSA name. */
396 vn_ssa_aux_t
397 VN_INFO (tree name)
399 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
400 gcc_checking_assert (res);
401 return res;
404 /* Set the value numbering info for a given SSA name to a given
405 value. */
407 static inline void
408 VN_INFO_SET (tree name, vn_ssa_aux_t value)
410 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
413 /* Initialize the value numbering info for a given SSA name.
414 This should be called just once for every SSA name. */
416 vn_ssa_aux_t
417 VN_INFO_GET (tree name)
419 vn_ssa_aux_t newinfo;
421 gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
422 || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
423 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
424 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
425 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
426 vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
427 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
428 return newinfo;
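/* A hedged usage sketch (not a quote from this file): initialization code
   typically does something like

     VN_INFO_GET (name)->valnum = VN_TOP;

   once per SSA name, and later queries go through VN_INFO / SSA_VAL.  */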
432 /* Return the vn_kind the expression computed by the stmt should be
433 associated with. */
435 enum vn_kind
436 vn_get_stmt_kind (gimple *stmt)
438 switch (gimple_code (stmt))
440 case GIMPLE_CALL:
441 return VN_REFERENCE;
442 case GIMPLE_PHI:
443 return VN_PHI;
444 case GIMPLE_ASSIGN:
446 enum tree_code code = gimple_assign_rhs_code (stmt);
447 tree rhs1 = gimple_assign_rhs1 (stmt);
448 switch (get_gimple_rhs_class (code))
450 case GIMPLE_UNARY_RHS:
451 case GIMPLE_BINARY_RHS:
452 case GIMPLE_TERNARY_RHS:
453 return VN_NARY;
454 case GIMPLE_SINGLE_RHS:
455 switch (TREE_CODE_CLASS (code))
457 case tcc_reference:
458 /* VOP-less references can go through unary case. */
459 if ((code == REALPART_EXPR
460 || code == IMAGPART_EXPR
461 || code == VIEW_CONVERT_EXPR
462 || code == BIT_FIELD_REF)
463 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
464 return VN_NARY;
466 /* Fallthrough. */
467 case tcc_declaration:
468 return VN_REFERENCE;
470 case tcc_constant:
471 return VN_CONSTANT;
473 default:
474 if (code == ADDR_EXPR)
475 return (is_gimple_min_invariant (rhs1)
476 ? VN_CONSTANT : VN_REFERENCE);
477 else if (code == CONSTRUCTOR)
478 return VN_NARY;
479 return VN_NONE;
481 default:
482 return VN_NONE;
485 default:
486 return VN_NONE;
490 /* Lookup a value id for CONSTANT and return it. If it does not
491 exist, return 0. */
493 unsigned int
494 get_constant_value_id (tree constant)
496 vn_constant_s **slot;
497 struct vn_constant_s vc;
499 vc.hashcode = vn_hash_constant_with_type (constant);
500 vc.constant = constant;
501 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
502 if (slot)
503 return (*slot)->value_id;
504 return 0;
507 /* Lookup a value id for CONSTANT, and if it does not exist, create a
508 new one and return it. If it does exist, return it. */
510 unsigned int
511 get_or_alloc_constant_value_id (tree constant)
513 vn_constant_s **slot;
514 struct vn_constant_s vc;
515 vn_constant_t vcp;
517 vc.hashcode = vn_hash_constant_with_type (constant);
518 vc.constant = constant;
519 slot = constant_to_value_id->find_slot (&vc, INSERT);
520 if (*slot)
521 return (*slot)->value_id;
523 vcp = XNEW (struct vn_constant_s);
524 vcp->hashcode = vc.hashcode;
525 vcp->constant = constant;
526 vcp->value_id = get_next_value_id ();
527 *slot = vcp;
528 bitmap_set_bit (constant_value_ids, vcp->value_id);
529 return vcp->value_id;
532 /* Return true if V is a value id for a constant. */
534 bool
535 value_id_constant_p (unsigned int v)
537 return bitmap_bit_p (constant_value_ids, v);
540 /* Compute the hash for a reference operand VRO1. */
542 static void
543 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
545 hstate.add_int (vro1->opcode);
546 if (vro1->op0)
547 inchash::add_expr (vro1->op0, hstate);
548 if (vro1->op1)
549 inchash::add_expr (vro1->op1, hstate);
550 if (vro1->op2)
551 inchash::add_expr (vro1->op2, hstate);
554 /* Compute a hash for the reference operation VR1 and return it. */
556 static hashval_t
557 vn_reference_compute_hash (const vn_reference_t vr1)
559 inchash::hash hstate;
560 hashval_t result;
561 int i;
562 vn_reference_op_t vro;
563 HOST_WIDE_INT off = -1;
564 bool deref = false;
566 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
568 if (vro->opcode == MEM_REF)
569 deref = true;
570 else if (vro->opcode != ADDR_EXPR)
571 deref = false;
572 if (vro->off != -1)
574 if (off == -1)
575 off = 0;
576 off += vro->off;
578 else
580 if (off != -1
581 && off != 0)
582 hstate.add_int (off);
583 off = -1;
584 if (deref
585 && vro->opcode == ADDR_EXPR)
587 if (vro->op0)
589 tree op = TREE_OPERAND (vro->op0, 0);
590 hstate.add_int (TREE_CODE (op));
591 inchash::add_expr (op, hstate);
594 else
595 vn_reference_op_compute_hash (vro, hstate);
598 result = hstate.end ();
599 /* ??? We would ICE later if we hash instead of adding that in. */
600 if (vr1->vuse)
601 result += SSA_NAME_VERSION (vr1->vuse);
603 return result;
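/* Illustrative note: the loop above folds runs of operands with known
   constant offsets into a single accumulated byte offset before hashing.
   E.g. a COMPONENT_REF at offset 4 over a MEM_REF at offset 8 contributes
   one combined offset of 12, so references that reach the same bytes
   through different component paths still hash identically.  */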
606 /* Return true if reference operations VR1 and VR2 are equivalent. This
607 means they have the same set of operands and vuses. */
609 bool
610 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
612 unsigned i, j;
614 /* Early out if this is not a hash collision. */
615 if (vr1->hashcode != vr2->hashcode)
616 return false;
618 /* The VOP needs to be the same. */
619 if (vr1->vuse != vr2->vuse)
620 return false;
622 /* If the operands are the same we are done. */
623 if (vr1->operands == vr2->operands)
624 return true;
626 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
627 return false;
629 if (INTEGRAL_TYPE_P (vr1->type)
630 && INTEGRAL_TYPE_P (vr2->type))
632 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
633 return false;
635 else if (INTEGRAL_TYPE_P (vr1->type)
636 && (TYPE_PRECISION (vr1->type)
637 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
638 return false;
639 else if (INTEGRAL_TYPE_P (vr2->type)
640 && (TYPE_PRECISION (vr2->type)
641 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
642 return false;
644 i = 0;
645 j = 0;
648 HOST_WIDE_INT off1 = 0, off2 = 0;
649 vn_reference_op_t vro1, vro2;
650 vn_reference_op_s tem1, tem2;
651 bool deref1 = false, deref2 = false;
652 for (; vr1->operands.iterate (i, &vro1); i++)
654 if (vro1->opcode == MEM_REF)
655 deref1 = true;
656 /* Do not look through a storage order barrier. */
657 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
658 return false;
659 if (vro1->off == -1)
660 break;
661 off1 += vro1->off;
663 for (; vr2->operands.iterate (j, &vro2); j++)
665 if (vro2->opcode == MEM_REF)
666 deref2 = true;
667 /* Do not look through a storage order barrier. */
668 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
669 return false;
670 if (vro2->off == -1)
671 break;
672 off2 += vro2->off;
674 if (off1 != off2)
675 return false;
676 if (deref1 && vro1->opcode == ADDR_EXPR)
678 memset (&tem1, 0, sizeof (tem1));
679 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
680 tem1.type = TREE_TYPE (tem1.op0);
681 tem1.opcode = TREE_CODE (tem1.op0);
682 vro1 = &tem1;
683 deref1 = false;
685 if (deref2 && vro2->opcode == ADDR_EXPR)
687 memset (&tem2, 0, sizeof (tem2));
688 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
689 tem2.type = TREE_TYPE (tem2.op0);
690 tem2.opcode = TREE_CODE (tem2.op0);
691 vro2 = &tem2;
692 deref2 = false;
694 if (deref1 != deref2)
695 return false;
696 if (!vn_reference_op_eq (vro1, vro2))
697 return false;
698 ++j;
699 ++i;
701 while (vr1->operands.length () != i
702 || vr2->operands.length () != j);
704 return true;
707 /* Copy the operations present in load/store REF into RESULT, a vector of
708 vn_reference_op_s's. */
710 static void
711 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
713 if (TREE_CODE (ref) == TARGET_MEM_REF)
715 vn_reference_op_s temp;
717 result->reserve (3);
719 memset (&temp, 0, sizeof (temp));
720 temp.type = TREE_TYPE (ref);
721 temp.opcode = TREE_CODE (ref);
722 temp.op0 = TMR_INDEX (ref);
723 temp.op1 = TMR_STEP (ref);
724 temp.op2 = TMR_OFFSET (ref);
725 temp.off = -1;
726 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
727 temp.base = MR_DEPENDENCE_BASE (ref);
728 result->quick_push (temp);
730 memset (&temp, 0, sizeof (temp));
731 temp.type = NULL_TREE;
732 temp.opcode = ERROR_MARK;
733 temp.op0 = TMR_INDEX2 (ref);
734 temp.off = -1;
735 result->quick_push (temp);
737 memset (&temp, 0, sizeof (temp));
738 temp.type = NULL_TREE;
739 temp.opcode = TREE_CODE (TMR_BASE (ref));
740 temp.op0 = TMR_BASE (ref);
741 temp.off = -1;
742 result->quick_push (temp);
743 return;
746 /* For non-calls, store the information that makes up the address. */
747 tree orig = ref;
748 while (ref)
750 vn_reference_op_s temp;
752 memset (&temp, 0, sizeof (temp));
753 temp.type = TREE_TYPE (ref);
754 temp.opcode = TREE_CODE (ref);
755 temp.off = -1;
757 switch (temp.opcode)
759 case MODIFY_EXPR:
760 temp.op0 = TREE_OPERAND (ref, 1);
761 break;
762 case WITH_SIZE_EXPR:
763 temp.op0 = TREE_OPERAND (ref, 1);
764 temp.off = 0;
765 break;
766 case MEM_REF:
767 /* The base address gets its own vn_reference_op_s structure. */
768 temp.op0 = TREE_OPERAND (ref, 1);
770 offset_int off = mem_ref_offset (ref);
771 if (wi::fits_shwi_p (off))
772 temp.off = off.to_shwi ();
774 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
775 temp.base = MR_DEPENDENCE_BASE (ref);
776 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
777 break;
778 case BIT_FIELD_REF:
779 /* Record bits, position and storage order. */
780 temp.op0 = TREE_OPERAND (ref, 1);
781 temp.op1 = TREE_OPERAND (ref, 2);
782 if (tree_fits_shwi_p (TREE_OPERAND (ref, 2)))
784 HOST_WIDE_INT off = tree_to_shwi (TREE_OPERAND (ref, 2));
785 if (off % BITS_PER_UNIT == 0)
786 temp.off = off / BITS_PER_UNIT;
788 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
789 break;
790 case COMPONENT_REF:
791 /* The field decl is enough to unambiguously specify the field;
792 a matching type is not necessary, and a mismatching type
793 is always a spurious difference. */
794 temp.type = NULL_TREE;
795 temp.op0 = TREE_OPERAND (ref, 1);
796 temp.op1 = TREE_OPERAND (ref, 2);
798 tree this_offset = component_ref_field_offset (ref);
799 if (this_offset
800 && TREE_CODE (this_offset) == INTEGER_CST)
802 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
803 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
805 offset_int off
806 = (wi::to_offset (this_offset)
807 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
808 if (wi::fits_shwi_p (off)
809 /* Prohibit value-numbering zero offset components
810 of addresses the same before the pass folding
811 __builtin_object_size had a chance to run
812 (checking cfun->after_inlining does the
813 trick here). */
814 && (TREE_CODE (orig) != ADDR_EXPR
815 || off != 0
816 || cfun->after_inlining))
817 temp.off = off.to_shwi ();
821 break;
822 case ARRAY_RANGE_REF:
823 case ARRAY_REF:
825 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
826 /* Record index as operand. */
827 temp.op0 = TREE_OPERAND (ref, 1);
828 /* Always record lower bounds and element size. */
829 temp.op1 = array_ref_low_bound (ref);
830 /* But record element size in units of the type alignment. */
831 temp.op2 = TREE_OPERAND (ref, 3);
832 temp.align = eltype->type_common.align;
833 if (! temp.op2)
834 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
835 size_int (TYPE_ALIGN_UNIT (eltype)));
836 if (TREE_CODE (temp.op0) == INTEGER_CST
837 && TREE_CODE (temp.op1) == INTEGER_CST
838 && TREE_CODE (temp.op2) == INTEGER_CST)
840 offset_int off = ((wi::to_offset (temp.op0)
841 - wi::to_offset (temp.op1))
842 * wi::to_offset (temp.op2)
843 * vn_ref_op_align_unit (&temp));
844 if (wi::fits_shwi_p (off))
845 temp.off = off.to_shwi();
848 break;
849 case VAR_DECL:
850 if (DECL_HARD_REGISTER (ref))
852 temp.op0 = ref;
853 break;
855 /* Fallthru. */
856 case PARM_DECL:
857 case CONST_DECL:
858 case RESULT_DECL:
859 /* Canonicalize decls to MEM[&decl] which is what we end up with
860 when valueizing MEM[ptr] with ptr = &decl. */
861 temp.opcode = MEM_REF;
862 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
863 temp.off = 0;
864 result->safe_push (temp);
865 temp.opcode = ADDR_EXPR;
866 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
867 temp.type = TREE_TYPE (temp.op0);
868 temp.off = -1;
869 break;
870 case STRING_CST:
871 case INTEGER_CST:
872 case COMPLEX_CST:
873 case VECTOR_CST:
874 case REAL_CST:
875 case FIXED_CST:
876 case CONSTRUCTOR:
877 case SSA_NAME:
878 temp.op0 = ref;
879 break;
880 case ADDR_EXPR:
881 if (is_gimple_min_invariant (ref))
883 temp.op0 = ref;
884 break;
886 break;
887 /* These are only interesting for their operands, their
888 existence, and their type. They will never be the last
889 ref in the chain of references (i.e. they require an
890 operand), so we don't have to put anything
891 for op* as it will be handled by the iteration. */
892 case REALPART_EXPR:
893 temp.off = 0;
894 break;
895 case VIEW_CONVERT_EXPR:
896 temp.off = 0;
897 temp.reverse = storage_order_barrier_p (ref);
898 break;
899 case IMAGPART_EXPR:
900 /* This is only interesting for its constant offset. */
901 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
902 break;
903 default:
904 gcc_unreachable ();
906 result->safe_push (temp);
908 if (REFERENCE_CLASS_P (ref)
909 || TREE_CODE (ref) == MODIFY_EXPR
910 || TREE_CODE (ref) == WITH_SIZE_EXPR
911 || (TREE_CODE (ref) == ADDR_EXPR
912 && !is_gimple_min_invariant (ref)))
913 ref = TREE_OPERAND (ref, 0);
914 else
915 ref = NULL_TREE;
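/* A hedged worked example of the decomposition above.  For a read from
   a.b[i_1], with a a local VAR_DECL, the operand vector comes out roughly
   as (outermost access first):

     { ARRAY_REF,     op0 = i_1, op1 = lower bound, op2 = element size }
     { COMPONENT_REF, op0 = FIELD_DECL for b }
     { MEM_REF,       op0 = 0 }
     { ADDR_EXPR,     op0 = &a }

   with the decl canonicalized to MEM[&a] as described in the VAR_DECL case.
   The exact fields filled in depend on the cases handled in the switch.  */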
919 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
920 operands in *OPS, the reference alias set SET and the reference type TYPE.
921 Return true if something useful was produced. */
923 bool
924 ao_ref_init_from_vn_reference (ao_ref *ref,
925 alias_set_type set, tree type,
926 vec<vn_reference_op_s> ops)
928 vn_reference_op_t op;
929 unsigned i;
930 tree base = NULL_TREE;
931 tree *op0_p = &base;
932 offset_int offset = 0;
933 offset_int max_size;
934 offset_int size = -1;
935 tree size_tree = NULL_TREE;
936 alias_set_type base_alias_set = -1;
938 /* First get the final access size from just the outermost expression. */
939 op = &ops[0];
940 if (op->opcode == COMPONENT_REF)
941 size_tree = DECL_SIZE (op->op0);
942 else if (op->opcode == BIT_FIELD_REF)
943 size_tree = op->op0;
944 else
946 machine_mode mode = TYPE_MODE (type);
947 if (mode == BLKmode)
948 size_tree = TYPE_SIZE (type);
949 else
950 size = int (GET_MODE_BITSIZE (mode));
952 if (size_tree != NULL_TREE
953 && TREE_CODE (size_tree) == INTEGER_CST)
954 size = wi::to_offset (size_tree);
956 /* Initially, maxsize is the same as the accessed element size.
957 In the following it will only grow (or become -1). */
958 max_size = size;
960 /* Compute cumulative bit-offset for nested component-refs and array-refs,
961 and find the ultimate containing object. */
962 FOR_EACH_VEC_ELT (ops, i, op)
964 switch (op->opcode)
966 /* These may be in the reference ops, but we cannot do anything
967 sensible with them here. */
968 case ADDR_EXPR:
969 /* Apart from ADDR_EXPR arguments to MEM_REF. */
970 if (base != NULL_TREE
971 && TREE_CODE (base) == MEM_REF
972 && op->op0
973 && DECL_P (TREE_OPERAND (op->op0, 0)))
975 vn_reference_op_t pop = &ops[i-1];
976 base = TREE_OPERAND (op->op0, 0);
977 if (pop->off == -1)
979 max_size = -1;
980 offset = 0;
982 else
983 offset += pop->off * BITS_PER_UNIT;
984 op0_p = NULL;
985 break;
987 /* Fallthru. */
988 case CALL_EXPR:
989 return false;
991 /* Record the base objects. */
992 case MEM_REF:
993 base_alias_set = get_deref_alias_set (op->op0);
994 *op0_p = build2 (MEM_REF, op->type,
995 NULL_TREE, op->op0);
996 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
997 MR_DEPENDENCE_BASE (*op0_p) = op->base;
998 op0_p = &TREE_OPERAND (*op0_p, 0);
999 break;
1001 case VAR_DECL:
1002 case PARM_DECL:
1003 case RESULT_DECL:
1004 case SSA_NAME:
1005 *op0_p = op->op0;
1006 op0_p = NULL;
1007 break;
1009 /* And now the usual component-reference style ops. */
1010 case BIT_FIELD_REF:
1011 offset += wi::to_offset (op->op1);
1012 break;
1014 case COMPONENT_REF:
1016 tree field = op->op0;
1017 /* We do not have a complete COMPONENT_REF tree here so we
1018 cannot use component_ref_field_offset. Do the interesting
1019 parts manually. */
1020 tree this_offset = DECL_FIELD_OFFSET (field);
1022 if (op->op1 || TREE_CODE (this_offset) != INTEGER_CST)
1023 max_size = -1;
1024 else
1026 offset_int woffset = (wi::to_offset (this_offset)
1027 << LOG2_BITS_PER_UNIT);
1028 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1029 offset += woffset;
1031 break;
1034 case ARRAY_RANGE_REF:
1035 case ARRAY_REF:
1036 /* We recorded the lower bound and the element size. */
1037 if (TREE_CODE (op->op0) != INTEGER_CST
1038 || TREE_CODE (op->op1) != INTEGER_CST
1039 || TREE_CODE (op->op2) != INTEGER_CST)
1040 max_size = -1;
1041 else
1043 offset_int woffset
1044 = wi::sext (wi::to_offset (op->op0) - wi::to_offset (op->op1),
1045 TYPE_PRECISION (TREE_TYPE (op->op0)));
1046 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1047 woffset <<= LOG2_BITS_PER_UNIT;
1048 offset += woffset;
1050 break;
1052 case REALPART_EXPR:
1053 break;
1055 case IMAGPART_EXPR:
1056 offset += size;
1057 break;
1059 case VIEW_CONVERT_EXPR:
1060 break;
1062 case STRING_CST:
1063 case INTEGER_CST:
1064 case COMPLEX_CST:
1065 case VECTOR_CST:
1066 case REAL_CST:
1067 case CONSTRUCTOR:
1068 case CONST_DECL:
1069 return false;
1071 default:
1072 return false;
1076 if (base == NULL_TREE)
1077 return false;
1079 ref->ref = NULL_TREE;
1080 ref->base = base;
1081 ref->ref_alias_set = set;
1082 if (base_alias_set != -1)
1083 ref->base_alias_set = base_alias_set;
1084 else
1085 ref->base_alias_set = get_alias_set (base);
1086 /* We discount volatiles from value-numbering elsewhere. */
1087 ref->volatile_p = false;
1089 if (!wi::fits_shwi_p (size) || wi::neg_p (size))
1091 ref->offset = 0;
1092 ref->size = -1;
1093 ref->max_size = -1;
1094 return true;
1097 ref->size = size.to_shwi ();
1099 if (!wi::fits_shwi_p (offset))
1101 ref->offset = 0;
1102 ref->max_size = -1;
1103 return true;
1106 ref->offset = offset.to_shwi ();
1108 if (!wi::fits_shwi_p (max_size) || wi::neg_p (max_size))
1109 ref->max_size = -1;
1110 else
1111 ref->max_size = max_size.to_shwi ();
1113 return true;
1116 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1117 vn_reference_op_s's. */
1119 static void
1120 copy_reference_ops_from_call (gcall *call,
1121 vec<vn_reference_op_s> *result)
1123 vn_reference_op_s temp;
1124 unsigned i;
1125 tree lhs = gimple_call_lhs (call);
1126 int lr;
1128 /* If two calls have a different non-SSA lhs, vdef value numbers should be
1129 different. By adding the lhs to the vector here, we ensure that the
1130 hashcode is different, guaranteeing a different value number. */
1131 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1133 memset (&temp, 0, sizeof (temp));
1134 temp.opcode = MODIFY_EXPR;
1135 temp.type = TREE_TYPE (lhs);
1136 temp.op0 = lhs;
1137 temp.off = -1;
1138 result->safe_push (temp);
1141 /* Copy the type, opcode, function, static chain and EH region, if any. */
1142 memset (&temp, 0, sizeof (temp));
1143 temp.type = gimple_call_return_type (call);
1144 temp.opcode = CALL_EXPR;
1145 temp.op0 = gimple_call_fn (call);
1146 temp.op1 = gimple_call_chain (call);
1147 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1148 temp.op2 = size_int (lr);
1149 temp.off = -1;
1150 if (gimple_call_with_bounds_p (call))
1151 temp.with_bounds = 1;
1152 result->safe_push (temp);
1154 /* Copy the call arguments. As they can be references as well,
1155 just chain them together. */
1156 for (i = 0; i < gimple_call_num_args (call); ++i)
1158 tree callarg = gimple_call_arg (call, i);
1159 copy_reference_ops_from_ref (callarg, result);
1163 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1164 *I_P to point to the last element of the replacement. */
1165 static bool
1166 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1167 unsigned int *i_p)
1169 unsigned int i = *i_p;
1170 vn_reference_op_t op = &(*ops)[i];
1171 vn_reference_op_t mem_op = &(*ops)[i - 1];
1172 tree addr_base;
1173 HOST_WIDE_INT addr_offset = 0;
1175 /* The only thing we have to do is, from &OBJ.foo.bar, add the offset
1176 of .foo.bar to the preceding MEM_REF offset and replace the
1177 address with &OBJ. */
1178 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1179 &addr_offset);
1180 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1181 if (addr_base != TREE_OPERAND (op->op0, 0))
1183 offset_int off = offset_int::from (wi::to_wide (mem_op->op0), SIGNED);
1184 off += addr_offset;
1185 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1186 op->op0 = build_fold_addr_expr (addr_base);
1187 if (tree_fits_shwi_p (mem_op->op0))
1188 mem_op->off = tree_to_shwi (mem_op->op0);
1189 else
1190 mem_op->off = -1;
1191 return true;
1193 return false;
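/* Illustrative example: an operand pair MEM_REF [offset 0], ADDR_EXPR &s.f,
   where field f lives at byte 8 of s, is rewritten by the routine above
   into MEM_REF [offset 8], ADDR_EXPR &s.  */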
1196 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1197 *I_P to point to the last element of the replacement. */
1198 static bool
1199 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1200 unsigned int *i_p)
1202 unsigned int i = *i_p;
1203 vn_reference_op_t op = &(*ops)[i];
1204 vn_reference_op_t mem_op = &(*ops)[i - 1];
1205 gimple *def_stmt;
1206 enum tree_code code;
1207 offset_int off;
1209 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1210 if (!is_gimple_assign (def_stmt))
1211 return false;
1213 code = gimple_assign_rhs_code (def_stmt);
1214 if (code != ADDR_EXPR
1215 && code != POINTER_PLUS_EXPR)
1216 return false;
1218 off = offset_int::from (wi::to_wide (mem_op->op0), SIGNED);
1220 /* The only thing we have to do is, from &OBJ.foo.bar, add the offset
1221 of .foo.bar to the preceding MEM_REF offset and replace the
1222 address with &OBJ. */
1223 if (code == ADDR_EXPR)
1225 tree addr, addr_base;
1226 HOST_WIDE_INT addr_offset;
1228 addr = gimple_assign_rhs1 (def_stmt);
1229 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1230 &addr_offset);
1231 /* If that didn't work because the address isn't invariant, propagate
1232 the reference tree from the address operation in case the current
1233 dereference has no offset. */
1234 if (!addr_base
1235 && *i_p == ops->length () - 1
1236 && off == 0
1237 /* This makes us disable this transform for PRE where the
1238 reference ops might be also used for code insertion which
1239 is invalid. */
1240 && default_vn_walk_kind == VN_WALKREWRITE)
1242 auto_vec<vn_reference_op_s, 32> tem;
1243 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1244 /* Make sure to preserve TBAA info. The only objects not
1245 wrapped in MEM_REFs that can have their address taken are
1246 STRING_CSTs. */
1247 if (tem.length () >= 2
1248 && tem[tem.length () - 2].opcode == MEM_REF)
1250 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1251 new_mem_op->op0
1252 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1253 wi::to_wide (new_mem_op->op0));
1255 else
1256 gcc_assert (tem.last ().opcode == STRING_CST);
1257 ops->pop ();
1258 ops->pop ();
1259 ops->safe_splice (tem);
1260 --*i_p;
1261 return true;
1263 if (!addr_base
1264 || TREE_CODE (addr_base) != MEM_REF)
1265 return false;
1267 off += addr_offset;
1268 off += mem_ref_offset (addr_base);
1269 op->op0 = TREE_OPERAND (addr_base, 0);
1271 else
1273 tree ptr, ptroff;
1274 ptr = gimple_assign_rhs1 (def_stmt);
1275 ptroff = gimple_assign_rhs2 (def_stmt);
1276 if (TREE_CODE (ptr) != SSA_NAME
1277 || TREE_CODE (ptroff) != INTEGER_CST)
1278 return false;
1280 off += wi::to_offset (ptroff);
1281 op->op0 = ptr;
1284 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1285 if (tree_fits_shwi_p (mem_op->op0))
1286 mem_op->off = tree_to_shwi (mem_op->op0);
1287 else
1288 mem_op->off = -1;
1289 if (TREE_CODE (op->op0) == SSA_NAME)
1290 op->op0 = SSA_VAL (op->op0);
1291 if (TREE_CODE (op->op0) != SSA_NAME)
1292 op->opcode = TREE_CODE (op->op0);
1294 /* And recurse. */
1295 if (TREE_CODE (op->op0) == SSA_NAME)
1296 vn_reference_maybe_forwprop_address (ops, i_p);
1297 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1298 vn_reference_fold_indirect (ops, i_p);
1299 return true;
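/* Illustrative example: the POINTER_PLUS_EXPR branch above turns the pair
   MEM_REF [offset 0], SSA_NAME p_2, where p_2 = p_1 + 4, into
   MEM_REF [offset 4], SSA_NAME p_1, and then recurses in case p_1 itself
   is defined by another address computation.  */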
1302 /* Optimize the reference REF to a constant if possible or return
1303 NULL_TREE if not. */
1305 tree
1306 fully_constant_vn_reference_p (vn_reference_t ref)
1308 vec<vn_reference_op_s> operands = ref->operands;
1309 vn_reference_op_t op;
1311 /* Try to simplify the translated expression if it is
1312 a call to a builtin function with at most two arguments. */
1313 op = &operands[0];
1314 if (op->opcode == CALL_EXPR
1315 && TREE_CODE (op->op0) == ADDR_EXPR
1316 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1317 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1318 && operands.length () >= 2
1319 && operands.length () <= 3)
1321 vn_reference_op_t arg0, arg1 = NULL;
1322 bool anyconst = false;
1323 arg0 = &operands[1];
1324 if (operands.length () > 2)
1325 arg1 = &operands[2];
1326 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1327 || (arg0->opcode == ADDR_EXPR
1328 && is_gimple_min_invariant (arg0->op0)))
1329 anyconst = true;
1330 if (arg1
1331 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1332 || (arg1->opcode == ADDR_EXPR
1333 && is_gimple_min_invariant (arg1->op0))))
1334 anyconst = true;
1335 if (anyconst)
1337 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1338 arg1 ? 2 : 1,
1339 arg0->op0,
1340 arg1 ? arg1->op0 : NULL);
1341 if (folded
1342 && TREE_CODE (folded) == NOP_EXPR)
1343 folded = TREE_OPERAND (folded, 0);
1344 if (folded
1345 && is_gimple_min_invariant (folded))
1346 return folded;
1350 /* Simplify reads from constants or constant initializers. */
1351 else if (BITS_PER_UNIT == 8
1352 && is_gimple_reg_type (ref->type)
1353 && (!INTEGRAL_TYPE_P (ref->type)
1354 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1356 HOST_WIDE_INT off = 0;
1357 HOST_WIDE_INT size;
1358 if (INTEGRAL_TYPE_P (ref->type))
1359 size = TYPE_PRECISION (ref->type);
1360 else
1361 size = tree_to_shwi (TYPE_SIZE (ref->type));
1362 if (size % BITS_PER_UNIT != 0
1363 || size > MAX_BITSIZE_MODE_ANY_MODE)
1364 return NULL_TREE;
1365 size /= BITS_PER_UNIT;
1366 unsigned i;
1367 for (i = 0; i < operands.length (); ++i)
1369 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1371 ++i;
1372 break;
1374 if (operands[i].off == -1)
1375 return NULL_TREE;
1376 off += operands[i].off;
1377 if (operands[i].opcode == MEM_REF)
1379 ++i;
1380 break;
1383 vn_reference_op_t base = &operands[--i];
1384 tree ctor = error_mark_node;
1385 tree decl = NULL_TREE;
1386 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1387 ctor = base->op0;
1388 else if (base->opcode == MEM_REF
1389 && base[1].opcode == ADDR_EXPR
1390 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1391 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1392 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1394 decl = TREE_OPERAND (base[1].op0, 0);
1395 if (TREE_CODE (decl) == STRING_CST)
1396 ctor = decl;
1397 else
1398 ctor = ctor_for_folding (decl);
1400 if (ctor == NULL_TREE)
1401 return build_zero_cst (ref->type);
1402 else if (ctor != error_mark_node)
1404 if (decl)
1406 tree res = fold_ctor_reference (ref->type, ctor,
1407 off * BITS_PER_UNIT,
1408 size * BITS_PER_UNIT, decl);
1409 if (res)
1411 STRIP_USELESS_TYPE_CONVERSION (res);
1412 if (is_gimple_min_invariant (res))
1413 return res;
1416 else
1418 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1419 int len = native_encode_expr (ctor, buf, size, off);
1420 if (len > 0)
1421 return native_interpret_expr (ref->type, buf, len);
1426 return NULL_TREE;
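/* A hedged example of the constant-initializer path above.  Assuming

     static const int tab[2] = { 1, 2 };

   a load whose operands are ARRAY_REF index 1 over MEM[&tab] accumulates a
   byte offset of 4 (with 4-byte int), ctor_for_folding returns the
   initializer, and fold_ctor_reference yields the constant 2.  */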
1429 /* Return true if OPS contain a storage order barrier. */
1431 static bool
1432 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1434 vn_reference_op_t op;
1435 unsigned i;
1437 FOR_EACH_VEC_ELT (ops, i, op)
1438 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1439 return true;
1441 return false;
1444 /* Transform any SSA_NAMEs in a vector of vn_reference_op_s
1445 structures into their value numbers. This is done in-place, and
1446 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1447 whether any operands were valueized. */
1449 static vec<vn_reference_op_s>
1450 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1452 vn_reference_op_t vro;
1453 unsigned int i;
1455 *valueized_anything = false;
1457 FOR_EACH_VEC_ELT (orig, i, vro)
1459 if (vro->opcode == SSA_NAME
1460 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1462 tree tem = SSA_VAL (vro->op0);
1463 if (tem != vro->op0)
1465 *valueized_anything = true;
1466 vro->op0 = tem;
1468 /* If it transforms from an SSA_NAME to a constant, update
1469 the opcode. */
1470 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1471 vro->opcode = TREE_CODE (vro->op0);
1473 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1475 tree tem = SSA_VAL (vro->op1);
1476 if (tem != vro->op1)
1478 *valueized_anything = true;
1479 vro->op1 = tem;
1482 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1484 tree tem = SSA_VAL (vro->op2);
1485 if (tem != vro->op2)
1487 *valueized_anything = true;
1488 vro->op2 = tem;
1491 /* If it transforms from an SSA_NAME to an address, fold with
1492 a preceding indirect reference. */
1493 if (i > 0
1494 && vro->op0
1495 && TREE_CODE (vro->op0) == ADDR_EXPR
1496 && orig[i - 1].opcode == MEM_REF)
1498 if (vn_reference_fold_indirect (&orig, &i))
1499 *valueized_anything = true;
1501 else if (i > 0
1502 && vro->opcode == SSA_NAME
1503 && orig[i - 1].opcode == MEM_REF)
1505 if (vn_reference_maybe_forwprop_address (&orig, &i))
1506 *valueized_anything = true;
1508 /* If it transforms a non-constant ARRAY_REF into a constant
1509 one, adjust the constant offset. */
1510 else if (vro->opcode == ARRAY_REF
1511 && vro->off == -1
1512 && TREE_CODE (vro->op0) == INTEGER_CST
1513 && TREE_CODE (vro->op1) == INTEGER_CST
1514 && TREE_CODE (vro->op2) == INTEGER_CST)
1516 offset_int off = ((wi::to_offset (vro->op0)
1517 - wi::to_offset (vro->op1))
1518 * wi::to_offset (vro->op2)
1519 * vn_ref_op_align_unit (vro));
1520 if (wi::fits_shwi_p (off))
1521 vro->off = off.to_shwi ();
1525 return orig;
1528 static vec<vn_reference_op_s>
1529 valueize_refs (vec<vn_reference_op_s> orig)
1531 bool tem;
1532 return valueize_refs_1 (orig, &tem);
1535 static vec<vn_reference_op_s> shared_lookup_references;
1537 /* Create a vector of vn_reference_op_s structures from REF, a
1538 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1539 this function. *VALUEIZED_ANYTHING will specify whether any
1540 operands were valueized. */
1542 static vec<vn_reference_op_s>
1543 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1545 if (!ref)
1546 return vNULL;
1547 shared_lookup_references.truncate (0);
1548 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1549 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1550 valueized_anything);
1551 return shared_lookup_references;
1554 /* Create a vector of vn_reference_op_s structures from CALL, a
1555 call statement. The vector is shared among all callers of
1556 this function. */
1558 static vec<vn_reference_op_s>
1559 valueize_shared_reference_ops_from_call (gcall *call)
1561 if (!call)
1562 return vNULL;
1563 shared_lookup_references.truncate (0);
1564 copy_reference_ops_from_call (call, &shared_lookup_references);
1565 shared_lookup_references = valueize_refs (shared_lookup_references);
1566 return shared_lookup_references;
1569 /* Lookup a SCCVN reference operation VR in the current hash table.
1570 Returns the resulting value number if it exists in the hash table,
1571 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1572 vn_reference_t stored in the hashtable if something is found. */
1574 static tree
1575 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1577 vn_reference_s **slot;
1578 hashval_t hash;
1580 hash = vr->hashcode;
1581 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1582 if (!slot && current_info == optimistic_info)
1583 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1584 if (slot)
1586 if (vnresult)
1587 *vnresult = (vn_reference_t)*slot;
1588 return ((vn_reference_t)*slot)->result;
1591 return NULL_TREE;
1594 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1595 with the current VUSE and performs the expression lookup. */
1597 static void *
1598 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1599 unsigned int cnt, void *vr_)
1601 vn_reference_t vr = (vn_reference_t)vr_;
1602 vn_reference_s **slot;
1603 hashval_t hash;
1605 /* This bounds the stmt walks we perform on reference lookups
1606 to O(1) instead of O(N) where N is the number of dominating
1607 stores. */
1608 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1609 return (void *)-1;
1611 if (last_vuse_ptr)
1612 *last_vuse_ptr = vuse;
1614 /* Fixup vuse and hash. */
1615 if (vr->vuse)
1616 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1617 vr->vuse = vuse_ssa_val (vuse);
1618 if (vr->vuse)
1619 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1621 hash = vr->hashcode;
1622 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1623 if (!slot && current_info == optimistic_info)
1624 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1625 if (slot)
1626 return *slot;
1628 return NULL;
1631 /* Lookup an existing or insert a new vn_reference entry into the
1632 value table for the VUSE, SET, TYPE, OPERANDS reference which
1633 has the value VALUE, either a constant or an SSA name. */
1635 static vn_reference_t
1636 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1637 alias_set_type set,
1638 tree type,
1639 vec<vn_reference_op_s,
1640 va_heap> operands,
1641 tree value)
1643 vn_reference_s vr1;
1644 vn_reference_t result;
1645 unsigned value_id;
1646 vr1.vuse = vuse;
1647 vr1.operands = operands;
1648 vr1.type = type;
1649 vr1.set = set;
1650 vr1.hashcode = vn_reference_compute_hash (&vr1);
1651 if (vn_reference_lookup_1 (&vr1, &result))
1652 return result;
1653 if (TREE_CODE (value) == SSA_NAME)
1654 value_id = VN_INFO (value)->value_id;
1655 else
1656 value_id = get_or_alloc_constant_value_id (value);
1657 return vn_reference_insert_pieces (vuse, set, type,
1658 operands.copy (), value, value_id);
1661 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *stmt, tree result);
1662 static unsigned mprts_hook_cnt;
1664 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
1666 static tree
1667 vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops_)
1669 if (!rcode.is_tree_code ())
1670 return NULL_TREE;
1671 tree *ops = ops_;
1672 unsigned int length = TREE_CODE_LENGTH ((tree_code) rcode);
1673 if (rcode == CONSTRUCTOR
1674 /* ??? We arrive here with SCCVN's view (a decomposed CONSTRUCTOR),
1675 while GIMPLE / match-and-simplify see CONSTRUCTOR as a GENERIC tree. */
1676 && TREE_CODE (ops_[0]) == CONSTRUCTOR)
1678 length = CONSTRUCTOR_NELTS (ops_[0]);
1679 ops = XALLOCAVEC (tree, length);
1680 for (unsigned i = 0; i < length; ++i)
1681 ops[i] = CONSTRUCTOR_ELT (ops_[0], i)->value;
1683 vn_nary_op_t vnresult = NULL;
1684 tree res = vn_nary_op_lookup_pieces (length, (tree_code) rcode,
1685 type, ops, &vnresult);
1686 /* We can end up endlessly recursing simplifications if the lookup above
1687 presents us with a def-use chain that mirrors the original simplification.
1688 See PR80887 for an example. Limit successful lookup artificially
1689 to 10 times if we are called as mprts_hook. */
1690 if (res
1691 && mprts_hook
1692 && --mprts_hook_cnt == 0)
1694 if (dump_file && (dump_flags & TDF_DETAILS))
1695 fprintf (dump_file, "Resetting mprts_hook after too many "
1696 "invocations.\n");
1697 mprts_hook = NULL;
1699 return res;
1702 /* Return a value-number for RCODE OPS... either by looking up an existing
1703 value-number for the simplified result or by inserting the operation if
1704 INSERT is true. */
1706 static tree
1707 vn_nary_build_or_lookup_1 (code_helper rcode, tree type, tree *ops,
1708 bool insert)
1710 tree result = NULL_TREE;
1711 /* We will be creating a value number for
1712 RCODE (OPS...).
1713 So first simplify and lookup this expression to see if it
1714 is already available. */
1715 mprts_hook = vn_lookup_simplify_result;
1716 mprts_hook_cnt = 9;
1717 bool res = false;
1718 switch (TREE_CODE_LENGTH ((tree_code) rcode))
1720 case 1:
1721 res = gimple_resimplify1 (NULL, &rcode, type, ops, vn_valueize);
1722 break;
1723 case 2:
1724 res = gimple_resimplify2 (NULL, &rcode, type, ops, vn_valueize);
1725 break;
1726 case 3:
1727 res = gimple_resimplify3 (NULL, &rcode, type, ops, vn_valueize);
1728 break;
1730 mprts_hook = NULL;
1731 gimple *new_stmt = NULL;
1732 if (res
1733 && gimple_simplified_result_is_gimple_val (rcode, ops))
1734 /* The expression is already available. */
1735 result = ops[0];
1736 else
1738 tree val = vn_lookup_simplify_result (rcode, type, ops);
1739 if (!val && insert)
1741 gimple_seq stmts = NULL;
1742 result = maybe_push_res_to_seq (rcode, type, ops, &stmts);
1743 if (result)
1745 gcc_assert (gimple_seq_singleton_p (stmts));
1746 new_stmt = gimple_seq_first_stmt (stmts);
1749 else
1750 /* The expression is already available. */
1751 result = val;
1753 if (new_stmt)
1755 /* The expression is not yet available, value-number lhs to
1756 the new SSA_NAME we created. */
1757 /* Initialize value-number information properly. */
1758 VN_INFO_GET (result)->valnum = result;
1759 VN_INFO (result)->value_id = get_next_value_id ();
1760 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1761 new_stmt);
1762 VN_INFO (result)->needs_insertion = true;
1763 /* ??? PRE phi-translation inserts NARYs without corresponding
1764 SSA name result. Re-use those but set their result according
1765 to the stmt we just built. */
1766 vn_nary_op_t nary = NULL;
1767 vn_nary_op_lookup_stmt (new_stmt, &nary);
1768 if (nary)
1770 gcc_assert (nary->result == NULL_TREE);
1771 nary->result = gimple_assign_lhs (new_stmt);
1773 /* As all "inserted" statements are singleton SCCs, insert
1774 to the valid table. This is strictly needed to
1775 avoid re-generating new value SSA_NAMEs for the same
1776 expression during SCC iteration over and over (the
1777 optimistic table gets cleared after each iteration).
1778 We do not need to insert into the optimistic table, as
1779 lookups there will fall back to the valid table. */
1780 else if (current_info == optimistic_info)
1782 current_info = valid_info;
1783 vn_nary_op_insert_stmt (new_stmt, result);
1784 current_info = optimistic_info;
1786 else
1787 vn_nary_op_insert_stmt (new_stmt, result);
1788 if (dump_file && (dump_flags & TDF_DETAILS))
1790 fprintf (dump_file, "Inserting name ");
1791 print_generic_expr (dump_file, result);
1792 fprintf (dump_file, " for expression ");
1793 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1794 fprintf (dump_file, "\n");
1797 return result;
1800 /* Return a value-number for RCODE OPS... either by looking up an existing
1801 value-number for the simplified result or by inserting the operation. */
1803 static tree
1804 vn_nary_build_or_lookup (code_helper rcode, tree type, tree *ops)
1806 return vn_nary_build_or_lookup_1 (rcode, type, ops, true);
1809 /* Try to simplify the nary expression NARY and return
1810 its value if present. */
1812 tree
1813 vn_nary_simplify (vn_nary_op_t nary)
1815 if (nary->length > 3)
1816 return NULL_TREE;
1817 tree ops[3];
1818 memcpy (ops, nary->op, sizeof (tree) * nary->length);
1819 return vn_nary_build_or_lookup_1 (nary->opcode, nary->type, ops, false);
1823 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1824 from the statement defining VUSE and, if not successful, tries to
1825 translate *REF and VR_ through an aggregate copy at the definition
1826 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1827 of *REF and *VR. If only disambiguation was performed then
1828 *DISAMBIGUATE_ONLY is set to true. */
1830 static void *
1831 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1832 bool *disambiguate_only)
1834 vn_reference_t vr = (vn_reference_t)vr_;
1835 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1836 tree base = ao_ref_base (ref);
1837 HOST_WIDE_INT offset, maxsize;
1838 static vec<vn_reference_op_s> lhs_ops;
1839 ao_ref lhs_ref;
1840 bool lhs_ref_ok = false;
1842 /* If the reference is based on a parameter that was determined as
1843 pointing to readonly memory, it doesn't change. */
1844 if (TREE_CODE (base) == MEM_REF
1845 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1846 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1847 && bitmap_bit_p (const_parms,
1848 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1850 *disambiguate_only = true;
1851 return NULL;
1854 /* First try to disambiguate after value-replacing in the definition's LHS. */
1855 if (is_gimple_assign (def_stmt))
1857 tree lhs = gimple_assign_lhs (def_stmt);
1858 bool valueized_anything = false;
1859 /* Avoid re-allocation overhead. */
1860 lhs_ops.truncate (0);
1861 copy_reference_ops_from_ref (lhs, &lhs_ops);
1862 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1863 if (valueized_anything)
1865 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1866 get_alias_set (lhs),
1867 TREE_TYPE (lhs), lhs_ops);
1868 if (lhs_ref_ok
1869 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1871 *disambiguate_only = true;
1872 return NULL;
1875 else
1877 ao_ref_init (&lhs_ref, lhs);
1878 lhs_ref_ok = true;
1881 /* If we reach a clobbering statement try to skip it and see if
1882 we find a VN result with exactly the same value as the
1883 possible clobber. In this case we can ignore the clobber
1884 and return the found value.
1885 Note that we don't need to worry about partial overlapping
1886 accesses as we then can use TBAA to disambiguate against the
1887 clobbering statement when looking up a load (thus the
1888 VN_WALKREWRITE guard). */
1889 if (vn_walk_kind == VN_WALKREWRITE
1890 && is_gimple_reg_type (TREE_TYPE (lhs))
1891 && types_compatible_p (TREE_TYPE (lhs), vr->type))
1893 tree *saved_last_vuse_ptr = last_vuse_ptr;
1894 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
1895 last_vuse_ptr = NULL;
1896 tree saved_vuse = vr->vuse;
1897 hashval_t saved_hashcode = vr->hashcode;
1898 void *res = vn_reference_lookup_2 (ref,
1899 gimple_vuse (def_stmt), 0, vr);
1900 /* Need to restore vr->vuse and vr->hashcode. */
1901 vr->vuse = saved_vuse;
1902 vr->hashcode = saved_hashcode;
1903 last_vuse_ptr = saved_last_vuse_ptr;
1904 if (res && res != (void *)-1)
1906 vn_reference_t vnresult = (vn_reference_t) res;
1907 if (vnresult->result
1908 && operand_equal_p (vnresult->result,
1909 gimple_assign_rhs1 (def_stmt), 0))
1910 return res;
1914 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1915 && gimple_call_num_args (def_stmt) <= 4)
1917 /* For builtin calls, valueize their arguments and call the
1918 alias oracle again. Valueization may improve points-to
1919 info of pointers and constify size and position arguments.
1920 Originally this was motivated by PR61034, which has
1921 conditional calls to free falsely clobbering ref because
1922 of imprecise points-to info of the argument. */
1923 tree oldargs[4];
1924 bool valueized_anything = false;
1925 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1927 oldargs[i] = gimple_call_arg (def_stmt, i);
1928 tree val = vn_valueize (oldargs[i]);
1929 if (val != oldargs[i])
1931 gimple_call_set_arg (def_stmt, i, val);
1932 valueized_anything = true;
1935 if (valueized_anything)
1937 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1938 ref);
1939 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1940 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1941 if (!res)
1943 *disambiguate_only = true;
1944 return NULL;
1949 if (*disambiguate_only)
1950 return (void *)-1;
1952 offset = ref->offset;
1953 maxsize = ref->max_size;
1955 /* If we cannot constrain the size of the reference we cannot
1956 test if anything kills it. */
1957 if (maxsize == -1)
1958 return (void *)-1;
1960 /* We can't deduce anything useful from clobbers. */
1961 if (gimple_clobber_p (def_stmt))
1962 return (void *)-1;
1964 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1965 from that definition.
1966 1) Memset. */
1967 if (is_gimple_reg_type (vr->type)
1968 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1969 && integer_zerop (gimple_call_arg (def_stmt, 1))
1970 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1971 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1973 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1974 tree base2;
1975 HOST_WIDE_INT offset2, size2, maxsize2;
1976 bool reverse;
1977 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1978 &reverse);
1979 size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
1980 if ((unsigned HOST_WIDE_INT)size2 / 8
1981 == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
1982 && maxsize2 != -1
1983 && operand_equal_p (base, base2, 0)
1984 && offset2 <= offset
1985 && offset2 + size2 >= offset + maxsize)
1987 tree val = build_zero_cst (vr->type);
1988 return vn_reference_lookup_or_insert_for_pieces
1989 (vuse, vr->set, vr->type, vr->operands, val);
1993 /* 2) Assignment from an empty CONSTRUCTOR. */
1994 else if (is_gimple_reg_type (vr->type)
1995 && gimple_assign_single_p (def_stmt)
1996 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1997 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1999 tree base2;
2000 HOST_WIDE_INT offset2, size2, maxsize2;
2001 bool reverse;
2002 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2003 &offset2, &size2, &maxsize2, &reverse);
2004 if (maxsize2 != -1
2005 && operand_equal_p (base, base2, 0)
2006 && offset2 <= offset
2007 && offset2 + size2 >= offset + maxsize)
2009 tree val = build_zero_cst (vr->type);
2010 return vn_reference_lookup_or_insert_for_pieces
2011 (vuse, vr->set, vr->type, vr->operands, val);
2015 /* 3) Assignment from a constant. We can use fold's native encode/interpret
2016 routines to extract the assigned bits. */
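     /* For example (illustrative): if a word was previously assigned the
	constant 0x01020304 and we now load a single byte from within it,
	the constant is encoded into its byte representation and the byte
	covered by the load is re-interpreted in the load's type.  */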
2017 else if (ref->size == maxsize
2018 && is_gimple_reg_type (vr->type)
2019 && !contains_storage_order_barrier_p (vr->operands)
2020 && gimple_assign_single_p (def_stmt)
2021 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2022 && maxsize % BITS_PER_UNIT == 0
2023 && offset % BITS_PER_UNIT == 0
2024 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2025 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2026 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2028 tree base2;
2029 HOST_WIDE_INT offset2, size2, maxsize2;
2030 bool reverse;
2031 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2032 &offset2, &size2, &maxsize2, &reverse);
2033 if (!reverse
2034 && maxsize2 != -1
2035 && maxsize2 == size2
2036 && size2 % BITS_PER_UNIT == 0
2037 && offset2 % BITS_PER_UNIT == 0
2038 && operand_equal_p (base, base2, 0)
2039 && offset2 <= offset
2040 && offset2 + size2 >= offset + maxsize)
2042 /* We support up to 512-bit values (for V8DFmode). */
2043 unsigned char buffer[64];
2044 int len;
2046 tree rhs = gimple_assign_rhs1 (def_stmt);
2047 if (TREE_CODE (rhs) == SSA_NAME)
2048 rhs = SSA_VAL (rhs);
2049 len = native_encode_expr (rhs,
2050 buffer, sizeof (buffer));
2051 if (len > 0)
2053 tree type = vr->type;
2054 /* Make sure to interpret in a type that has a range
2055 covering the whole access size. */
2056 if (INTEGRAL_TYPE_P (vr->type)
2057 && ref->size != TYPE_PRECISION (vr->type))
2058 type = build_nonstandard_integer_type (ref->size,
2059 TYPE_UNSIGNED (type));
2060 tree val = native_interpret_expr (type,
2061 buffer
2062 + ((offset - offset2)
2063 / BITS_PER_UNIT),
2064 ref->size / BITS_PER_UNIT);
2065 /* If we chop off bits because the type's precision doesn't
2066 match the memory access size, this is ok when optimizing
2067 reads but not when called from the DSE code during
2068 elimination. */
2069 if (val
2070 && type != vr->type)
2072 if (! int_fits_type_p (val, vr->type))
2073 val = NULL_TREE;
2074 else
2075 val = fold_convert (vr->type, val);
2078 if (val)
2079 return vn_reference_lookup_or_insert_for_pieces
2080 (vuse, vr->set, vr->type, vr->operands, val);
2085 /* 4) Assignment from an SSA name whose definition we may be able
2086 to access pieces from. */
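     /* For example (illustrative): after "x = y_1;" a load of part of x can
	be value-numbered as BIT_FIELD_REF <y_1, size, position>, built or
	looked up via vn_nary_build_or_lookup below.  */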
2087 else if (ref->size == maxsize
2088 && is_gimple_reg_type (vr->type)
2089 && !contains_storage_order_barrier_p (vr->operands)
2090 && gimple_assign_single_p (def_stmt)
2091 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2093 tree base2;
2094 HOST_WIDE_INT offset2, size2, maxsize2;
2095 bool reverse;
2096 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2097 &offset2, &size2, &maxsize2,
2098 &reverse);
2099 if (!reverse
2100 && maxsize2 != -1
2101 && maxsize2 == size2
2102 && operand_equal_p (base, base2, 0)
2103 && offset2 <= offset
2104 && offset2 + size2 >= offset + maxsize
2105 /* ??? We can't handle bitfield precision extracts without
2106 either using an alternate type for the BIT_FIELD_REF and
2107 then doing a conversion or possibly adjusting the offset
2108 according to endianness. */
2109 && (! INTEGRAL_TYPE_P (vr->type)
2110 || ref->size == TYPE_PRECISION (vr->type))
2111 && ref->size % BITS_PER_UNIT == 0)
2113 code_helper rcode = BIT_FIELD_REF;
2114 tree ops[3];
2115 ops[0] = SSA_VAL (gimple_assign_rhs1 (def_stmt));
2116 ops[1] = bitsize_int (ref->size);
2117 ops[2] = bitsize_int (offset - offset2);
2118 tree val = vn_nary_build_or_lookup (rcode, vr->type, ops);
2119 if (val
2120 && (TREE_CODE (val) != SSA_NAME
2121 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2123 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2124 (vuse, vr->set, vr->type, vr->operands, val);
2125 return res;
2130 /* 5) For aggregate copies translate the reference through them if
2131 the copy kills ref. */
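     /* For example (illustrative): given the aggregate copy "a = b;" a later
	lookup of a.f is rewritten into a lookup of b.f and the VUSE walk
	continues with the translated reference.  */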
2132 else if (vn_walk_kind == VN_WALKREWRITE
2133 && gimple_assign_single_p (def_stmt)
2134 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2135 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2136 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2138 tree base2;
2139 HOST_WIDE_INT maxsize2;
2140 int i, j, k;
2141 auto_vec<vn_reference_op_s> rhs;
2142 vn_reference_op_t vro;
2143 ao_ref r;
2145 if (!lhs_ref_ok)
2146 return (void *)-1;
2148 /* See if the assignment kills REF. */
2149 base2 = ao_ref_base (&lhs_ref);
2150 maxsize2 = lhs_ref.max_size;
2151 if (maxsize2 == -1
2152 || (base != base2
2153 && (TREE_CODE (base) != MEM_REF
2154 || TREE_CODE (base2) != MEM_REF
2155 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2156 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2157 TREE_OPERAND (base2, 1))))
2158 || !stmt_kills_ref_p (def_stmt, ref))
2159 return (void *)-1;
2161 /* Find the common base of ref and the lhs. lhs_ops already
2162 contains valueized operands for the lhs. */
2163 i = vr->operands.length () - 1;
2164 j = lhs_ops.length () - 1;
2165 while (j >= 0 && i >= 0
2166 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2168 i--;
2169 j--;
2172 /* ??? The innermost op should always be a MEM_REF and we already
2173 checked that the assignment to the lhs kills vr. Thus for
2174 aggregate copies using char[] types the vn_reference_op_eq
2175 may fail when comparing types for compatibility. But we really
2176 don't care here - further lookups with the rewritten operands
2177 will simply fail if we messed up types too badly. */
2178 HOST_WIDE_INT extra_off = 0;
2179 if (j == 0 && i >= 0
2180 && lhs_ops[0].opcode == MEM_REF
2181 && lhs_ops[0].off != -1)
2183 if (lhs_ops[0].off == vr->operands[i].off)
2184 i--, j--;
2185 else if (vr->operands[i].opcode == MEM_REF
2186 && vr->operands[i].off != -1)
2188 extra_off = vr->operands[i].off - lhs_ops[0].off;
2189 i--, j--;
2193 /* i now points to the first additional op.
2194 ??? LHS may not be completely contained in VR, one or more
2195 VIEW_CONVERT_EXPRs could be in its way. We could at least
2196 try handling outermost VIEW_CONVERT_EXPRs. */
2197 if (j != -1)
2198 return (void *)-1;
2200 /* Punt if the additional ops contain a storage order barrier. */
2201 for (k = i; k >= 0; k--)
2203 vro = &vr->operands[k];
2204 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2205 return (void *)-1;
2208 /* Now re-write REF to be based on the rhs of the assignment. */
2209 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2211 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2212 if (extra_off != 0)
2214 if (rhs.length () < 2
2215 || rhs[0].opcode != MEM_REF
2216 || rhs[0].off == -1)
2217 return (void *)-1;
2218 rhs[0].off += extra_off;
2219 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
2220 build_int_cst (TREE_TYPE (rhs[0].op0),
2221 extra_off));
2224 /* We need to pre-pend vr->operands[0..i] to rhs. */
2225 vec<vn_reference_op_s> old = vr->operands;
2226 if (i + 1 + rhs.length () > vr->operands.length ())
2227 vr->operands.safe_grow (i + 1 + rhs.length ());
2228 else
2229 vr->operands.truncate (i + 1 + rhs.length ());
2230 FOR_EACH_VEC_ELT (rhs, j, vro)
2231 vr->operands[i + 1 + j] = *vro;
2232 vr->operands = valueize_refs (vr->operands);
2233 if (old == shared_lookup_references)
2234 shared_lookup_references = vr->operands;
2235 vr->hashcode = vn_reference_compute_hash (vr);
2237 /* Try folding the new reference to a constant. */
2238 tree val = fully_constant_vn_reference_p (vr);
2239 if (val)
2240 return vn_reference_lookup_or_insert_for_pieces
2241 (vuse, vr->set, vr->type, vr->operands, val);
2243 /* Adjust *ref from the new operands. */
2244 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2245 return (void *)-1;
2246 /* This can happen with bitfields. */
2247 if (ref->size != r.size)
2248 return (void *)-1;
2249 *ref = r;
2251 /* Do not update last seen VUSE after translating. */
2252 last_vuse_ptr = NULL;
2254 /* Keep looking for the adjusted *REF / VR pair. */
2255 return NULL;
2258 /* 6) For memcpy copies translate the reference through them if
2259 the copy kills ref. */
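     /* For example (illustrative): after "memcpy (&a, &b, sizeof (a));" a
	load from a at some offset is rewritten as a MEM_REF based on &b at
	the corresponding offset and the walk continues from there.  */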
2260 else if (vn_walk_kind == VN_WALKREWRITE
2261 && is_gimple_reg_type (vr->type)
2262 /* ??? Handle BCOPY as well. */
2263 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2264 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2265 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2266 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2267 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2268 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2269 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2270 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
2272 tree lhs, rhs;
2273 ao_ref r;
2274 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
2275 vn_reference_op_s op;
2276 HOST_WIDE_INT at;
2278 /* Only handle non-variable, addressable refs. */
2279 if (ref->size != maxsize
2280 || offset % BITS_PER_UNIT != 0
2281 || ref->size % BITS_PER_UNIT != 0)
2282 return (void *)-1;
2284 /* Extract a pointer base and an offset for the destination. */
2285 lhs = gimple_call_arg (def_stmt, 0);
2286 lhs_offset = 0;
2287 if (TREE_CODE (lhs) == SSA_NAME)
2289 lhs = SSA_VAL (lhs);
2290 if (TREE_CODE (lhs) == SSA_NAME)
2292 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2293 if (gimple_assign_single_p (def_stmt)
2294 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2295 lhs = gimple_assign_rhs1 (def_stmt);
2298 if (TREE_CODE (lhs) == ADDR_EXPR)
2300 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2301 &lhs_offset);
2302 if (!tem)
2303 return (void *)-1;
2304 if (TREE_CODE (tem) == MEM_REF
2305 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2307 lhs = TREE_OPERAND (tem, 0);
2308 if (TREE_CODE (lhs) == SSA_NAME)
2309 lhs = SSA_VAL (lhs);
2310 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2312 else if (DECL_P (tem))
2313 lhs = build_fold_addr_expr (tem);
2314 else
2315 return (void *)-1;
2317 if (TREE_CODE (lhs) != SSA_NAME
2318 && TREE_CODE (lhs) != ADDR_EXPR)
2319 return (void *)-1;
2321 /* Extract a pointer base and an offset for the source. */
2322 rhs = gimple_call_arg (def_stmt, 1);
2323 rhs_offset = 0;
2324 if (TREE_CODE (rhs) == SSA_NAME)
2325 rhs = SSA_VAL (rhs);
2326 if (TREE_CODE (rhs) == ADDR_EXPR)
2328 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2329 &rhs_offset);
2330 if (!tem)
2331 return (void *)-1;
2332 if (TREE_CODE (tem) == MEM_REF
2333 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2335 rhs = TREE_OPERAND (tem, 0);
2336 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2338 else if (DECL_P (tem)
2339 || TREE_CODE (tem) == STRING_CST)
2340 rhs = build_fold_addr_expr (tem);
2341 else
2342 return (void *)-1;
2344 if (TREE_CODE (rhs) != SSA_NAME
2345 && TREE_CODE (rhs) != ADDR_EXPR)
2346 return (void *)-1;
2348 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2350 /* The bases of the destination and the reference have to agree. */
2351 at = offset / BITS_PER_UNIT;
2352 if (TREE_CODE (base) == MEM_REF)
2354 if (TREE_OPERAND (base, 0) != lhs
2355 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1)))
2356 return (void *) -1;
2357 at += tree_to_uhwi (TREE_OPERAND (base, 1));
2359 else if (!DECL_P (base)
2360 || TREE_CODE (lhs) != ADDR_EXPR
2361 || TREE_OPERAND (lhs, 0) != base)
2362 return (void *)-1;
2364 /* If the access is completely outside of the memcpy destination
2365 area there is no aliasing. */
2366 if (lhs_offset >= at + maxsize / BITS_PER_UNIT
2367 || lhs_offset + copy_size <= at)
2368 return NULL;
2369 /* And the access has to be contained within the memcpy destination. */
2370 if (lhs_offset > at
2371 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2372 return (void *)-1;
2374 /* Make room for 2 operands in the new reference. */
2375 if (vr->operands.length () < 2)
2377 vec<vn_reference_op_s> old = vr->operands;
2378 vr->operands.safe_grow_cleared (2);
2379 if (old == shared_lookup_references)
2380 shared_lookup_references = vr->operands;
2382 else
2383 vr->operands.truncate (2);
2385 /* The looked-through reference is a simple MEM_REF. */
2386 memset (&op, 0, sizeof (op));
2387 op.type = vr->type;
2388 op.opcode = MEM_REF;
2389 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2390 op.off = at - lhs_offset + rhs_offset;
2391 vr->operands[0] = op;
2392 op.type = TREE_TYPE (rhs);
2393 op.opcode = TREE_CODE (rhs);
2394 op.op0 = rhs;
2395 op.off = -1;
2396 vr->operands[1] = op;
2397 vr->hashcode = vn_reference_compute_hash (vr);
2399 /* Try folding the new reference to a constant. */
2400 tree val = fully_constant_vn_reference_p (vr);
2401 if (val)
2402 return vn_reference_lookup_or_insert_for_pieces
2403 (vuse, vr->set, vr->type, vr->operands, val);
2405 /* Adjust *ref from the new operands. */
2406 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2407 return (void *)-1;
2408 /* This can happen with bitfields. */
2409 if (ref->size != r.size)
2410 return (void *)-1;
2411 *ref = r;
2413 /* Do not update last seen VUSE after translating. */
2414 last_vuse_ptr = NULL;
2416 /* Keep looking for the adjusted *REF / VR pair. */
2417 return NULL;
2420 /* Bail out and stop walking. */
2421 return (void *)-1;
2424 /* Return a reference op vector from OP that can be used for
2425 vn_reference_lookup_pieces. The caller is responsible for releasing
2426 the vector. */
2428 vec<vn_reference_op_s>
2429 vn_reference_operands_for_lookup (tree op)
2431 bool valueized;
2432 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2435 /* Lookup a reference operation by its parts in the current hash table.
2436 Returns the resulting value number if it exists in the hash table,
2437 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2438 vn_reference_t stored in the hashtable if something is found. */
2440 tree
2441 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2442 vec<vn_reference_op_s> operands,
2443 vn_reference_t *vnresult, vn_lookup_kind kind)
2445 struct vn_reference_s vr1;
2446 vn_reference_t tmp;
2447 tree cst;
2449 if (!vnresult)
2450 vnresult = &tmp;
2451 *vnresult = NULL;
2453 vr1.vuse = vuse_ssa_val (vuse);
2454 shared_lookup_references.truncate (0);
2455 shared_lookup_references.safe_grow (operands.length ());
2456 memcpy (shared_lookup_references.address (),
2457 operands.address (),
2458 sizeof (vn_reference_op_s)
2459 * operands.length ());
2460 vr1.operands = operands = shared_lookup_references
2461 = valueize_refs (shared_lookup_references);
2462 vr1.type = type;
2463 vr1.set = set;
2464 vr1.hashcode = vn_reference_compute_hash (&vr1);
2465 if ((cst = fully_constant_vn_reference_p (&vr1)))
2466 return cst;
2468 vn_reference_lookup_1 (&vr1, vnresult);
2469 if (!*vnresult
2470 && kind != VN_NOWALK
2471 && vr1.vuse)
2473 ao_ref r;
2474 vn_walk_kind = kind;
2475 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2476 *vnresult =
2477 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2478 vn_reference_lookup_2,
2479 vn_reference_lookup_3,
2480 vuse_ssa_val, &vr1);
2481 gcc_checking_assert (vr1.operands == shared_lookup_references);
2484 if (*vnresult)
2485 return (*vnresult)->result;
2487 return NULL_TREE;
2490 /* Lookup OP in the current hash table, and return the resulting value
2491 number if it exists in the hash table. Return NULL_TREE if it does
2492 not exist in the hash table or if the result field of the structure
2493 was NULL. VNRESULT will be filled in with the vn_reference_t
2494 stored in the hashtable if one exists. When TBAA_P is false assume
2495 we are looking up a store and treat it as having alias-set zero. */
2497 tree
2498 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2499 vn_reference_t *vnresult, bool tbaa_p)
2501 vec<vn_reference_op_s> operands;
2502 struct vn_reference_s vr1;
2503 tree cst;
2504 bool valueized_anything;
2506 if (vnresult)
2507 *vnresult = NULL;
2509 vr1.vuse = vuse_ssa_val (vuse);
2510 vr1.operands = operands
2511 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2512 vr1.type = TREE_TYPE (op);
2513 vr1.set = tbaa_p ? get_alias_set (op) : 0;
2514 vr1.hashcode = vn_reference_compute_hash (&vr1);
2515 if ((cst = fully_constant_vn_reference_p (&vr1)))
2516 return cst;
2518 if (kind != VN_NOWALK
2519 && vr1.vuse)
2521 vn_reference_t wvnresult;
2522 ao_ref r;
2523 /* Make sure to use a valueized reference if we valueized anything.
2524 Otherwise preserve the full reference for advanced TBAA. */
2525 if (!valueized_anything
2526 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2527 vr1.operands))
2528 ao_ref_init (&r, op);
2529 if (! tbaa_p)
2530 r.ref_alias_set = r.base_alias_set = 0;
2531 vn_walk_kind = kind;
2532 wvnresult =
2533 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2534 vn_reference_lookup_2,
2535 vn_reference_lookup_3,
2536 vuse_ssa_val, &vr1);
2537 gcc_checking_assert (vr1.operands == shared_lookup_references);
2538 if (wvnresult)
2540 if (vnresult)
2541 *vnresult = wvnresult;
2542 return wvnresult->result;
2545 return NULL_TREE;
2548 return vn_reference_lookup_1 (&vr1, vnresult);
2551 /* Lookup CALL in the current hash table and return the entry in
2552 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2554 void
2555 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2556 vn_reference_t vr)
2558 if (vnresult)
2559 *vnresult = NULL;
2561 tree vuse = gimple_vuse (call);
2563 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2564 vr->operands = valueize_shared_reference_ops_from_call (call);
2565 vr->type = gimple_expr_type (call);
2566 vr->set = 0;
2567 vr->hashcode = vn_reference_compute_hash (vr);
2568 vn_reference_lookup_1 (vr, vnresult);
2571 /* Insert OP into the current hash table with a value number of
2572 RESULT, and return the resulting reference structure we created. */
2574 static vn_reference_t
2575 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2577 vn_reference_s **slot;
2578 vn_reference_t vr1;
2579 bool tem;
2581 vr1 = current_info->references_pool->allocate ();
2582 if (TREE_CODE (result) == SSA_NAME)
2583 vr1->value_id = VN_INFO (result)->value_id;
2584 else
2585 vr1->value_id = get_or_alloc_constant_value_id (result);
2586 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2587 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2588 vr1->type = TREE_TYPE (op);
2589 vr1->set = get_alias_set (op);
2590 vr1->hashcode = vn_reference_compute_hash (vr1);
2591 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2592 vr1->result_vdef = vdef;
2594 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2595 INSERT);
2597 /* Because we lookup stores using vuses, and value number failures
2598 using the vdefs (see visit_reference_op_store for how and why),
2599 it's possible that on failure we may try to insert an already
2600 inserted store. This is not wrong, there is no ssa name for a
2601 store that we could use as a differentiator anyway. Thus, unlike
2602 the other lookup functions, you cannot gcc_assert (!*slot)
2603 here. */
2605 /* But free the old slot in case of a collision. */
2606 if (*slot)
2607 free_reference (*slot);
2609 *slot = vr1;
2610 return vr1;
2613 /* Insert a reference by its pieces into the current hash table with
2614 a value number of RESULT. Return the resulting reference
2615 structure we created. */
2617 vn_reference_t
2618 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2619 vec<vn_reference_op_s> operands,
2620 tree result, unsigned int value_id)
2623 vn_reference_s **slot;
2624 vn_reference_t vr1;
2626 vr1 = current_info->references_pool->allocate ();
2627 vr1->value_id = value_id;
2628 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2629 vr1->operands = valueize_refs (operands);
2630 vr1->type = type;
2631 vr1->set = set;
2632 vr1->hashcode = vn_reference_compute_hash (vr1);
2633 if (result && TREE_CODE (result) == SSA_NAME)
2634 result = SSA_VAL (result);
2635 vr1->result = result;
2637 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2638 INSERT);
2640 /* At this point we should have all the things inserted that we have
2641 seen before, and we should never try inserting something that
2642 already exists. */
2643 gcc_assert (!*slot);
2644 if (*slot)
2645 free_reference (*slot);
2647 *slot = vr1;
2648 return vr1;
2651 /* Compute and return the hash value for nary operation VNO1. */
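/* Operands are canonicalized below (commutative operand swapping and
   comparison code swapping), so for example "b + a" hashes the same as
   "a + b" and "b > a" the same as "a < b".  */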
2653 static hashval_t
2654 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2656 inchash::hash hstate;
2657 unsigned i;
2659 for (i = 0; i < vno1->length; ++i)
2660 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2661 vno1->op[i] = SSA_VAL (vno1->op[i]);
2663 if (((vno1->length == 2
2664 && commutative_tree_code (vno1->opcode))
2665 || (vno1->length == 3
2666 && commutative_ternary_tree_code (vno1->opcode)))
2667 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2668 std::swap (vno1->op[0], vno1->op[1]);
2669 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2670 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2672 std::swap (vno1->op[0], vno1->op[1]);
2673 vno1->opcode = swap_tree_comparison (vno1->opcode);
2676 hstate.add_int (vno1->opcode);
2677 for (i = 0; i < vno1->length; ++i)
2678 inchash::add_expr (vno1->op[i], hstate);
2680 return hstate.end ();
2683 /* Compare nary operations VNO1 and VNO2 and return true if they are
2684 equivalent. */
2686 bool
2687 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2689 unsigned i;
2691 if (vno1->hashcode != vno2->hashcode)
2692 return false;
2694 if (vno1->length != vno2->length)
2695 return false;
2697 if (vno1->opcode != vno2->opcode
2698 || !types_compatible_p (vno1->type, vno2->type))
2699 return false;
2701 for (i = 0; i < vno1->length; ++i)
2702 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2703 return false;
2705 /* BIT_INSERT_EXPR has an implicit operand as the type precision
2706 of op1. Need to check to make sure they are the same. */
2707 if (vno1->opcode == BIT_INSERT_EXPR
2708 && TREE_CODE (vno1->op[1]) == INTEGER_CST
2709 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
2710 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
2711 return false;
2713 return true;
2716 /* Initialize VNO from the pieces provided. */
2718 static void
2719 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2720 enum tree_code code, tree type, tree *ops)
2722 vno->opcode = code;
2723 vno->length = length;
2724 vno->type = type;
2725 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2728 /* Initialize VNO from OP. */
2730 static void
2731 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2733 unsigned i;
2735 vno->opcode = TREE_CODE (op);
2736 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2737 vno->type = TREE_TYPE (op);
2738 for (i = 0; i < vno->length; ++i)
2739 vno->op[i] = TREE_OPERAND (op, i);
2742 /* Return the number of operands for a vn_nary ops structure from STMT. */
2744 static unsigned int
2745 vn_nary_length_from_stmt (gimple *stmt)
2747 switch (gimple_assign_rhs_code (stmt))
2749 case REALPART_EXPR:
2750 case IMAGPART_EXPR:
2751 case VIEW_CONVERT_EXPR:
2752 return 1;
2754 case BIT_FIELD_REF:
2755 return 3;
2757 case CONSTRUCTOR:
2758 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2760 default:
2761 return gimple_num_ops (stmt) - 1;
2765 /* Initialize VNO from STMT. */
2767 static void
2768 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2770 unsigned i;
2772 vno->opcode = gimple_assign_rhs_code (stmt);
2773 vno->type = gimple_expr_type (stmt);
2774 switch (vno->opcode)
2776 case REALPART_EXPR:
2777 case IMAGPART_EXPR:
2778 case VIEW_CONVERT_EXPR:
2779 vno->length = 1;
2780 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2781 break;
2783 case BIT_FIELD_REF:
2784 vno->length = 3;
2785 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2786 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2787 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2788 break;
2790 case CONSTRUCTOR:
2791 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2792 for (i = 0; i < vno->length; ++i)
2793 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2794 break;
2796 default:
2797 gcc_checking_assert (!gimple_assign_single_p (stmt));
2798 vno->length = gimple_num_ops (stmt) - 1;
2799 for (i = 0; i < vno->length; ++i)
2800 vno->op[i] = gimple_op (stmt, i + 1);
2804 /* Compute the hashcode for VNO and look for it in the hash table;
2805 return the resulting value number if it exists in the hash table.
2806 Return NULL_TREE if it does not exist in the hash table or if the
2807 result field of the operation is NULL. VNRESULT will contain the
2808 vn_nary_op_t from the hashtable if it exists. */
2810 static tree
2811 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2813 vn_nary_op_s **slot;
2815 if (vnresult)
2816 *vnresult = NULL;
2818 vno->hashcode = vn_nary_op_compute_hash (vno);
2819 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2820 NO_INSERT);
2821 if (!slot && current_info == optimistic_info)
2822 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2823 NO_INSERT);
2824 if (!slot)
2825 return NULL_TREE;
2826 if (vnresult)
2827 *vnresult = *slot;
2828 return (*slot)->result;
2831 /* Lookup an n-ary operation by its pieces and return the resulting value
2832 number if it exists in the hash table. Return NULL_TREE if it does
2833 not exist in the hash table or if the result field of the operation
2834 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2835 if it exists. */
2837 tree
2838 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2839 tree type, tree *ops, vn_nary_op_t *vnresult)
2841 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2842 sizeof_vn_nary_op (length));
2843 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2844 return vn_nary_op_lookup_1 (vno1, vnresult);
2847 /* Lookup OP in the current hash table, and return the resulting value
2848 number if it exists in the hash table. Return NULL_TREE if it does
2849 not exist in the hash table or if the result field of the operation
2850 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2851 if it exists. */
2853 tree
2854 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2856 vn_nary_op_t vno1
2857 = XALLOCAVAR (struct vn_nary_op_s,
2858 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2859 init_vn_nary_op_from_op (vno1, op);
2860 return vn_nary_op_lookup_1 (vno1, vnresult);
2863 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2864 value number if it exists in the hash table. Return NULL_TREE if
2865 it does not exist in the hash table. VNRESULT will contain the
2866 vn_nary_op_t from the hashtable if it exists. */
2868 tree
2869 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2871 vn_nary_op_t vno1
2872 = XALLOCAVAR (struct vn_nary_op_s,
2873 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2874 init_vn_nary_op_from_stmt (vno1, stmt);
2875 return vn_nary_op_lookup_1 (vno1, vnresult);
2878 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2880 static vn_nary_op_t
2881 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2883 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2886 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2887 obstack. */
2889 static vn_nary_op_t
2890 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2892 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2893 &current_info->nary_obstack);
2895 vno1->value_id = value_id;
2896 vno1->length = length;
2897 vno1->result = result;
2899 return vno1;
2902 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2903 VNO->HASHCODE first. */
2905 static vn_nary_op_t
2906 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2907 bool compute_hash)
2909 vn_nary_op_s **slot;
2911 if (compute_hash)
2912 vno->hashcode = vn_nary_op_compute_hash (vno);
2914 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2915 /* While we do not want to insert things twice it's awkward to
2916 avoid it in the case where visit_nary_op pattern-matches stuff
2917 and ends up simplifying the replacement to itself. We then
2918 get two inserts, one from visit_nary_op and one from
2919 vn_nary_build_or_lookup.
2920 So allow inserts with the same value number. */
2921 if (*slot && (*slot)->result == vno->result)
2922 return *slot;
2924 gcc_assert (!*slot);
2926 *slot = vno;
2927 return vno;
2930 /* Insert an n-ary operation into the current hash table using its
2931 pieces. Return the vn_nary_op_t structure we created and put in
2932 the hashtable. */
2934 vn_nary_op_t
2935 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2936 tree type, tree *ops,
2937 tree result, unsigned int value_id)
2939 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2940 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2941 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2944 /* Insert OP into the current hash table with a value number of
2945 RESULT. Return the vn_nary_op_t structure we created and put in
2946 the hashtable. */
2948 vn_nary_op_t
2949 vn_nary_op_insert (tree op, tree result)
2951 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2952 vn_nary_op_t vno1;
2954 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2955 init_vn_nary_op_from_op (vno1, op);
2956 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2959 /* Insert the rhs of STMT into the current hash table with a value number of
2960 RESULT. */
2962 static vn_nary_op_t
2963 vn_nary_op_insert_stmt (gimple *stmt, tree result)
2965 vn_nary_op_t vno1
2966 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2967 result, VN_INFO (result)->value_id);
2968 init_vn_nary_op_from_stmt (vno1, stmt);
2969 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2972 /* Compute a hashcode for PHI operation VP1 and return it. */
2974 static inline hashval_t
2975 vn_phi_compute_hash (vn_phi_t vp1)
2977 inchash::hash hstate (vp1->phiargs.length () > 2
2978 ? vp1->block->index : vp1->phiargs.length ());
2979 tree phi1op;
2980 tree type;
2981 edge e;
2982 edge_iterator ei;
2984 /* If all PHI arguments are constants we need to distinguish
2985 the PHI node via its type. */
2986 type = vp1->type;
2987 hstate.merge_hash (vn_hash_type (type));
2989 FOR_EACH_EDGE (e, ei, vp1->block->preds)
2991 /* Don't hash backedge values; they need to be handled as VN_TOP
2992 for optimistic value-numbering. */
2993 if (e->flags & EDGE_DFS_BACK)
2994 continue;
2996 phi1op = vp1->phiargs[e->dest_idx];
2997 if (phi1op == VN_TOP)
2998 continue;
2999 inchash::add_expr (phi1op, hstate);
3002 return hstate.end ();
3006 /* Return true if COND1 and COND2 represent the same condition; set
3007 *INVERTED_P if one needs to be inverted to make it the same as
3008 the other. */
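/* For example (illustrative, integer operands): "a < b" and "b > a" are
   the same condition with *INVERTED_P false, while "a < b" and "a >= b"
   are the same condition with *INVERTED_P true.  */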
3010 static bool
3011 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
3012 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
3014 enum tree_code code1 = gimple_cond_code (cond1);
3015 enum tree_code code2 = gimple_cond_code (cond2);
3017 *inverted_p = false;
3018 if (code1 == code2)
3020 else if (code1 == swap_tree_comparison (code2))
3021 std::swap (lhs2, rhs2);
3022 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
3023 *inverted_p = true;
3024 else if (code1 == invert_tree_comparison
3025 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
3027 std::swap (lhs2, rhs2);
3028 *inverted_p = true;
3030 else
3031 return false;
3033 return ((expressions_equal_p (lhs1, lhs2)
3034 && expressions_equal_p (rhs1, rhs2))
3035 || (commutative_tree_code (code1)
3036 && expressions_equal_p (lhs1, rhs2)
3037 && expressions_equal_p (rhs1, lhs2)));
3040 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
3042 static int
3043 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
3045 if (vp1->hashcode != vp2->hashcode)
3046 return false;
3048 if (vp1->block != vp2->block)
3050 if (vp1->phiargs.length () != vp2->phiargs.length ())
3051 return false;
3053 switch (vp1->phiargs.length ())
3055 case 1:
3056 /* Single-arg PHIs are just copies. */
3057 break;
3059 case 2:
3061 /* Rule out backedges into the PHI. */
3062 if (vp1->block->loop_father->header == vp1->block
3063 || vp2->block->loop_father->header == vp2->block)
3064 return false;
3066 /* If the PHI nodes do not have compatible types
3067 they are not the same. */
3068 if (!types_compatible_p (vp1->type, vp2->type))
3069 return false;
3071 basic_block idom1
3072 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3073 basic_block idom2
3074 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
3075 /* If the immediate dominators end in switch stmts, multiple
3076 values may end up in the same PHI arg via intermediate
3077 CFG merges. */
3078 if (EDGE_COUNT (idom1->succs) != 2
3079 || EDGE_COUNT (idom2->succs) != 2)
3080 return false;
3082 /* Verify the controlling stmt is the same. */
3083 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3084 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3085 if (! last1 || ! last2)
3086 return false;
3087 bool inverted_p;
3088 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3089 last2, vp2->cclhs, vp2->ccrhs,
3090 &inverted_p))
3091 return false;
3093 /* Get at true/false controlled edges into the PHI. */
3094 edge te1, te2, fe1, fe2;
3095 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3096 &te1, &fe1)
3097 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3098 &te2, &fe2))
3099 return false;
3101 /* Swap edges if the second condition is the inverse of the
3102 first. */
3103 if (inverted_p)
3104 std::swap (te2, fe2);
3106 /* ??? Handle VN_TOP specially. */
3107 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3108 vp2->phiargs[te2->dest_idx])
3109 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3110 vp2->phiargs[fe2->dest_idx]))
3111 return false;
3113 return true;
3116 default:
3117 return false;
3121 /* If the PHI nodes do not have compatible types
3122 they are not the same. */
3123 if (!types_compatible_p (vp1->type, vp2->type))
3124 return false;
3126 /* Any phi in the same block will have its arguments in the
3127 same edge order, because of how we store phi nodes. */
3128 int i;
3129 tree phi1op;
3130 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
3132 tree phi2op = vp2->phiargs[i];
3133 if (phi1op == VN_TOP || phi2op == VN_TOP)
3134 continue;
3135 if (!expressions_equal_p (phi1op, phi2op))
3136 return false;
3139 return true;
3142 static vec<tree> shared_lookup_phiargs;
3144 /* Lookup PHI in the current hash table, and return the resulting
3145 value number if it exists in the hash table. Return NULL_TREE if
3146 it does not exist in the hash table. */
3148 static tree
3149 vn_phi_lookup (gimple *phi)
3151 vn_phi_s **slot;
3152 struct vn_phi_s vp1;
3153 edge e;
3154 edge_iterator ei;
3156 shared_lookup_phiargs.truncate (0);
3157 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
3159 /* Canonicalize the SSA_NAMEs to their value numbers. */
3160 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3162 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3163 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3164 shared_lookup_phiargs[e->dest_idx] = def;
3166 vp1.type = TREE_TYPE (gimple_phi_result (phi));
3167 vp1.phiargs = shared_lookup_phiargs;
3168 vp1.block = gimple_bb (phi);
3169 /* Extract values of the controlling condition. */
3170 vp1.cclhs = NULL_TREE;
3171 vp1.ccrhs = NULL_TREE;
3172 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1.block);
3173 if (EDGE_COUNT (idom1->succs) == 2)
3174 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3176 vp1.cclhs = vn_valueize (gimple_cond_lhs (last1));
3177 vp1.ccrhs = vn_valueize (gimple_cond_rhs (last1));
3179 vp1.hashcode = vn_phi_compute_hash (&vp1);
3180 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3181 NO_INSERT);
3182 if (!slot && current_info == optimistic_info)
3183 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3184 NO_INSERT);
3185 if (!slot)
3186 return NULL_TREE;
3187 return (*slot)->result;
3190 /* Insert PHI into the current hash table with a value number of
3191 RESULT. */
3193 static vn_phi_t
3194 vn_phi_insert (gimple *phi, tree result)
3196 vn_phi_s **slot;
3197 vn_phi_t vp1 = current_info->phis_pool->allocate ();
3198 vec<tree> args = vNULL;
3199 edge e;
3200 edge_iterator ei;
3202 args.safe_grow (gimple_phi_num_args (phi));
3204 /* Canonicalize the SSA_NAMEs to their value numbers. */
3205 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3207 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3208 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3209 args[e->dest_idx] = def;
3211 vp1->value_id = VN_INFO (result)->value_id;
3212 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3213 vp1->phiargs = args;
3214 vp1->block = gimple_bb (phi);
3215 /* Extract values of the controlling condition. */
3216 vp1->cclhs = NULL_TREE;
3217 vp1->ccrhs = NULL_TREE;
3218 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3219 if (EDGE_COUNT (idom1->succs) == 2)
3220 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3222 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3223 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3225 vp1->result = result;
3226 vp1->hashcode = vn_phi_compute_hash (vp1);
3228 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3230 /* Because we iterate over phi operations more than once, it's
3231 possible the slot might already exist here, hence no assert. */
3232 *slot = vp1;
3233 return vp1;
3237 /* Print set of components in strongly connected component SCC to OUT. */
3239 static void
3240 print_scc (FILE *out, vec<tree> scc)
3242 tree var;
3243 unsigned int i;
3245 fprintf (out, "SCC consists of %u:", scc.length ());
3246 FOR_EACH_VEC_ELT (scc, i, var)
3248 fprintf (out, " ");
3249 print_generic_expr (out, var);
3251 fprintf (out, "\n");
3254 /* Return true if BB1 is dominated by BB2 taking into account edges
3255 that are not executable. */
3257 static bool
3258 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3260 edge_iterator ei;
3261 edge e;
3263 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3264 return true;
3266 /* Before iterating we'd like to know if there exists an
3267 (executable) path from bb2 to bb1 at all; if not we can
3268 directly return false. For now simply iterate once. */
3270 /* Iterate to the single executable bb1 predecessor. */
3271 if (EDGE_COUNT (bb1->preds) > 1)
3273 edge prede = NULL;
3274 FOR_EACH_EDGE (e, ei, bb1->preds)
3275 if (e->flags & EDGE_EXECUTABLE)
3277 if (prede)
3279 prede = NULL;
3280 break;
3282 prede = e;
3284 if (prede)
3286 bb1 = prede->src;
3288 /* Re-do the dominance check with changed bb1. */
3289 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3290 return true;
3294 /* Iterate to the single executable bb2 successor. */
3295 edge succe = NULL;
3296 FOR_EACH_EDGE (e, ei, bb2->succs)
3297 if (e->flags & EDGE_EXECUTABLE)
3299 if (succe)
3301 succe = NULL;
3302 break;
3304 succe = e;
3306 if (succe)
3308 /* Verify the reached block is only reached through succe.
3309 If there is only one edge we can spare us the dominator
3310 check and iterate directly. */
3311 if (EDGE_COUNT (succe->dest->preds) > 1)
3313 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3314 if (e != succe
3315 && (e->flags & EDGE_EXECUTABLE))
3317 succe = NULL;
3318 break;
3321 if (succe)
3323 bb2 = succe->dest;
3325 /* Re-do the dominance check with changed bb2. */
3326 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3327 return true;
3331 /* We could now iterate updating bb1 / bb2. */
3332 return false;
3335 /* Set the value number of FROM to TO, return true if it has changed
3336 as a result. */
3338 static inline bool
3339 set_ssa_val_to (tree from, tree to)
3341 tree currval = SSA_VAL (from);
3342 HOST_WIDE_INT toff, coff;
3344 /* The only things we allow as value numbers are ssa_names
3345 and invariants. So assert that here. We don't allow VN_TOP
3346 as visiting a stmt should produce a value-number other than
3347 that.
3348 ??? Still VN_TOP can happen for unreachable code, so force
3349 it to varying in that case. Not all code is prepared to
3350 get VN_TOP on valueization. */
3351 if (to == VN_TOP)
3353 if (dump_file && (dump_flags & TDF_DETAILS))
3354 fprintf (dump_file, "Forcing value number to varying on "
3355 "receiving VN_TOP\n");
3356 to = from;
3359 gcc_assert (to != NULL_TREE
3360 && ((TREE_CODE (to) == SSA_NAME
3361 && (to == from || SSA_VAL (to) == to))
3362 || is_gimple_min_invariant (to)));
3364 if (from != to)
3366 if (currval == from)
3368 if (dump_file && (dump_flags & TDF_DETAILS))
3370 fprintf (dump_file, "Not changing value number of ");
3371 print_generic_expr (dump_file, from);
3372 fprintf (dump_file, " from VARYING to ");
3373 print_generic_expr (dump_file, to);
3374 fprintf (dump_file, "\n");
3376 return false;
3378 else if (currval != VN_TOP
3379 && ! is_gimple_min_invariant (currval)
3380 && is_gimple_min_invariant (to))
3382 if (dump_file && (dump_flags & TDF_DETAILS))
3384 fprintf (dump_file, "Forcing VARYING instead of changing "
3385 "value number of ");
3386 print_generic_expr (dump_file, from);
3387 fprintf (dump_file, " from ");
3388 print_generic_expr (dump_file, currval);
3389 fprintf (dump_file, " (non-constant) to ");
3390 print_generic_expr (dump_file, to);
3391 fprintf (dump_file, " (constant)\n");
3393 to = from;
3395 else if (TREE_CODE (to) == SSA_NAME
3396 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3397 to = from;
3400 if (dump_file && (dump_flags & TDF_DETAILS))
3402 fprintf (dump_file, "Setting value number of ");
3403 print_generic_expr (dump_file, from);
3404 fprintf (dump_file, " to ");
3405 print_generic_expr (dump_file, to);
3408 if (currval != to
3409 && !operand_equal_p (currval, to, 0)
3410 /* Different undefined SSA names are not actually different. See
3411 PR82320 for a testcase where we'd otherwise not terminate iteration. */
3412 && !(TREE_CODE (currval) == SSA_NAME
3413 && TREE_CODE (to) == SSA_NAME
3414 && ssa_undefined_value_p (currval, false)
3415 && ssa_undefined_value_p (to, false))
3416 /* ??? For addresses involving volatile objects or types operand_equal_p
3417 does not reliably detect ADDR_EXPRs as equal. We know we are only
3418 getting invariant gimple addresses here, so can use
3419 get_addr_base_and_unit_offset to do this comparison. */
3420 && !(TREE_CODE (currval) == ADDR_EXPR
3421 && TREE_CODE (to) == ADDR_EXPR
3422 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3423 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3424 && coff == toff))
3426 if (dump_file && (dump_flags & TDF_DETAILS))
3427 fprintf (dump_file, " (changed)\n");
3429 /* If we equate two SSA names we have to make the side-band info
3430 of the leader conservative (and remember whatever original value
3431 was present). */
3432 if (TREE_CODE (to) == SSA_NAME)
3434 if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3435 && SSA_NAME_RANGE_INFO (to))
3437 if (SSA_NAME_IS_DEFAULT_DEF (to)
3438 || dominated_by_p_w_unex
3439 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3440 gimple_bb (SSA_NAME_DEF_STMT (to))))
3441 /* Keep the info from the dominator. */
3443 else
3445 /* Save old info. */
3446 if (! VN_INFO (to)->info.range_info)
3448 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3449 VN_INFO (to)->range_info_anti_range_p
3450 = SSA_NAME_ANTI_RANGE_P (to);
3452 /* Rather than allocating memory and unioning the info
3453 just clear it. */
3454 if (dump_file && (dump_flags & TDF_DETAILS))
3456 fprintf (dump_file, "clearing range info of ");
3457 print_generic_expr (dump_file, to);
3458 fprintf (dump_file, "\n");
3460 SSA_NAME_RANGE_INFO (to) = NULL;
3463 else if (POINTER_TYPE_P (TREE_TYPE (to))
3464 && SSA_NAME_PTR_INFO (to))
3466 if (SSA_NAME_IS_DEFAULT_DEF (to)
3467 || dominated_by_p_w_unex
3468 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3469 gimple_bb (SSA_NAME_DEF_STMT (to))))
3470 /* Keep the info from the dominator. */
3472 else if (! SSA_NAME_PTR_INFO (from)
3473 /* Handle the case of trivially equivalent info. */
3474 || memcmp (SSA_NAME_PTR_INFO (to),
3475 SSA_NAME_PTR_INFO (from),
3476 sizeof (ptr_info_def)) != 0)
3478 /* Save old info. */
3479 if (! VN_INFO (to)->info.ptr_info)
3480 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3481 /* Rather than allocating memory and unioning the info
3482 just clear it. */
3483 if (dump_file && (dump_flags & TDF_DETAILS))
3485 fprintf (dump_file, "clearing points-to info of ");
3486 print_generic_expr (dump_file, to);
3487 fprintf (dump_file, "\n");
3489 SSA_NAME_PTR_INFO (to) = NULL;
3494 VN_INFO (from)->valnum = to;
3495 return true;
3497 if (dump_file && (dump_flags & TDF_DETAILS))
3498 fprintf (dump_file, "\n");
3499 return false;
3502 /* Mark as processed all the definitions in the defining stmt of USE, or
3503 the USE itself. */
3505 static void
3506 mark_use_processed (tree use)
3508 ssa_op_iter iter;
3509 def_operand_p defp;
3510 gimple *stmt = SSA_NAME_DEF_STMT (use);
3512 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3514 VN_INFO (use)->use_processed = true;
3515 return;
3518 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3520 tree def = DEF_FROM_PTR (defp);
3522 VN_INFO (def)->use_processed = true;
3526 /* Value number all definitions in STMT to themselves.
3527 Return true if a value number changed. */
3529 static bool
3530 defs_to_varying (gimple *stmt)
3532 bool changed = false;
3533 ssa_op_iter iter;
3534 def_operand_p defp;
3536 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3538 tree def = DEF_FROM_PTR (defp);
3539 changed |= set_ssa_val_to (def, def);
3541 return changed;
3544 /* Visit a copy between LHS and RHS, return true if the value number
3545 changed. */
3547 static bool
3548 visit_copy (tree lhs, tree rhs)
3550 /* Valueize. */
3551 rhs = SSA_VAL (rhs);
3553 return set_ssa_val_to (lhs, rhs);
3556 /* Lookup a value for OP in type WIDE_TYPE where the value in the type of OP
3557 is the same. */
3559 static tree
3560 valueized_wider_op (tree wide_type, tree op)
3562 if (TREE_CODE (op) == SSA_NAME)
3563 op = SSA_VAL (op);
3565 /* Either we have the op widened available. */
3566 tree ops[3] = {};
3567 ops[0] = op;
3568 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
3569 wide_type, ops, NULL);
3570 if (tem)
3571 return tem;
3573 /* Or the op is truncated from some existing value. */
3574 if (TREE_CODE (op) == SSA_NAME)
3576 gimple *def = SSA_NAME_DEF_STMT (op);
3577 if (is_gimple_assign (def)
3578 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3580 tem = gimple_assign_rhs1 (def);
3581 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
3583 if (TREE_CODE (tem) == SSA_NAME)
3584 tem = SSA_VAL (tem);
3585 return tem;
3590 /* For constants simply extend it. */
3591 if (TREE_CODE (op) == INTEGER_CST)
3592 return wide_int_to_tree (wide_type, wi::to_wide (op));
3594 return NULL_TREE;
3597 /* Visit a nary operator RHS, value number it, and return true if the
3598 value number of LHS has changed as a result. */
3600 static bool
3601 visit_nary_op (tree lhs, gassign *stmt)
3603 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3604 if (result)
3605 return set_ssa_val_to (lhs, result);
3607 /* Do some special pattern matching for redundancies of operations
3608 in different types. */
3609 enum tree_code code = gimple_assign_rhs_code (stmt);
3610 tree type = TREE_TYPE (lhs);
3611 tree rhs1 = gimple_assign_rhs1 (stmt);
3612 switch (code)
3614 CASE_CONVERT:
3615 /* Match arithmetic done in a different type where we can easily
3616 substitute the result from some earlier sign-changed or widened
3617 operation. */
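	 /* For example (illustrative): for "unsigned short t = a + b;
	    int x = (int) t;" an already available int-typed addition of
	    the widened operands can be reused, masked with 0xffff
	    (assuming 16-bit short) to model the zero-extension; for a
	    pure sign-change no mask is needed.  */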
3618 if (INTEGRAL_TYPE_P (type)
3619 && TREE_CODE (rhs1) == SSA_NAME
3620 /* We only handle sign-changes or zero-extension -> & mask. */
3621 && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
3622 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
3623 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
3625 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
3626 if (def
3627 && (gimple_assign_rhs_code (def) == PLUS_EXPR
3628 || gimple_assign_rhs_code (def) == MINUS_EXPR
3629 || gimple_assign_rhs_code (def) == MULT_EXPR))
3631 tree ops[3] = {};
3632 /* Either we have the op widened available. */
3633 ops[0] = valueized_wider_op (type,
3634 gimple_assign_rhs1 (def));
3635 if (ops[0])
3636 ops[1] = valueized_wider_op (type,
3637 gimple_assign_rhs2 (def));
3638 if (ops[0] && ops[1])
3640 ops[0] = vn_nary_op_lookup_pieces
3641 (2, gimple_assign_rhs_code (def), type, ops, NULL);
3642 /* We have wider operation available. */
3643 if (ops[0])
3645 unsigned lhs_prec = TYPE_PRECISION (type);
3646 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
3647 if (lhs_prec == rhs_prec)
3649 ops[1] = NULL_TREE;
3650 result = vn_nary_build_or_lookup (NOP_EXPR,
3651 type, ops);
3652 if (result)
3654 bool changed = set_ssa_val_to (lhs, result);
3655 vn_nary_op_insert_stmt (stmt, result);
3656 return changed;
3659 else
3661 ops[1] = wide_int_to_tree (type,
3662 wi::mask (rhs_prec, false,
3663 lhs_prec));
3664 result = vn_nary_build_or_lookup (BIT_AND_EXPR,
3665 TREE_TYPE (lhs),
3666 ops);
3667 if (result)
3669 bool changed = set_ssa_val_to (lhs, result);
3670 vn_nary_op_insert_stmt (stmt, result);
3671 return changed;
3678 default:;
3681 bool changed = set_ssa_val_to (lhs, lhs);
3682 vn_nary_op_insert_stmt (stmt, lhs);
3683 return changed;
3686 /* Visit a call STMT storing into LHS. Return true if the value number
3687 of the LHS has changed as a result. */
3689 static bool
3690 visit_reference_op_call (tree lhs, gcall *stmt)
3692 bool changed = false;
3693 struct vn_reference_s vr1;
3694 vn_reference_t vnresult = NULL;
3695 tree vdef = gimple_vdef (stmt);
3697 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3698 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3699 lhs = NULL_TREE;
3701 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3702 if (vnresult)
3704 if (vnresult->result_vdef && vdef)
3705 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3706 else if (vdef)
3707 /* If the call was discovered to be pure or const reflect
3708 that as far as possible. */
3709 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
3711 if (!vnresult->result && lhs)
3712 vnresult->result = lhs;
3714 if (vnresult->result && lhs)
3715 changed |= set_ssa_val_to (lhs, vnresult->result);
3717 else
3719 vn_reference_t vr2;
3720 vn_reference_s **slot;
3721 tree vdef_val = vdef;
3722 if (vdef)
3724 /* If we value numbered the function of an indirect call to
3725 one not clobbering memory, value number its VDEF to its
3726 VUSE. */
3727 tree fn = gimple_call_fn (stmt);
3728 if (fn && TREE_CODE (fn) == SSA_NAME)
3730 fn = SSA_VAL (fn);
3731 if (TREE_CODE (fn) == ADDR_EXPR
3732 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3733 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
3734 & (ECF_CONST | ECF_PURE)))
3735 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
3737 changed |= set_ssa_val_to (vdef, vdef_val);
3739 if (lhs)
3740 changed |= set_ssa_val_to (lhs, lhs);
3741 vr2 = current_info->references_pool->allocate ();
3742 vr2->vuse = vr1.vuse;
3743 /* As we are not walking the virtual operand chain we know the
3744 shared_lookup_references are still original so we can re-use
3745 them here. */
3746 vr2->operands = vr1.operands.copy ();
3747 vr2->type = vr1.type;
3748 vr2->set = vr1.set;
3749 vr2->hashcode = vr1.hashcode;
3750 vr2->result = lhs;
3751 vr2->result_vdef = vdef_val;
3752 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3753 INSERT);
3754 gcc_assert (!*slot);
3755 *slot = vr2;
3758 return changed;
3761 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3762 and return true if the value number of the LHS has changed as a result. */
3764 static bool
3765 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3767 bool changed = false;
3768 tree last_vuse;
3769 tree result;
3771 last_vuse = gimple_vuse (stmt);
3772 last_vuse_ptr = &last_vuse;
3773 result = vn_reference_lookup (op, gimple_vuse (stmt),
3774 default_vn_walk_kind, NULL, true);
3775 last_vuse_ptr = NULL;
3777 /* We handle type-punning through unions by value-numbering based
3778 on offset and size of the access. Be prepared to handle a
3779 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
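  /* For example (illustrative): a float read from a union member last
     written through its int member finds the int's value and wraps it in
     VIEW_CONVERT_EXPR <float_type> below.  */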
3780 if (result
3781 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3783 /* We will be setting the value number of lhs to the value number
3784 of VIEW_CONVERT_EXPR <TREE_TYPE (op)> (result).
3785 So first simplify and lookup this expression to see if it
3786 is already available. */
3787 code_helper rcode = VIEW_CONVERT_EXPR;
3788 tree ops[3] = { result };
3789 result = vn_nary_build_or_lookup (rcode, TREE_TYPE (op), ops);
3792 if (result)
3793 changed = set_ssa_val_to (lhs, result);
3794 else
3796 changed = set_ssa_val_to (lhs, lhs);
3797 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3800 return changed;
3804 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3805 and return true if the value number of the LHS has changed as a result. */
3807 static bool
3808 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3810 bool changed = false;
3811 vn_reference_t vnresult = NULL;
3812 tree assign;
3813 bool resultsame = false;
3814 tree vuse = gimple_vuse (stmt);
3815 tree vdef = gimple_vdef (stmt);
3817 if (TREE_CODE (op) == SSA_NAME)
3818 op = SSA_VAL (op);
3820 /* First we want to lookup using the *vuses* from the store and see
3821 whether the last store to this location with the same address
3822 had the same value.
3824 The vuses represent the memory state before the store. If the
3825 memory state, address, and value of the store is the same as the
3826 last store to this location, then this store will produce the
3827 same memory state as that store.
3829 In this case the vdef versions for this store are value numbered to those
3830 vuse versions, since they represent the same memory state after
3831 this store.
3833 Otherwise, the vdefs for the store are used when inserting into
3834 the table, since the store generates a new memory state. */
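  /* For example (illustrative): in "*p = x_1; ...; *p = x_1;" with no
     intervening clobber of *p, the second store finds the earlier value,
     its VDEF is value-numbered to its VUSE, and the store becomes a
     candidate for removal.  */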
3836 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
3837 if (vnresult
3838 && vnresult->result)
3840 tree result = vnresult->result;
3841 if (TREE_CODE (result) == SSA_NAME)
3842 result = SSA_VAL (result);
3843 resultsame = expressions_equal_p (result, op);
3844 if (resultsame)
3846 /* If the TBAA state isn't compatible for downstream reads
3847 we cannot value-number the VDEFs the same. */
3848 alias_set_type set = get_alias_set (lhs);
3849 if (vnresult->set != set
3850 && ! alias_set_subset_of (set, vnresult->set))
3851 resultsame = false;
3855 if (!resultsame)
3857 /* Only perform the following when being called from PRE
3858 which embeds tail merging. */
3859 if (default_vn_walk_kind == VN_WALK)
3861 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3862 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3863 if (vnresult)
3865 VN_INFO (vdef)->use_processed = true;
3866 return set_ssa_val_to (vdef, vnresult->result_vdef);
3870 if (dump_file && (dump_flags & TDF_DETAILS))
3872 fprintf (dump_file, "No store match\n");
3873 fprintf (dump_file, "Value numbering store ");
3874 print_generic_expr (dump_file, lhs);
3875 fprintf (dump_file, " to ");
3876 print_generic_expr (dump_file, op);
3877 fprintf (dump_file, "\n");
3879 /* Have to set value numbers before insert, since insert is
3880 going to valueize the references in-place. */
3881 if (vdef)
3882 changed |= set_ssa_val_to (vdef, vdef);
3884 /* Do not insert structure copies into the tables. */
3885 if (is_gimple_min_invariant (op)
3886 || is_gimple_reg (op))
3887 vn_reference_insert (lhs, op, vdef, NULL);
3889 /* Only perform the following when being called from PRE
3890 which embeds tail merging. */
3891 if (default_vn_walk_kind == VN_WALK)
3893 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3894 vn_reference_insert (assign, lhs, vuse, vdef);
3897 else
3899 /* We had a match, so value number the vdef to have the value
3900 number of the vuse it came from. */
3902 if (dump_file && (dump_flags & TDF_DETAILS))
3903 fprintf (dump_file, "Store matched earlier value, "
3904 "value numbering store vdefs to matching vuses.\n");
3906 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3909 return changed;
3912 /* Visit and value number PHI, return true if the value number
3913 changed. */
3915 static bool
3916 visit_phi (gimple *phi)
3918 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
3919 unsigned n_executable = 0;
3920 bool allsame = true;
3921 edge_iterator ei;
3922 edge e;
3924 /* TODO: We could check for this in init_sccvn, and replace this
3925 with a gcc_assert. */
3926 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3927 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3929 /* See if all non-TOP arguments have the same value. TOP is
3930 equivalent to everything, so we can ignore it. */
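   /* For example (hypothetical SSA names), for
        x_4 = PHI <a_1(2), a_1(3), x_4(5)>
      the argument coming over a not yet visited back edge still has
      value VN_TOP and is skipped, the remaining arguments all agree,
      and x_4 can be value numbered to a_1.  */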
3931 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3932 if (e->flags & EDGE_EXECUTABLE)
3934 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3936 ++n_executable;
3937 if (TREE_CODE (def) == SSA_NAME)
3938 def = SSA_VAL (def);
3939 if (def == VN_TOP)
3941 /* Ignore undefined defs for sameval but record one. */
3942 else if (TREE_CODE (def) == SSA_NAME
3943 && ssa_undefined_value_p (def, false))
3944 seen_undef = def;
3945 else if (sameval == VN_TOP)
3946 sameval = def;
3947 else if (!expressions_equal_p (def, sameval))
3949 allsame = false;
3950 break;
3955 /* If none of the edges was executable keep the value-number at VN_TOP,
3956      if only a single edge is executable use its value.  */
3957 if (n_executable <= 1)
3958 result = seen_undef ? seen_undef : sameval;
3959   /* If we saw only undefined values and VN_TOP, use one of the
3960      undefined values.  */
3961 else if (sameval == VN_TOP)
3962 result = seen_undef ? seen_undef : sameval;
3963 /* First see if it is equivalent to a phi node in this block. We prefer
3964 this as it allows IV elimination - see PRs 66502 and 67167. */
3965 else if ((result = vn_phi_lookup (phi)))
3967 /* If all values are the same use that, unless we've seen undefined
3968 values as well and the value isn't constant.
3969 CCP/copyprop have the same restriction to not remove uninit warnings. */
3970 else if (allsame
3971 && (! seen_undef || is_gimple_min_invariant (sameval)))
3972 result = sameval;
3973 else
3975 result = PHI_RESULT (phi);
3976       /* Only insert PHIs that are varying; for constant value numbers
3977          we would mess up equivalences otherwise, as we are only comparing
3978          the immediate controlling predicates.  */
3979 vn_phi_insert (phi, result);
3982 return set_ssa_val_to (PHI_RESULT (phi), result);
3985 /* Try to simplify RHS using equivalences and constant folding. */
3987 static tree
3988 try_to_simplify (gassign *stmt)
3990 enum tree_code code = gimple_assign_rhs_code (stmt);
3991 tree tem;
3993   /* For stores we can end up simplifying an SSA_NAME rhs.  Just return
3994      in this case; there is no point in doing extra work.  */
3995 if (code == SSA_NAME)
3996 return NULL_TREE;
3998 /* First try constant folding based on our current lattice. */
3999 mprts_hook = vn_lookup_simplify_result;
4000 mprts_hook_cnt = 9;
4001 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
4002 mprts_hook = NULL;
4003 if (tem
4004 && (TREE_CODE (tem) == SSA_NAME
4005 || is_gimple_min_invariant (tem)))
4006 return tem;
4008 return NULL_TREE;
4011 /* Visit and value number USE, return true if the value number
4012 changed. */
4014 static bool
4015 visit_use (tree use)
4017 bool changed = false;
4018 gimple *stmt = SSA_NAME_DEF_STMT (use);
4020 mark_use_processed (use);
4022 gcc_assert (!SSA_NAME_IN_FREE_LIST (use)
4023 && !SSA_NAME_IS_DEFAULT_DEF (use));
4025 if (dump_file && (dump_flags & TDF_DETAILS))
4027 fprintf (dump_file, "Value numbering ");
4028 print_generic_expr (dump_file, use);
4029 fprintf (dump_file, " stmt = ");
4030 print_gimple_stmt (dump_file, stmt, 0);
4033 if (gimple_code (stmt) == GIMPLE_PHI)
4034 changed = visit_phi (stmt);
4035 else if (gimple_has_volatile_ops (stmt))
4036 changed = defs_to_varying (stmt);
4037 else if (gassign *ass = dyn_cast <gassign *> (stmt))
4039 enum tree_code code = gimple_assign_rhs_code (ass);
4040 tree lhs = gimple_assign_lhs (ass);
4041 tree rhs1 = gimple_assign_rhs1 (ass);
4042 tree simplified;
4044 /* Shortcut for copies. Simplifying copies is pointless,
4045 since we copy the expression and value they represent. */
4046 if (code == SSA_NAME
4047 && TREE_CODE (lhs) == SSA_NAME)
4049 changed = visit_copy (lhs, rhs1);
4050 goto done;
4052 simplified = try_to_simplify (ass);
4053 if (simplified)
4055 if (dump_file && (dump_flags & TDF_DETAILS))
4057 fprintf (dump_file, "RHS ");
4058 print_gimple_expr (dump_file, ass, 0);
4059 fprintf (dump_file, " simplified to ");
4060 print_generic_expr (dump_file, simplified);
4061 fprintf (dump_file, "\n");
4064 /* Setting value numbers to constants will occasionally
4065 screw up phi congruence because constants are not
4066 uniquely associated with a single ssa name that can be
4067 looked up. */
4068 if (simplified
4069 && is_gimple_min_invariant (simplified)
4070 && TREE_CODE (lhs) == SSA_NAME)
4072 changed = set_ssa_val_to (lhs, simplified);
4073 goto done;
4075 else if (simplified
4076 && TREE_CODE (simplified) == SSA_NAME
4077 && TREE_CODE (lhs) == SSA_NAME)
4079 changed = visit_copy (lhs, simplified);
4080 goto done;
4083 if ((TREE_CODE (lhs) == SSA_NAME
4084 /* We can substitute SSA_NAMEs that are live over
4085 abnormal edges with their constant value. */
4086 && !(gimple_assign_copy_p (ass)
4087 && is_gimple_min_invariant (rhs1))
4088 && !(simplified
4089 && is_gimple_min_invariant (simplified))
4090 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4091 /* Stores or copies from SSA_NAMEs that are live over
4092 abnormal edges are a problem. */
4093 || (code == SSA_NAME
4094 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4095 changed = defs_to_varying (ass);
4096 else if (REFERENCE_CLASS_P (lhs)
4097 || DECL_P (lhs))
4098 changed = visit_reference_op_store (lhs, rhs1, ass);
4099 else if (TREE_CODE (lhs) == SSA_NAME)
4101 if ((gimple_assign_copy_p (ass)
4102 && is_gimple_min_invariant (rhs1))
4103 || (simplified
4104 && is_gimple_min_invariant (simplified)))
4106 if (simplified)
4107 changed = set_ssa_val_to (lhs, simplified);
4108 else
4109 changed = set_ssa_val_to (lhs, rhs1);
4111 else
4113 /* Visit the original statement. */
4114 switch (vn_get_stmt_kind (ass))
4116 case VN_NARY:
4117 changed = visit_nary_op (lhs, ass);
4118 break;
4119 case VN_REFERENCE:
4120 changed = visit_reference_op_load (lhs, rhs1, ass);
4121 break;
4122 default:
4123 changed = defs_to_varying (ass);
4124 break;
4128 else
4129 changed = defs_to_varying (ass);
4131 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4133 tree lhs = gimple_call_lhs (call_stmt);
4134 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4136 /* Try constant folding based on our current lattice. */
4137 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4138 vn_valueize);
4139 if (simplified)
4141 if (dump_file && (dump_flags & TDF_DETAILS))
4143 fprintf (dump_file, "call ");
4144 print_gimple_expr (dump_file, call_stmt, 0);
4145 fprintf (dump_file, " simplified to ");
4146 print_generic_expr (dump_file, simplified);
4147 fprintf (dump_file, "\n");
4150 /* Setting value numbers to constants will occasionally
4151 screw up phi congruence because constants are not
4152 uniquely associated with a single ssa name that can be
4153 looked up. */
4154 if (simplified
4155 && is_gimple_min_invariant (simplified))
4157 changed = set_ssa_val_to (lhs, simplified);
4158 if (gimple_vdef (call_stmt))
4159 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4160 SSA_VAL (gimple_vuse (call_stmt)));
4161 goto done;
4163 else if (simplified
4164 && TREE_CODE (simplified) == SSA_NAME)
4166 changed = visit_copy (lhs, simplified);
4167 if (gimple_vdef (call_stmt))
4168 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4169 SSA_VAL (gimple_vuse (call_stmt)));
4170 goto done;
4172 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4174 changed = defs_to_varying (call_stmt);
4175 goto done;
4179 /* Pick up flags from a devirtualization target. */
4180 tree fn = gimple_call_fn (stmt);
4181 int extra_fnflags = 0;
4182 if (fn && TREE_CODE (fn) == SSA_NAME)
4184 fn = SSA_VAL (fn);
4185 if (TREE_CODE (fn) == ADDR_EXPR
4186 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4187 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4189 if (!gimple_call_internal_p (call_stmt)
4190 && (/* Calls to the same function with the same vuse
4191 and the same operands do not necessarily return the same
4192 value, unless they're pure or const. */
4193 ((gimple_call_flags (call_stmt) | extra_fnflags)
4194 & (ECF_PURE | ECF_CONST))
4195 /* If calls have a vdef, subsequent calls won't have
4196 the same incoming vuse. So, if 2 calls with vdef have the
4197 same vuse, we know they're not subsequent.
4198            We can give 2 calls to the same function with the same
4199            vuse and the same operands, which are not subsequent to
4200            each other, the same value number, because there is no
4201            code in the program that can compare the 2 values...  */
4202 || (gimple_vdef (call_stmt)
4203 /* ... unless the call returns a pointer which does
4204               not alias with anything else, in which case the
4205               information that the values are distinct is encoded
4206               in the IL.  */
4207 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4208 /* Only perform the following when being called from PRE
4209 which embeds tail merging. */
4210 && default_vn_walk_kind == VN_WALK)))
4211 changed = visit_reference_op_call (lhs, call_stmt);
4212 else
4213 changed = defs_to_varying (call_stmt);
4215 else
4216 changed = defs_to_varying (stmt);
4217 done:
4218 return changed;
4221 /* Compare two operands by reverse postorder index */
4223 static int
4224 compare_ops (const void *pa, const void *pb)
4226 const tree opa = *((const tree *)pa);
4227 const tree opb = *((const tree *)pb);
4228 gimple *opstmta = SSA_NAME_DEF_STMT (opa);
4229 gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
4230 basic_block bba;
4231 basic_block bbb;
4233 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
4234 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4235 else if (gimple_nop_p (opstmta))
4236 return -1;
4237 else if (gimple_nop_p (opstmtb))
4238 return 1;
4240 bba = gimple_bb (opstmta);
4241 bbb = gimple_bb (opstmtb);
4243 if (!bba && !bbb)
4244 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4245 else if (!bba)
4246 return -1;
4247 else if (!bbb)
4248 return 1;
4250 if (bba == bbb)
4252 if (gimple_code (opstmta) == GIMPLE_PHI
4253 && gimple_code (opstmtb) == GIMPLE_PHI)
4254 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4255 else if (gimple_code (opstmta) == GIMPLE_PHI)
4256 return -1;
4257 else if (gimple_code (opstmtb) == GIMPLE_PHI)
4258 return 1;
4259 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
4260 return gimple_uid (opstmta) - gimple_uid (opstmtb);
4261 else
4262 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4264 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
4267 /* Sort an array containing members of a strongly connected component
4268 SCC so that the members are ordered by RPO number.
4269 This means that when the sort is complete, iterating through the
4270 array will give you the members in RPO order. */
4272 static void
4273 sort_scc (vec<tree> scc)
4275 scc.qsort (compare_ops);
4278 /* Insert the no longer used nary ONARY to the hash INFO. */
4280 static void
4281 copy_nary (vn_nary_op_t onary, vn_tables_t info)
4283 size_t size = sizeof_vn_nary_op (onary->length);
4284 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
4285 &info->nary_obstack);
4286 memcpy (nary, onary, size);
4287 vn_nary_op_insert_into (nary, info->nary, false);
4290 /* Insert the no longer used phi OPHI to the hash INFO. */
4292 static void
4293 copy_phi (vn_phi_t ophi, vn_tables_t info)
4295 vn_phi_t phi = info->phis_pool->allocate ();
4296 vn_phi_s **slot;
4297 memcpy (phi, ophi, sizeof (*phi));
4298 ophi->phiargs.create (0);
4299 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
4300 gcc_assert (!*slot);
4301 *slot = phi;
4304 /* Insert the no longer used reference OREF to the hash INFO. */
4306 static void
4307 copy_reference (vn_reference_t oref, vn_tables_t info)
4309 vn_reference_t ref;
4310 vn_reference_s **slot;
4311 ref = info->references_pool->allocate ();
4312 memcpy (ref, oref, sizeof (*ref));
4313 oref->operands.create (0);
4314 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
4315 if (*slot)
4316 free_reference (*slot);
4317 *slot = ref;
4320 /* Process a strongly connected component in the SSA graph. */
4322 static void
4323 process_scc (vec<tree> scc)
4325 tree var;
4326 unsigned int i;
4327 unsigned int iterations = 0;
4328 bool changed = true;
4329 vn_nary_op_iterator_type hin;
4330 vn_phi_iterator_type hip;
4331 vn_reference_iterator_type hir;
4332 vn_nary_op_t nary;
4333 vn_phi_t phi;
4334 vn_reference_t ref;
4336 /* If the SCC has a single member, just visit it. */
4337 if (scc.length () == 1)
4339 tree use = scc[0];
4340 if (VN_INFO (use)->use_processed)
4341 return;
4342 /* We need to make sure it doesn't form a cycle itself, which can
4343 happen for self-referential PHI nodes. In that case we would
4344 end up inserting an expression with VN_TOP operands into the
4345 valid table which makes us derive bogus equivalences later.
4346 The cheapest way to check this is to assume it for all PHI nodes. */
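       /* A sketch of such a self-referential PHI (made-up names):
            x_1 = PHI <x_1(4), 0(2)>
          It forms an SCC of size one through its own back edge argument,
          so we fall through to the iteration below instead of visiting
          it only once.  */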
4347 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
4348 /* Fallthru to iteration. */ ;
4349 else
4351 visit_use (use);
4352 return;
4356 if (dump_file && (dump_flags & TDF_DETAILS))
4357 print_scc (dump_file, scc);
4359 /* Iterate over the SCC with the optimistic table until it stops
4360 changing. */
4361 current_info = optimistic_info;
4362 while (changed)
4364 changed = false;
4365 iterations++;
4366 if (dump_file && (dump_flags & TDF_DETAILS))
4367 fprintf (dump_file, "Starting iteration %d\n", iterations);
4368 /* As we are value-numbering optimistically we have to
4369 clear the expression tables and the simplified expressions
4370 in each iteration until we converge. */
4371 optimistic_info->nary->empty ();
4372 optimistic_info->phis->empty ();
4373 optimistic_info->references->empty ();
4374 obstack_free (&optimistic_info->nary_obstack, NULL);
4375 gcc_obstack_init (&optimistic_info->nary_obstack);
4376 optimistic_info->phis_pool->release ();
4377 optimistic_info->references_pool->release ();
4378 FOR_EACH_VEC_ELT (scc, i, var)
4379 gcc_assert (!VN_INFO (var)->needs_insertion
4380 && VN_INFO (var)->expr == NULL);
4381 FOR_EACH_VEC_ELT (scc, i, var)
4382 changed |= visit_use (var);
4385 if (dump_file && (dump_flags & TDF_DETAILS))
4386 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4387 statistics_histogram_event (cfun, "SCC iterations", iterations);
4389 /* Finally, copy the contents of the no longer used optimistic
4390 table to the valid table. */
4391 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
4392 copy_nary (nary, valid_info);
4393 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
4394 copy_phi (phi, valid_info);
4395 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
4396 ref, vn_reference_t, hir)
4397 copy_reference (ref, valid_info);
4399 current_info = valid_info;
4403 /* Pop the components of the found SCC for NAME off the SCC stack
4404    and process them.  If the SCC exceeds the size limit, its members
4405    are dropped to varying instead of being value-numbered.  */
4407 static void
4408 extract_and_process_scc_for_name (tree name)
4410 auto_vec<tree> scc;
4411 tree x;
4413 /* Found an SCC, pop the components off the SCC stack and
4414 process them. */
4417 x = sccstack.pop ();
4419 VN_INFO (x)->on_sccstack = false;
4420 scc.safe_push (x);
4421 } while (x != name);
4423   /* Drop all defs in the SCC to varying in case the SCC turns out to be
4424 incredibly large.
4425 ??? Just switch to a non-optimistic mode that avoids any iteration. */
4426 if (scc.length () > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4428 if (dump_file)
4430 print_scc (dump_file, scc);
4431 fprintf (dump_file, "WARNING: Giving up value-numbering SCC due to "
4432 "size %u exceeding %u\n", scc.length (),
4433 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4435 tree var;
4436 unsigned i;
4437 FOR_EACH_VEC_ELT (scc, i, var)
4439 gimple *def = SSA_NAME_DEF_STMT (var);
4440 mark_use_processed (var);
4441 if (SSA_NAME_IS_DEFAULT_DEF (var)
4442 || gimple_code (def) == GIMPLE_PHI)
4443 set_ssa_val_to (var, var);
4444 else
4445 defs_to_varying (def);
4447 return;
4450 if (scc.length () > 1)
4451 sort_scc (scc);
4453 process_scc (scc);
4456 /* Depth first search on NAME to discover and process SCC's in the SSA
4457 graph.
4458 Execution of this algorithm relies on the fact that the SCC's are
4459 popped off the stack in topological order.
4460    The walk is implemented iteratively, with explicit stacks, instead
4461    of using recursion.  */
4463 static void
4464 DFS (tree name)
4466 auto_vec<ssa_op_iter> itervec;
4467 auto_vec<tree> namevec;
4468 use_operand_p usep = NULL;
4469 gimple *defstmt;
4470 tree use;
4471 ssa_op_iter iter;
4473 start_over:
4474 /* SCC info */
4475 VN_INFO (name)->dfsnum = next_dfs_num++;
4476 VN_INFO (name)->visited = true;
4477 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4479 sccstack.safe_push (name);
4480 VN_INFO (name)->on_sccstack = true;
4481 defstmt = SSA_NAME_DEF_STMT (name);
4483 /* Recursively DFS on our operands, looking for SCC's. */
4484 if (!gimple_nop_p (defstmt))
4486 /* Push a new iterator. */
4487 if (gphi *phi = dyn_cast <gphi *> (defstmt))
4488 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4489 else
4490 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4492 else
4493 clear_and_done_ssa_iter (&iter);
4495 while (1)
4497 /* If we are done processing uses of a name, go up the stack
4498 of iterators and process SCCs as we found them. */
4499 if (op_iter_done (&iter))
4501 /* See if we found an SCC. */
4502 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4503 extract_and_process_scc_for_name (name);
4505 /* Check if we are done. */
4506 if (namevec.is_empty ())
4507 return;
4509 /* Restore the last use walker and continue walking there. */
4510 use = name;
4511 name = namevec.pop ();
4512 memcpy (&iter, &itervec.last (),
4513 sizeof (ssa_op_iter));
4514 itervec.pop ();
4515 goto continue_walking;
4518 use = USE_FROM_PTR (usep);
4520 /* Since we handle phi nodes, we will sometimes get
4521 invariants in the use expression. */
4522 if (TREE_CODE (use) == SSA_NAME)
4524 if (! (VN_INFO (use)->visited))
4526 /* Recurse by pushing the current use walking state on
4527 the stack and starting over. */
4528 itervec.safe_push (iter);
4529 namevec.safe_push (name);
4530 name = use;
4531 goto start_over;
4533 continue_walking:
4534 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4535 VN_INFO (use)->low);
4537 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4538 && VN_INFO (use)->on_sccstack)
4540 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4541 VN_INFO (name)->low);
4545 usep = op_iter_next_use (&iter);
4549 /* Allocate a value number table. */
4551 static void
4552 allocate_vn_table (vn_tables_t table)
4554 table->phis = new vn_phi_table_type (23);
4555 table->nary = new vn_nary_op_table_type (23);
4556 table->references = new vn_reference_table_type (23);
4558 gcc_obstack_init (&table->nary_obstack);
4559 table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4560 table->references_pool = new object_allocator<vn_reference_s>
4561 ("VN references");
4564 /* Free a value number table. */
4566 static void
4567 free_vn_table (vn_tables_t table)
4569 delete table->phis;
4570 table->phis = NULL;
4571 delete table->nary;
4572 table->nary = NULL;
4573 delete table->references;
4574 table->references = NULL;
4575 obstack_free (&table->nary_obstack, NULL);
4576 delete table->phis_pool;
4577 delete table->references_pool;
4580 static void
4581 init_scc_vn (void)
4583 int j;
4584 int *rpo_numbers_temp;
4586 calculate_dominance_info (CDI_DOMINATORS);
4587 mark_dfs_back_edges ();
4589 sccstack.create (0);
4590 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4592 constant_value_ids = BITMAP_ALLOC (NULL);
4594 next_dfs_num = 1;
4595 next_value_id = 1;
4597 vn_ssa_aux_table.create (num_ssa_names + 1);
4598   /* The create call above doesn't actually grow the vector to its final
4599      size, it just preallocates the space to do so.  */
4600 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4601 gcc_obstack_init (&vn_ssa_aux_obstack);
4603 shared_lookup_phiargs.create (0);
4604 shared_lookup_references.create (0);
4605 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4606 rpo_numbers_temp =
4607 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4608 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4610   /* rpo_numbers_temp is in RPO order: rpo_numbers_temp[i] = bb->index
4611      means that the i'th block in RPO order is bb.  We want to map bb
4612      indices to RPO numbers, so we need to invert this mapping.  */
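   /* A small worked example with made-up block indices: if
        rpo_numbers_temp[] = { 2, 4, 3 }
      then block 2 is first in RPO, block 4 second and block 3 third,
      and the loop below produces
        rpo_numbers[2] = 0, rpo_numbers[4] = 1, rpo_numbers[3] = 2.  */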
4613 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4614 rpo_numbers[rpo_numbers_temp[j]] = j;
4616 XDELETE (rpo_numbers_temp);
4618 VN_TOP = build_decl (UNKNOWN_LOCATION, VAR_DECL,
4619 get_identifier ("VN_TOP"), error_mark_node);
4621 renumber_gimple_stmt_uids ();
4623 /* Create the valid and optimistic value numbering tables. */
4624 valid_info = XCNEW (struct vn_tables_s);
4625 allocate_vn_table (valid_info);
4626 optimistic_info = XCNEW (struct vn_tables_s);
4627 allocate_vn_table (optimistic_info);
4628 current_info = valid_info;
4630 /* Create the VN_INFO structures, and initialize value numbers to
4631 TOP or VARYING for parameters. */
4632 size_t i;
4633 tree name;
4635 FOR_EACH_SSA_NAME (i, name, cfun)
4637 VN_INFO_GET (name)->valnum = VN_TOP;
4638 VN_INFO (name)->needs_insertion = false;
4639 VN_INFO (name)->expr = NULL;
4640 VN_INFO (name)->value_id = 0;
4642 if (!SSA_NAME_IS_DEFAULT_DEF (name))
4643 continue;
4645 switch (TREE_CODE (SSA_NAME_VAR (name)))
4647 case VAR_DECL:
4648 /* All undefined vars are VARYING. */
4649 VN_INFO (name)->valnum = name;
4650 VN_INFO (name)->visited = true;
4651 break;
4653 case PARM_DECL:
4654 /* Parameters are VARYING but we can record a condition
4655 if we know it is a non-NULL pointer. */
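          /* For instance, for a pointer parameter p_1(D) known to be
             non-NULL (say via the nonnull attribute) we record the nary
             entry p_1(D) != 0 == true below, so a later lookup of that
             predicate, e.g. from a guarding if (p_1 != 0), folds to
             true; the parameter itself still stays VARYING.  */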
4656 VN_INFO (name)->visited = true;
4657 VN_INFO (name)->valnum = name;
4658 if (POINTER_TYPE_P (TREE_TYPE (name))
4659 && nonnull_arg_p (SSA_NAME_VAR (name)))
4661 tree ops[2];
4662 ops[0] = name;
4663 ops[1] = build_int_cst (TREE_TYPE (name), 0);
4664 vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4665 boolean_true_node, 0);
4666 if (dump_file && (dump_flags & TDF_DETAILS))
4668 fprintf (dump_file, "Recording ");
4669 print_generic_expr (dump_file, name, TDF_SLIM);
4670 fprintf (dump_file, " != 0\n");
4673 break;
4675 case RESULT_DECL:
4676       /* If the result is passed by invisible reference the default
4677          def is initialized, otherwise it's uninitialized.  Either
4678          way we treat it as varying here.  */
4679 VN_INFO (name)->visited = true;
4680 VN_INFO (name)->valnum = name;
4681 break;
4683 default:
4684 gcc_unreachable ();
4689 /* Restore SSA info that has been reset on value leaders. */
4691 void
4692 scc_vn_restore_ssa_info (void)
4694 unsigned i;
4695 tree name;
4697 FOR_EACH_SSA_NAME (i, name, cfun)
4699 if (has_VN_INFO (name))
4701 if (VN_INFO (name)->needs_insertion)
4703 else if (POINTER_TYPE_P (TREE_TYPE (name))
4704 && VN_INFO (name)->info.ptr_info)
4705 SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4706 else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4707 && VN_INFO (name)->info.range_info)
4709 SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4710 SSA_NAME_ANTI_RANGE_P (name)
4711 = VN_INFO (name)->range_info_anti_range_p;
4717 void
4718 free_scc_vn (void)
4720 size_t i;
4721 tree name;
4723 delete constant_to_value_id;
4724 constant_to_value_id = NULL;
4725 BITMAP_FREE (constant_value_ids);
4726 shared_lookup_phiargs.release ();
4727 shared_lookup_references.release ();
4728 XDELETEVEC (rpo_numbers);
4730 FOR_EACH_SSA_NAME (i, name, cfun)
4732 if (has_VN_INFO (name)
4733 && VN_INFO (name)->needs_insertion)
4734 release_ssa_name (name);
4736 obstack_free (&vn_ssa_aux_obstack, NULL);
4737 vn_ssa_aux_table.release ();
4739 sccstack.release ();
4740 free_vn_table (valid_info);
4741 XDELETE (valid_info);
4742 free_vn_table (optimistic_info);
4743 XDELETE (optimistic_info);
4745 BITMAP_FREE (const_parms);
4748 /* Set *ID according to RESULT. */
4750 static void
4751 set_value_id_for_result (tree result, unsigned int *id)
4753 if (result && TREE_CODE (result) == SSA_NAME)
4754 *id = VN_INFO (result)->value_id;
4755 else if (result && is_gimple_min_invariant (result))
4756 *id = get_or_alloc_constant_value_id (result);
4757 else
4758 *id = get_next_value_id ();
4761 /* Set the value ids in the valid hash tables. */
4763 static void
4764 set_hashtable_value_ids (void)
4766 vn_nary_op_iterator_type hin;
4767 vn_phi_iterator_type hip;
4768 vn_reference_iterator_type hir;
4769 vn_nary_op_t vno;
4770 vn_reference_t vr;
4771 vn_phi_t vp;
4773 /* Now set the value ids of the things we had put in the hash
4774 table. */
4776 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4777 set_value_id_for_result (vno->result, &vno->value_id);
4779 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4780 set_value_id_for_result (vp->result, &vp->value_id);
4782 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4783 hir)
4784 set_value_id_for_result (vr->result, &vr->value_id);
4787 class sccvn_dom_walker : public dom_walker
4789 public:
4790 sccvn_dom_walker ()
4791 : dom_walker (CDI_DOMINATORS, true), cond_stack (0) {}
4793 virtual edge before_dom_children (basic_block);
4794 virtual void after_dom_children (basic_block);
4796 void record_cond (basic_block,
4797 enum tree_code code, tree lhs, tree rhs, bool value);
4798 void record_conds (basic_block,
4799 enum tree_code code, tree lhs, tree rhs, bool value);
4801 auto_vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4802 cond_stack;
4805 /* Record a temporary condition for the BB and its dominated blocks. */
4807 void
4808 sccvn_dom_walker::record_cond (basic_block bb,
4809 enum tree_code code, tree lhs, tree rhs,
4810 bool value)
4812 tree ops[2] = { lhs, rhs };
4813 vn_nary_op_t old = NULL;
4814 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4815 current_info->nary->remove_elt_with_hash (old, old->hashcode);
4816 vn_nary_op_t cond
4817 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4818 value
4819 ? boolean_true_node
4820 : boolean_false_node, 0);
4821 if (dump_file && (dump_flags & TDF_DETAILS))
4823 fprintf (dump_file, "Recording temporarily ");
4824 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4825 fprintf (dump_file, " %s ", get_tree_code_name (code));
4826 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4827 fprintf (dump_file, " == %s%s\n",
4828 value ? "true" : "false",
4829 old ? " (old entry saved)" : "");
4831 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4834 /* Record temporary conditions for the BB and its dominated blocks
4835 according to LHS CODE RHS == VALUE and its dominated conditions. */
4837 void
4838 sccvn_dom_walker::record_conds (basic_block bb,
4839 enum tree_code code, tree lhs, tree rhs,
4840 bool value)
4842 /* Record the original condition. */
4843 record_cond (bb, code, lhs, rhs, value);
4845 if (!value)
4846 return;
4848 /* Record dominated conditions if the condition is true. Note that
4849 the inversion is already recorded. */
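  /* For example (made-up operands), recording a_1 < b_2 == true also
     records a_1 <= b_2 == true, a_1 != b_2 == true and
     a_1 == b_2 == false via the cases below, so later lookups of any
     of these forms are answered as well.  */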
4850 switch (code)
4852 case LT_EXPR:
4853 case GT_EXPR:
4854 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4855 record_cond (bb, NE_EXPR, lhs, rhs, true);
4856 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4857 break;
4859 case EQ_EXPR:
4860 record_cond (bb, LE_EXPR, lhs, rhs, true);
4861 record_cond (bb, GE_EXPR, lhs, rhs, true);
4862 record_cond (bb, LT_EXPR, lhs, rhs, false);
4863 record_cond (bb, GT_EXPR, lhs, rhs, false);
4864 break;
4866 default:
4867 break;
4871 /* Restore expressions and values derived from conditionals. */
4873 void
4874 sccvn_dom_walker::after_dom_children (basic_block bb)
4876 while (!cond_stack.is_empty ()
4877 && cond_stack.last ().first == bb)
4879 vn_nary_op_t cond = cond_stack.last ().second.first;
4880 vn_nary_op_t old = cond_stack.last ().second.second;
4881 current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4882 if (old)
4883 vn_nary_op_insert_into (old, current_info->nary, false);
4884 cond_stack.pop ();
4888 /* Value number all statements in BB. */
4890 edge
4891 sccvn_dom_walker::before_dom_children (basic_block bb)
4893 if (dump_file && (dump_flags & TDF_DETAILS))
4894 fprintf (dump_file, "Visiting BB %d\n", bb->index);
4896 /* If we have a single predecessor record the equivalence from a
4897 possible condition on the predecessor edge. */
4898 edge pred_e = single_pred_edge_ignoring_loop_edges (bb, false);
4899 if (pred_e)
4901 /* Check if there are multiple executable successor edges in
4902 the source block. Otherwise there is no additional info
4903 to be recorded. */
4904 edge_iterator ei;
4905 edge e2;
4906 FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4907 if (e2 != pred_e
4908 && e2->flags & EDGE_EXECUTABLE)
4909 break;
4910 if (e2 && (e2->flags & EDGE_EXECUTABLE))
4912 gimple *stmt = last_stmt (pred_e->src);
4913 if (stmt
4914 && gimple_code (stmt) == GIMPLE_COND)
4916 enum tree_code code = gimple_cond_code (stmt);
4917 tree lhs = gimple_cond_lhs (stmt);
4918 tree rhs = gimple_cond_rhs (stmt);
4919 record_conds (bb, code, lhs, rhs,
4920 (pred_e->flags & EDGE_TRUE_VALUE) != 0);
4921 code = invert_tree_comparison (code, HONOR_NANS (lhs));
4922 if (code != ERROR_MARK)
4923 record_conds (bb, code, lhs, rhs,
4924 (pred_e->flags & EDGE_TRUE_VALUE) == 0);
4929 /* Value-number all defs in the basic-block. */
4930 for (gphi_iterator gsi = gsi_start_phis (bb);
4931 !gsi_end_p (gsi); gsi_next (&gsi))
4933 gphi *phi = gsi.phi ();
4934 tree res = PHI_RESULT (phi);
4935 if (!VN_INFO (res)->visited)
4936 DFS (res);
4938 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4939 !gsi_end_p (gsi); gsi_next (&gsi))
4941 ssa_op_iter i;
4942 tree op;
4943 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4944 if (!VN_INFO (op)->visited)
4945 DFS (op);
4948 /* Finally look at the last stmt. */
4949 gimple *stmt = last_stmt (bb);
4950 if (!stmt)
4951 return NULL;
4953 enum gimple_code code = gimple_code (stmt);
4954 if (code != GIMPLE_COND
4955 && code != GIMPLE_SWITCH
4956 && code != GIMPLE_GOTO)
4957 return NULL;
4959 if (dump_file && (dump_flags & TDF_DETAILS))
4961 fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
4962 print_gimple_stmt (dump_file, stmt, 0);
4965 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4966 if value-numbering can prove they are not reachable. Handling
4967 computed gotos is also possible. */
4968 tree val;
4969 switch (code)
4971 case GIMPLE_COND:
4973 tree lhs = vn_valueize (gimple_cond_lhs (stmt));
4974 tree rhs = vn_valueize (gimple_cond_rhs (stmt));
4975 val = gimple_simplify (gimple_cond_code (stmt),
4976 boolean_type_node, lhs, rhs,
4977 NULL, vn_valueize);
4978 /* If that didn't simplify to a constant see if we have recorded
4979 temporary expressions from taken edges. */
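        /* A made-up example: if the single predecessor ended in
           if (a_1 < b_2) and we entered this block over its true edge,
           record_conds has stored a_1 < b_2 == true, so the lookup below
           folds a repeated test of the same predicate to true.  */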
4980 if (!val || TREE_CODE (val) != INTEGER_CST)
4982 tree ops[2];
4983 ops[0] = lhs;
4984 ops[1] = rhs;
4985 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
4986 boolean_type_node, ops, NULL);
4988 break;
4990 case GIMPLE_SWITCH:
4991 val = gimple_switch_index (as_a <gswitch *> (stmt));
4992 break;
4993 case GIMPLE_GOTO:
4994 val = gimple_goto_dest (stmt);
4995 break;
4996 default:
4997 gcc_unreachable ();
4999 if (!val)
5000 return NULL;
5002 edge taken = find_taken_edge (bb, vn_valueize (val));
5003 if (!taken)
5004 return NULL;
5006 if (dump_file && (dump_flags & TDF_DETAILS))
5007 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
5008 "not executable\n", bb->index, bb->index, taken->dest->index);
5010 return taken;
5013 /* Do SCCVN.  DEFAULT_VN_WALK_KIND_ specifies how we use the alias
5014    oracle walking during the VN process.  */
5017 void
5018 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
5020 size_t i;
5022 default_vn_walk_kind = default_vn_walk_kind_;
5024 init_scc_vn ();
5026 /* Collect pointers we know point to readonly memory. */
5027 const_parms = BITMAP_ALLOC (NULL);
5028 tree fnspec = lookup_attribute ("fn spec",
5029 TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
5030 if (fnspec)
5032 fnspec = TREE_VALUE (TREE_VALUE (fnspec));
5033 i = 1;
5034 for (tree arg = DECL_ARGUMENTS (cfun->decl);
5035 arg; arg = DECL_CHAIN (arg), ++i)
5037 if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
5038 break;
5039 if (TREE_STRING_POINTER (fnspec)[i] == 'R'
5040 || TREE_STRING_POINTER (fnspec)[i] == 'r')
5042 tree name = ssa_default_def (cfun, arg);
5043 if (name)
5044 bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
5049   /* Walk all blocks in dominator order, value-numbering the SSA defs
5050      of stmts and deciding whether outgoing edges are not executable.  */
5051 sccvn_dom_walker walker;
5052 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5054 /* Initialize the value ids and prune out remaining VN_TOPs
5055 from dead code. */
5056 tree name;
5057 FOR_EACH_SSA_NAME (i, name, cfun)
5059 vn_ssa_aux_t info = VN_INFO (name);
5060 if (!info->visited
5061 || info->valnum == VN_TOP)
5062 info->valnum = name;
5063 if (info->valnum == name)
5064 info->value_id = get_next_value_id ();
5065 else if (is_gimple_min_invariant (info->valnum))
5066 info->value_id = get_or_alloc_constant_value_id (info->valnum);
5069 /* Propagate. */
5070 FOR_EACH_SSA_NAME (i, name, cfun)
5072 vn_ssa_aux_t info = VN_INFO (name);
5073 if (TREE_CODE (info->valnum) == SSA_NAME
5074 && info->valnum != name
5075 && info->value_id != VN_INFO (info->valnum)->value_id)
5076 info->value_id = VN_INFO (info->valnum)->value_id;
5079 set_hashtable_value_ids ();
5081 if (dump_file && (dump_flags & TDF_DETAILS))
5083 fprintf (dump_file, "Value numbers:\n");
5084 FOR_EACH_SSA_NAME (i, name, cfun)
5086 if (VN_INFO (name)->visited
5087 && SSA_VAL (name) != name)
5089 print_generic_expr (dump_file, name);
5090 fprintf (dump_file, " = ");
5091 print_generic_expr (dump_file, SSA_VAL (name));
5092 fprintf (dump_file, "\n");
5098 /* Return the maximum value id we have ever seen. */
5100 unsigned int
5101 get_max_value_id (void)
5103 return next_value_id;
5106 /* Return the next unique value id. */
5108 unsigned int
5109 get_next_value_id (void)
5111 return next_value_id++;
5115 /* Compare two expressions E1 and E2 and return true if they are equal. */
5117 bool
5118 expressions_equal_p (tree e1, tree e2)
5120 /* The obvious case. */
5121 if (e1 == e2)
5122 return true;
5124 /* If either one is VN_TOP consider them equal. */
5125 if (e1 == VN_TOP || e2 == VN_TOP)
5126 return true;
5128 /* If only one of them is null, they cannot be equal. */
5129 if (!e1 || !e2)
5130 return false;
5132 /* Now perform the actual comparison. */
5133 if (TREE_CODE (e1) == TREE_CODE (e2)
5134 && operand_equal_p (e1, e2, OEP_PURE_SAME))
5135 return true;
5137 return false;
5141 /* Return true if the nary operation NARY may trap. This is a copy
5142 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
5144 bool
5145 vn_nary_may_trap (vn_nary_op_t nary)
5147 tree type;
5148 tree rhs2 = NULL_TREE;
5149 bool honor_nans = false;
5150 bool honor_snans = false;
5151 bool fp_operation = false;
5152 bool honor_trapv = false;
5153 bool handled, ret;
5154 unsigned i;
5156 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5157 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5158 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5160 type = nary->type;
5161 fp_operation = FLOAT_TYPE_P (type);
5162 if (fp_operation)
5164 honor_nans = flag_trapping_math && !flag_finite_math_only;
5165 honor_snans = flag_signaling_nans != 0;
5167 else if (INTEGRAL_TYPE_P (type)
5168 && TYPE_OVERFLOW_TRAPS (type))
5169 honor_trapv = true;
5171 if (nary->length >= 2)
5172 rhs2 = nary->op[1];
5173 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5174 honor_trapv,
5175 honor_nans, honor_snans, rhs2,
5176 &handled);
5177 if (handled
5178 && ret)
5179 return true;
5181 for (i = 0; i < nary->length; ++i)
5182 if (tree_could_trap_p (nary->op[i]))
5183 return true;
5185 return false;
5189 class eliminate_dom_walker : public dom_walker
5191 public:
5192 eliminate_dom_walker (cdi_direction, bitmap);
5193 ~eliminate_dom_walker ();
5195 virtual edge before_dom_children (basic_block);
5196 virtual void after_dom_children (basic_block);
5198 tree eliminate_avail (tree op);
5199 void eliminate_push_avail (tree op);
5200 tree eliminate_insert (gimple_stmt_iterator *gsi, tree val);
5202 bool do_pre;
5203 unsigned int el_todo;
5204 unsigned int eliminations;
5205 unsigned int insertions;
5207 /* SSA names that had their defs inserted by PRE if do_pre. */
5208 bitmap inserted_exprs;
5210 /* Blocks with statements that have had their EH properties changed. */
5211 bitmap need_eh_cleanup;
5213 /* Blocks with statements that have had their AB properties changed. */
5214 bitmap need_ab_cleanup;
5216 auto_vec<gimple *> to_remove;
5217 auto_vec<gimple *> to_fixup;
5218 auto_vec<tree> avail;
5219 auto_vec<tree> avail_stack;
5222 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
5223 bitmap inserted_exprs_)
5224 : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
5225 el_todo (0), eliminations (0), insertions (0),
5226 inserted_exprs (inserted_exprs_)
5228 need_eh_cleanup = BITMAP_ALLOC (NULL);
5229 need_ab_cleanup = BITMAP_ALLOC (NULL);
5232 eliminate_dom_walker::~eliminate_dom_walker ()
5234 BITMAP_FREE (need_eh_cleanup);
5235 BITMAP_FREE (need_ab_cleanup);
5238 /* Return a leader for OP that is available at the current point of the
5239 eliminate domwalk. */
5241 tree
5242 eliminate_dom_walker::eliminate_avail (tree op)
5244 tree valnum = VN_INFO (op)->valnum;
5245 if (TREE_CODE (valnum) == SSA_NAME)
5247 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5248 return valnum;
5249 if (avail.length () > SSA_NAME_VERSION (valnum))
5250 return avail[SSA_NAME_VERSION (valnum)];
5252 else if (is_gimple_min_invariant (valnum))
5253 return valnum;
5254 return NULL_TREE;
5257 /* At the current point of the eliminate domwalk make OP available. */
5259 void
5260 eliminate_dom_walker::eliminate_push_avail (tree op)
5262 tree valnum = VN_INFO (op)->valnum;
5263 if (TREE_CODE (valnum) == SSA_NAME)
5265 if (avail.length () <= SSA_NAME_VERSION (valnum))
5266 avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
5267 tree pushop = op;
5268 if (avail[SSA_NAME_VERSION (valnum)])
5269 pushop = avail[SSA_NAME_VERSION (valnum)];
5270 avail_stack.safe_push (pushop);
5271 avail[SSA_NAME_VERSION (valnum)] = op;
5275 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
5276 the leader for the expression if insertion was successful. */
5278 tree
5279 eliminate_dom_walker::eliminate_insert (gimple_stmt_iterator *gsi, tree val)
5281 /* We can insert a sequence with a single assignment only. */
5282 gimple_seq stmts = VN_INFO (val)->expr;
5283 if (!gimple_seq_singleton_p (stmts))
5284 return NULL_TREE;
5285 gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
5286 if (!stmt
5287 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
5288 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
5289 && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
5290 && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
5291 || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
5292 return NULL_TREE;
5294 tree op = gimple_assign_rhs1 (stmt);
5295 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
5296 || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5297 op = TREE_OPERAND (op, 0);
5298 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
5299 if (!leader)
5300 return NULL_TREE;
5302 tree res;
5303 stmts = NULL;
5304 if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5305 res = gimple_build (&stmts, BIT_FIELD_REF,
5306 TREE_TYPE (val), leader,
5307 TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
5308 TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
5309 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
5310 res = gimple_build (&stmts, BIT_AND_EXPR,
5311 TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
5312 else
5313 res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
5314 TREE_TYPE (val), leader);
5315 if (TREE_CODE (res) != SSA_NAME
5316 || SSA_NAME_IS_DEFAULT_DEF (res)
5317 || gimple_bb (SSA_NAME_DEF_STMT (res)))
5319 gimple_seq_discard (stmts);
5321 /* During propagation we have to treat SSA info conservatively
5322 and thus we can end up simplifying the inserted expression
5323          at elimination time to something not defined in stmts.  */
5324 /* But then this is a redundancy we failed to detect. Which means
5325 res now has two values. That doesn't play well with how
5326 we track availability here, so give up. */
5327 if (dump_file && (dump_flags & TDF_DETAILS))
5329 if (TREE_CODE (res) == SSA_NAME)
5330 res = eliminate_avail (res);
5331 if (res)
5333 fprintf (dump_file, "Failed to insert expression for value ");
5334 print_generic_expr (dump_file, val);
5335 fprintf (dump_file, " which is really fully redundant to ");
5336 print_generic_expr (dump_file, res);
5337 fprintf (dump_file, "\n");
5341 return NULL_TREE;
5343 else
5345 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
5346 VN_INFO_GET (res)->valnum = val;
5349 insertions++;
5350 if (dump_file && (dump_flags & TDF_DETAILS))
5352 fprintf (dump_file, "Inserted ");
5353 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
5356 return res;
5361 /* Perform elimination for the basic-block B during the domwalk. */
5363 edge
5364 eliminate_dom_walker::before_dom_children (basic_block b)
5366 /* Mark new bb. */
5367 avail_stack.safe_push (NULL_TREE);
5369 /* Skip unreachable blocks marked unreachable during the SCCVN domwalk. */
5370 edge_iterator ei;
5371 edge e;
5372 FOR_EACH_EDGE (e, ei, b->preds)
5373 if (e->flags & EDGE_EXECUTABLE)
5374 break;
5375 if (! e)
5376 return NULL;
5378 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
5380 gphi *phi = gsi.phi ();
5381 tree res = PHI_RESULT (phi);
5383 if (virtual_operand_p (res))
5385 gsi_next (&gsi);
5386 continue;
5389 tree sprime = eliminate_avail (res);
5390 if (sprime
5391 && sprime != res)
5393 if (dump_file && (dump_flags & TDF_DETAILS))
5395 fprintf (dump_file, "Replaced redundant PHI node defining ");
5396 print_generic_expr (dump_file, res);
5397 fprintf (dump_file, " with ");
5398 print_generic_expr (dump_file, sprime);
5399 fprintf (dump_file, "\n");
5402       /* If we inserted this PHI node ourselves, it's not an elimination.  */
5403 if (! inserted_exprs
5404 || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
5405 eliminations++;
5407 /* If we will propagate into all uses don't bother to do
5408 anything. */
5409 if (may_propagate_copy (res, sprime))
5411 /* Mark the PHI for removal. */
5412 to_remove.safe_push (phi);
5413 gsi_next (&gsi);
5414 continue;
5417 remove_phi_node (&gsi, false);
5419 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
5420 sprime = fold_convert (TREE_TYPE (res), sprime);
5421 gimple *stmt = gimple_build_assign (res, sprime);
5422 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
5423 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
5424 continue;
5427 eliminate_push_avail (res);
5428 gsi_next (&gsi);
5431 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
5432 !gsi_end_p (gsi);
5433 gsi_next (&gsi))
5435 tree sprime = NULL_TREE;
5436 gimple *stmt = gsi_stmt (gsi);
5437 tree lhs = gimple_get_lhs (stmt);
5438 if (lhs && TREE_CODE (lhs) == SSA_NAME
5439 && !gimple_has_volatile_ops (stmt)
5440         /* See PR43491.  Do not replace a global register variable when
5441            it is the RHS of an assignment.  Do replace local register
5442            variables since gcc does not guarantee a local variable will
5443            be allocated in a register.
5444 ??? The fix isn't effective here. This should instead
5445 be ensured by not value-numbering them the same but treating
5446 them like volatiles? */
5447 && !(gimple_assign_single_p (stmt)
5448 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
5449 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
5450 && is_global_var (gimple_assign_rhs1 (stmt)))))
5452 sprime = eliminate_avail (lhs);
5453 if (!sprime)
5455 /* If there is no existing usable leader but SCCVN thinks
5456 it has an expression it wants to use as replacement,
5457 insert that. */
5458 tree val = VN_INFO (lhs)->valnum;
5459 if (val != VN_TOP
5460 && TREE_CODE (val) == SSA_NAME
5461 && VN_INFO (val)->needs_insertion
5462 && VN_INFO (val)->expr != NULL
5463 && (sprime = eliminate_insert (&gsi, val)) != NULL_TREE)
5464 eliminate_push_avail (sprime);
5467         /* If this now constitutes a copy, duplicate points-to
5468 and range info appropriately. This is especially
5469 important for inserted code. See tree-ssa-copy.c
5470 for similar code. */
5471 if (sprime
5472 && TREE_CODE (sprime) == SSA_NAME)
5474 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
5475 if (POINTER_TYPE_P (TREE_TYPE (lhs))
5476 && VN_INFO_PTR_INFO (lhs)
5477 && ! VN_INFO_PTR_INFO (sprime))
5479 duplicate_ssa_name_ptr_info (sprime,
5480 VN_INFO_PTR_INFO (lhs));
5481 if (b != sprime_b)
5482 mark_ptr_info_alignment_unknown
5483 (SSA_NAME_PTR_INFO (sprime));
5485 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5486 && VN_INFO_RANGE_INFO (lhs)
5487 && ! VN_INFO_RANGE_INFO (sprime)
5488 && b == sprime_b)
5489 duplicate_ssa_name_range_info (sprime,
5490 VN_INFO_RANGE_TYPE (lhs),
5491 VN_INFO_RANGE_INFO (lhs));
5494 /* Inhibit the use of an inserted PHI on a loop header when
5495 the address of the memory reference is a simple induction
5496 variable. In other cases the vectorizer won't do anything
5497 anyway (either it's loop invariant or a complicated
5498 expression). */
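        /* A sketch of what this avoids (hypothetical names): for a load
             tem_3 = MEM[p_2];
           in a loop where p_2 is a simple induction variable, reusing a
           PRE-inserted PHI on the loop header for tem_3 would make the
           value flow from the previous iteration, adding a loop carried
           dependence that can keep the loop from being vectorized.  */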
5499 if (sprime
5500 && TREE_CODE (sprime) == SSA_NAME
5501 && do_pre
5502 && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
5503 && loop_outer (b->loop_father)
5504 && has_zero_uses (sprime)
5505 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
5506 && gimple_assign_load_p (stmt))
5508 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
5509 basic_block def_bb = gimple_bb (def_stmt);
5510 if (gimple_code (def_stmt) == GIMPLE_PHI
5511 && def_bb->loop_father->header == def_bb)
5513 loop_p loop = def_bb->loop_father;
5514 ssa_op_iter iter;
5515 tree op;
5516 bool found = false;
5517 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5519 affine_iv iv;
5520 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
5521 if (def_bb
5522 && flow_bb_inside_loop_p (loop, def_bb)
5523 && simple_iv (loop, loop, op, &iv, true))
5525 found = true;
5526 break;
5529 if (found)
5531 if (dump_file && (dump_flags & TDF_DETAILS))
5533 fprintf (dump_file, "Not replacing ");
5534 print_gimple_expr (dump_file, stmt, 0);
5535 fprintf (dump_file, " with ");
5536 print_generic_expr (dump_file, sprime);
5537 fprintf (dump_file, " which would add a loop"
5538 " carried dependence to loop %d\n",
5539 loop->num);
5541 /* Don't keep sprime available. */
5542 sprime = NULL_TREE;
5547 if (sprime)
5549 /* If we can propagate the value computed for LHS into
5550 all uses don't bother doing anything with this stmt. */
5551 if (may_propagate_copy (lhs, sprime))
5553 /* Mark it for removal. */
5554 to_remove.safe_push (stmt);
5556 /* ??? Don't count copy/constant propagations. */
5557 if (gimple_assign_single_p (stmt)
5558 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5559 || gimple_assign_rhs1 (stmt) == sprime))
5560 continue;
5562 if (dump_file && (dump_flags & TDF_DETAILS))
5564 fprintf (dump_file, "Replaced ");
5565 print_gimple_expr (dump_file, stmt, 0);
5566 fprintf (dump_file, " with ");
5567 print_generic_expr (dump_file, sprime);
5568 fprintf (dump_file, " in all uses of ");
5569 print_gimple_stmt (dump_file, stmt, 0);
5572 eliminations++;
5573 continue;
5576 /* If this is an assignment from our leader (which
5577 happens in the case the value-number is a constant)
5578 then there is nothing to do. */
5579 if (gimple_assign_single_p (stmt)
5580 && sprime == gimple_assign_rhs1 (stmt))
5581 continue;
5583 /* Else replace its RHS. */
5584 bool can_make_abnormal_goto
5585 = is_gimple_call (stmt)
5586 && stmt_can_make_abnormal_goto (stmt);
5588 if (dump_file && (dump_flags & TDF_DETAILS))
5590 fprintf (dump_file, "Replaced ");
5591 print_gimple_expr (dump_file, stmt, 0);
5592 fprintf (dump_file, " with ");
5593 print_generic_expr (dump_file, sprime);
5594 fprintf (dump_file, " in ");
5595 print_gimple_stmt (dump_file, stmt, 0);
5598 eliminations++;
5599 gimple *orig_stmt = stmt;
5600 if (!useless_type_conversion_p (TREE_TYPE (lhs),
5601 TREE_TYPE (sprime)))
5602 sprime = fold_convert (TREE_TYPE (lhs), sprime);
5603 tree vdef = gimple_vdef (stmt);
5604 tree vuse = gimple_vuse (stmt);
5605 propagate_tree_value_into_stmt (&gsi, sprime);
5606 stmt = gsi_stmt (gsi);
5607 update_stmt (stmt);
5608 if (vdef != gimple_vdef (stmt))
5609 VN_INFO (vdef)->valnum = vuse;
5611 /* If we removed EH side-effects from the statement, clean
5612 its EH information. */
5613 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
5615 bitmap_set_bit (need_eh_cleanup,
5616 gimple_bb (stmt)->index);
5617 if (dump_file && (dump_flags & TDF_DETAILS))
5618 fprintf (dump_file, " Removed EH side-effects.\n");
5621 /* Likewise for AB side-effects. */
5622 if (can_make_abnormal_goto
5623 && !stmt_can_make_abnormal_goto (stmt))
5625 bitmap_set_bit (need_ab_cleanup,
5626 gimple_bb (stmt)->index);
5627 if (dump_file && (dump_flags & TDF_DETAILS))
5628 fprintf (dump_file, " Removed AB side-effects.\n");
5631 continue;
5635 /* If the statement is a scalar store, see if the expression
5636 has the same value number as its rhs. If so, the store is
5637 dead. */
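      /* A minimal made-up example:
           a.x = x_1;
           ... no intervening store that clobbers a.x ...
           a.x = x_1;
         Looking up a.x at the second store yields a value equal to x_1,
         so the second store is queued for removal below.  */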
5638 if (gimple_assign_single_p (stmt)
5639 && !gimple_has_volatile_ops (stmt)
5640 && !is_gimple_reg (gimple_assign_lhs (stmt))
5641 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5642 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
5644 tree val;
5645 tree rhs = gimple_assign_rhs1 (stmt);
5646 vn_reference_t vnresult;
5647 val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
5648 &vnresult, false);
5649 if (TREE_CODE (rhs) == SSA_NAME)
5650 rhs = VN_INFO (rhs)->valnum;
5651 if (val
5652 && operand_equal_p (val, rhs, 0))
5654 /* We can only remove the later store if the former aliases
5655 at least all accesses the later one does or if the store
5656 was to readonly memory storing the same value. */
5657 alias_set_type set = get_alias_set (lhs);
5658 if (! vnresult
5659 || vnresult->set == set
5660 || alias_set_subset_of (set, vnresult->set))
5662 if (dump_file && (dump_flags & TDF_DETAILS))
5664 fprintf (dump_file, "Deleted redundant store ");
5665 print_gimple_stmt (dump_file, stmt, 0);
5668 /* Queue stmt for removal. */
5669 to_remove.safe_push (stmt);
5670 continue;
5675       /* If value numbering determined that one outgoing edge of this
5676          control statement is not executable, force the condition in a
5677          way consistent with that.  */
5678 if (gcond *cond = dyn_cast <gcond *> (stmt))
5680 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
5681 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
5683 if (dump_file && (dump_flags & TDF_DETAILS))
5685 fprintf (dump_file, "Removing unexecutable edge from ");
5686 print_gimple_stmt (dump_file, stmt, 0);
5688 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
5689 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
5690 gimple_cond_make_true (cond);
5691 else
5692 gimple_cond_make_false (cond);
5693 update_stmt (cond);
5694 el_todo |= TODO_cleanup_cfg;
5695 continue;
5699 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
5700 bool was_noreturn = (is_gimple_call (stmt)
5701 && gimple_call_noreturn_p (stmt));
5702 tree vdef = gimple_vdef (stmt);
5703 tree vuse = gimple_vuse (stmt);
5705 /* If we didn't replace the whole stmt (or propagate the result
5706 into all uses), replace all uses on this stmt with their
5707 leaders. */
5708 bool modified = false;
5709 use_operand_p use_p;
5710 ssa_op_iter iter;
5711 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5713 tree use = USE_FROM_PTR (use_p);
5714 /* ??? The call code above leaves stmt operands un-updated. */
5715 if (TREE_CODE (use) != SSA_NAME)
5716 continue;
5717 tree sprime = eliminate_avail (use);
5718 if (sprime && sprime != use
5719 && may_propagate_copy (use, sprime)
5720 /* We substitute into debug stmts to avoid excessive
5721 debug temporaries created by removed stmts, but we need
5722 to avoid doing so for inserted sprimes as we never want
5723 to create debug temporaries for them. */
5724 && (!inserted_exprs
5725 || TREE_CODE (sprime) != SSA_NAME
5726 || !is_gimple_debug (stmt)
5727 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
5729 propagate_value (use_p, sprime);
5730 modified = true;
5734       /* Fold the stmt if modified; this canonicalizes MEM_REFs we propagated
5735          into, which is a requirement for the IPA devirt machinery.  */
5736 gimple *old_stmt = stmt;
5737 if (modified)
5739 /* If a formerly non-invariant ADDR_EXPR is turned into an
5740 invariant one it was on a separate stmt. */
5741 if (gimple_assign_single_p (stmt)
5742 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
5743 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
5744 gimple_stmt_iterator prev = gsi;
5745 gsi_prev (&prev);
5746 if (fold_stmt (&gsi))
5748           /* fold_stmt may have created new stmts in between
5749 the previous stmt and the folded stmt. Mark
5750 all defs created there as varying to not confuse
5751 the SCCVN machinery as we're using that even during
5752 elimination. */
5753 if (gsi_end_p (prev))
5754 prev = gsi_start_bb (b);
5755 else
5756 gsi_next (&prev);
5757 if (gsi_stmt (prev) != gsi_stmt (gsi))
5760 tree def;
5761 ssa_op_iter dit;
5762 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
5763 dit, SSA_OP_ALL_DEFS)
5764 /* As existing DEFs may move between stmts
5765 we have to guard VN_INFO_GET. */
5766 if (! has_VN_INFO (def))
5767 VN_INFO_GET (def)->valnum = def;
5768 if (gsi_stmt (prev) == gsi_stmt (gsi))
5769 break;
5770 gsi_next (&prev);
5772 while (1);
5774 stmt = gsi_stmt (gsi);
5775 /* In case we folded the stmt away schedule the NOP for removal. */
5776 if (gimple_nop_p (stmt))
5777 to_remove.safe_push (stmt);
5780 /* Visit indirect calls and turn them into direct calls if
5781 possible using the devirtualization machinery. Do this before
5782          checking for required EH/abnormal/noreturn cleanup as devirtualization
5783 may expose more of those. */
      if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
        {
          tree fn = gimple_call_fn (call_stmt);
          if (fn
              && flag_devirtualize
              && virtual_method_call_p (fn))
            {
              tree otr_type = obj_type_ref_class (fn);
              unsigned HOST_WIDE_INT otr_tok
                = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
              tree instance;
              ipa_polymorphic_call_context context (current_function_decl,
                                                    fn, stmt, &instance);
              context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
                                        otr_type, stmt);
              bool final;
              vec <cgraph_node *> targets
                = possible_polymorphic_call_targets (obj_type_ref_class (fn),
                                                     otr_tok, context, &final);
              if (dump_file)
                dump_possible_polymorphic_call_targets (dump_file,
                                                        obj_type_ref_class (fn),
                                                        otr_tok, context);
              if (final && targets.length () <= 1 && dbg_cnt (devirt))
                {
                  tree fn;
                  if (targets.length () == 1)
                    fn = targets[0]->decl;
                  else
                    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
                  if (dump_enabled_p ())
                    {
                      location_t loc = gimple_location (stmt);
                      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
                                       "converting indirect call to "
                                       "function %s\n",
                                       lang_hooks.decl_printable_name (fn, 2));
                    }
                  gimple_call_set_fndecl (call_stmt, fn);
                  /* If changing the call to __builtin_unreachable
                     or similar noreturn function, adjust gimple_call_fntype
                     too.  */
                  if (gimple_call_noreturn_p (call_stmt)
                      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
                      && TYPE_ARG_TYPES (TREE_TYPE (fn))
                      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
                          == void_type_node))
                    gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
                  maybe_remove_unused_call_args (cfun, call_stmt);
                  modified = true;
                }
            }
        }
      if (modified)
        {
          /* When changing a call into a noreturn call, cfg cleanup
             is needed to fix up the noreturn call.  */
          if (!was_noreturn
              && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
            to_fixup.safe_push (stmt);
          /* When changing a condition or switch into one we know what
             edge will be executed, schedule a cfg cleanup.  */
          if ((gimple_code (stmt) == GIMPLE_COND
               && (gimple_cond_true_p (as_a <gcond *> (stmt))
                   || gimple_cond_false_p (as_a <gcond *> (stmt))))
              || (gimple_code (stmt) == GIMPLE_SWITCH
                  && TREE_CODE (gimple_switch_index
                                  (as_a <gswitch *> (stmt))) == INTEGER_CST))
            el_todo |= TODO_cleanup_cfg;
          /* If we removed EH side-effects from the statement, clean
             its EH information.  */
          if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
            {
              bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "  Removed EH side-effects.\n");
            }
          /* Likewise for AB side-effects.  */
          if (can_make_abnormal_goto
              && !stmt_can_make_abnormal_goto (stmt))
            {
              bitmap_set_bit (need_ab_cleanup, gimple_bb (stmt)->index);
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "  Removed AB side-effects.\n");
            }
          update_stmt (stmt);
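          /* If folding removed the statement's store, its virtual
             definition is gone as well; value-number the old VDEF to the
             incoming VUSE so later lookups through it still resolve.  */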
          if (vdef != gimple_vdef (stmt))
            VN_INFO (vdef)->valnum = vuse;
        }

      /* Make new values available - for fully redundant LHS we
         continue with the next stmt above and skip this.  */
      def_operand_p defp;
      FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
        eliminate_push_avail (DEF_FROM_PTR (defp));
    }
  /* Replace destination PHI arguments.  */
  FOR_EACH_EDGE (e, ei, b->succs)
    if (e->flags & EDGE_EXECUTABLE)
      for (gphi_iterator gsi = gsi_start_phis (e->dest);
           !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          gphi *phi = gsi.phi ();
          use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
          tree arg = USE_FROM_PTR (use_p);
          if (TREE_CODE (arg) != SSA_NAME
              || virtual_operand_p (arg))
            continue;
          tree sprime = eliminate_avail (arg);
          if (sprime && may_propagate_copy (arg, sprime))
            propagate_value (use_p, sprime);
        }
  return NULL;
}

/* Unwind the available-leaders table: leaders made available while
   processing this block's subtree must no longer be available once the
   dominator walk leaves it.  */
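/* Each popped entry is either the name this block installed as the leader
   for its value number (then the slot is simply cleared) or a previously
   shadowed leader saved by eliminate_push_avail (then that leader is
   restored).  The NULL_TREE marker pushed when the block was entered
   terminates the unwind.  */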
void
eliminate_dom_walker::after_dom_children (basic_block)
{
  tree entry;
  while ((entry = avail_stack.pop ()) != NULL_TREE)
    {
      tree valnum = VN_INFO (entry)->valnum;
      tree old = avail[SSA_NAME_VERSION (valnum)];
      if (old == entry)
        avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
      else
        avail[SSA_NAME_VERSION (valnum)] = entry;
    }
}
/* Eliminate fully redundant computations.  */

unsigned int
vn_eliminate (bitmap inserted_exprs)
{
  eliminate_dom_walker el (CDI_DOMINATORS, inserted_exprs);
  el.avail.reserve (num_ssa_names);

  el.walk (cfun->cfg->x_entry_block_ptr);

  /* We cannot remove stmts during the BB walk, especially not release
     SSA names there as this confuses the VN machinery.  The stmts ending
     up in to_remove are either stores or simple copies.
     Remove stmts in reverse order to make debug stmt creation possible.  */
  while (!el.to_remove.is_empty ())
    {
      gimple *stmt = el.to_remove.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Removing dead stmt ");
          print_gimple_stmt (dump_file, stmt, 0, 0);
        }

      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
        remove_phi_node (&gsi, true);
      else
        {
          basic_block bb = gimple_bb (stmt);
          unlink_stmt_vdef (stmt);
          if (gsi_remove (&gsi, true))
            bitmap_set_bit (el.need_eh_cleanup, bb->index);
          if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
            bitmap_set_bit (el.need_ab_cleanup, bb->index);
          release_defs (stmt);
        }

      /* Removing a stmt may expose a forwarder block.  */
      el.el_todo |= TODO_cleanup_cfg;
    }

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now-noreturn call first.  */
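  /* Such calls typically arise from devirtualizing an indirect call to
     __builtin_unreachable during the dominator walk above.  */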
  while (!el.to_fixup.is_empty ())
    {
      gimple *stmt = el.to_fixup.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Fixing up noreturn call ");
          print_gimple_stmt (dump_file, stmt, 0);
        }

      if (fixup_noreturn_call (stmt))
        el.el_todo |= TODO_cleanup_cfg;
    }

  bool do_eh_cleanup = !bitmap_empty_p (el.need_eh_cleanup);
  bool do_ab_cleanup = !bitmap_empty_p (el.need_ab_cleanup);

  if (do_eh_cleanup)
    gimple_purge_all_dead_eh_edges (el.need_eh_cleanup);

  if (do_ab_cleanup)
    gimple_purge_all_dead_abnormal_call_edges (el.need_ab_cleanup);

  if (do_eh_cleanup || do_ab_cleanup)
    el.el_todo |= TODO_cleanup_cfg;

  statistics_counter_event (cfun, "Eliminated", el.eliminations);
  statistics_counter_event (cfun, "Insertions", el.insertions);

  return el.el_todo;
}
namespace {

const pass_data pass_data_fre =
{
  GIMPLE_PASS, /* type */
  "fre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fre : public gimple_opt_pass
{
public:
  pass_fre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fre, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fre (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_fre != 0; }
  virtual unsigned int execute (function *);

}; // class pass_fre

unsigned int
pass_fre::execute (function *)
{
  unsigned int todo = 0;

  run_scc_vn (VN_WALKREWRITE);

  /* Remove all the redundant expressions.  */
  todo |= vn_eliminate (NULL);

  scc_vn_restore_ssa_info ();
  free_scc_vn ();

  return todo;
}

} // anon namespace

gimple_opt_pass *
make_pass_fre (gcc::context *ctxt)
{
  return new pass_fre (ctxt);
}
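/* The pass manager instantiates the FRE pass through this factory; the
   gate above keys the pass off -ftree-fre (flag_tree_fre).  */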