[official-gcc.git] / gcc / tree-ssa-sccvn.c
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2016 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "emit-rtl.h"
33 #include "cgraph.h"
34 #include "gimple-pretty-print.h"
35 #include "alias.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "cfganal.h"
39 #include "tree-inline.h"
40 #include "internal-fn.h"
41 #include "gimple-fold.h"
42 #include "tree-eh.h"
43 #include "gimplify.h"
44 #include "flags.h"
45 #include "dojump.h"
46 #include "explow.h"
47 #include "calls.h"
48 #include "varasm.h"
49 #include "stmt.h"
50 #include "expr.h"
51 #include "tree-dfa.h"
52 #include "tree-ssa.h"
53 #include "dumpfile.h"
54 #include "cfgloop.h"
55 #include "params.h"
56 #include "tree-ssa-propagate.h"
57 #include "tree-ssa-sccvn.h"
58 #include "tree-cfg.h"
59 #include "domwalk.h"
60 #include "gimple-iterator.h"
61 #include "gimple-match.h"
63 /* This algorithm is based on the SCC algorithm presented by Keith
64 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
65 (http://citeseer.ist.psu.edu/41805.html). In
66 straight line code, it is equivalent to a regular hash based value
67 numbering that is performed in reverse postorder.
69 For code with cycles, there are two alternatives, both of which
70 require keeping the hashtables separate from the actual list of
71 value numbers for SSA names.
73 1. Iterate value numbering in an RPO walk of the blocks, removing
74 all the entries from the hashtable after each iteration (but
75 keeping the SSA name->value number mapping between iterations).
76 Iterate until it does not change.
78 2. Perform value numbering as part of an SCC walk on the SSA graph,
79 iterating only the cycles in the SSA graph until they do not change
80 (using a separate, optimistic hashtable for value numbering the SCC
81 operands).
83 The second is not just faster in practice (because most SSA graph
84 cycles do not involve all the variables in the graph), it also has
85 some nice properties.
87 One of these nice properties is that when we pop an SCC off the
88 stack, we are guaranteed to have processed all the operands coming from
89 *outside of that SCC*, so we do not need to do anything special to
90 ensure they have value numbers.
92 Another nice property is that the SCC walk is done as part of a DFS
93 of the SSA graph, which makes it easy to perform combining and
94 simplifying operations at the same time.
96 The code below is deliberately written in a way that makes it easy
97 to separate the SCC walk from the other work it does.
99 In order to propagate constants through the code, we track which
100 expressions contain constants, and use those while folding. In
101 theory, we could also track expressions whose value numbers are
102 replaced, in case we end up folding based on expression
103 identities.
105 In order to value number memory, we assign value numbers to vuses.
106 This enables us to note that, for example, stores to the same
107 address of the same value from the same starting memory states are
108 equivalent.
109 TODO:
111 1. We can iterate only the changing portions of the SCC's, but
112 I have not seen an SCC big enough for this to be a win.
113 2. If you differentiate between phi nodes for loops and phi nodes
114 for if-then-else, you can properly consider phi nodes in different
115 blocks for equivalence.
116 3. We could value number vuses in more cases, particularly, whole
117 structure copies. */
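/* Illustrative example (not part of the original sources): for the SSA
   cycle

       # i_1 = PHI <0(entry), i_2(latch)>
       # j_1 = PHI <0(entry), j_2(latch)>
       i_2 = i_1 + 1;
       j_2 = j_1 + 1;

   the first optimistic iteration ignores the still-VN_TOP latch
   arguments, so both PHIs hash alike and j_1 receives i_1's value;
   j_2 = j_1 + 1 then value-numbers to i_2, and the next iteration over
   the SCC confirms the assumption, so the four names collapse to two
   values.  */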
121 static tree *last_vuse_ptr;
122 static vn_lookup_kind vn_walk_kind;
123 static vn_lookup_kind default_vn_walk_kind;
124 bitmap const_parms;
126 /* vn_nary_op hashtable helpers. */
128 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
130 typedef vn_nary_op_s *compare_type;
131 static inline hashval_t hash (const vn_nary_op_s *);
132 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
135 /* Return the computed hashcode for nary operation P1. */
137 inline hashval_t
138 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
140 return vno1->hashcode;
143 /* Compare nary operations P1 and P2 and return true if they are
144 equivalent. */
146 inline bool
147 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
149 return vn_nary_op_eq (vno1, vno2);
152 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
153 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
156 /* vn_phi hashtable helpers. */
158 static int
159 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
161 struct vn_phi_hasher : pointer_hash <vn_phi_s>
163 static inline hashval_t hash (const vn_phi_s *);
164 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
165 static inline void remove (vn_phi_s *);
168 /* Return the computed hashcode for phi operation P1. */
170 inline hashval_t
171 vn_phi_hasher::hash (const vn_phi_s *vp1)
173 return vp1->hashcode;
176 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
178 inline bool
179 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
181 return vn_phi_eq (vp1, vp2);
184 /* Free a phi operation structure VP. */
186 inline void
187 vn_phi_hasher::remove (vn_phi_s *phi)
189 phi->phiargs.release ();
192 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
193 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
196 /* Compare two reference operands P1 and P2 for equality. Return true if
197 they are equal, and false otherwise. */
199 static int
200 vn_reference_op_eq (const void *p1, const void *p2)
202 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
203 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
205 return (vro1->opcode == vro2->opcode
206 /* We do not care about differences in type qualification. */
207 && (vro1->type == vro2->type
208 || (vro1->type && vro2->type
209 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
210 TYPE_MAIN_VARIANT (vro2->type))))
211 && expressions_equal_p (vro1->op0, vro2->op0)
212 && expressions_equal_p (vro1->op1, vro2->op1)
213 && expressions_equal_p (vro1->op2, vro2->op2));
216 /* Free a reference operation structure VP. */
218 static inline void
219 free_reference (vn_reference_s *vr)
221 vr->operands.release ();
225 /* vn_reference hashtable helpers. */
227 struct vn_reference_hasher : pointer_hash <vn_reference_s>
229 static inline hashval_t hash (const vn_reference_s *);
230 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
231 static inline void remove (vn_reference_s *);
234 /* Return the hashcode for a given reference operation P1. */
236 inline hashval_t
237 vn_reference_hasher::hash (const vn_reference_s *vr1)
239 return vr1->hashcode;
242 inline bool
243 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
245 return vn_reference_eq (v, c);
248 inline void
249 vn_reference_hasher::remove (vn_reference_s *v)
251 free_reference (v);
254 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
255 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
258 /* The set of hashtables and alloc_pool's for their items. */
260 typedef struct vn_tables_s
262 vn_nary_op_table_type *nary;
263 vn_phi_table_type *phis;
264 vn_reference_table_type *references;
265 struct obstack nary_obstack;
266 object_allocator<vn_phi_s> *phis_pool;
267 object_allocator<vn_reference_s> *references_pool;
268 } *vn_tables_t;
271 /* vn_constant hashtable helpers. */
273 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
275 static inline hashval_t hash (const vn_constant_s *);
276 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
279 /* Hash table hash function for vn_constant_t. */
281 inline hashval_t
282 vn_constant_hasher::hash (const vn_constant_s *vc1)
284 return vc1->hashcode;
287 /* Hash table equality function for vn_constant_t. */
289 inline bool
290 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
292 if (vc1->hashcode != vc2->hashcode)
293 return false;
295 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
298 static hash_table<vn_constant_hasher> *constant_to_value_id;
299 static bitmap constant_value_ids;
302 /* Valid hashtables storing information we have proven to be
303 correct. */
305 static vn_tables_t valid_info;
307 /* Optimistic hashtables storing information we are making assumptions about
308 during iterations. */
310 static vn_tables_t optimistic_info;
312 /* Pointer to the set of hashtables that is currently being used.
313 Should always point to either the optimistic_info, or the
314 valid_info. */
316 static vn_tables_t current_info;
319 /* Reverse post order index for each basic block. */
321 static int *rpo_numbers;
323 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
325 /* Return the SSA value of the VUSE x, supporting released VDEFs
326 during elimination which will value-number the VDEF to the
327 associated VUSE (but not substitute in the whole lattice). */
329 static inline tree
330 vuse_ssa_val (tree x)
332 if (!x)
333 return NULL_TREE;
335 do
337 x = SSA_VAL (x);
339 while (SSA_NAME_IN_FREE_LIST (x));
341 return x;
344 /* This represents the top of the VN lattice, which is the universal
345 value. */
347 tree VN_TOP;
349 /* Unique counter for our value ids. */
351 static unsigned int next_value_id;
353 /* Next DFS number and the stack for strongly connected component
354 detection. */
356 static unsigned int next_dfs_num;
357 static vec<tree> sccstack;
361 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
362 are allocated on an obstack for locality reasons, and to free them
363 without looping over the vec. */
365 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
366 static struct obstack vn_ssa_aux_obstack;
368 /* Return whether there is value numbering information for a given SSA name. */
370 bool
371 has_VN_INFO (tree name)
373 if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
374 return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
375 return false;
378 /* Return the value numbering information for a given SSA name. */
380 vn_ssa_aux_t
381 VN_INFO (tree name)
383 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
384 gcc_checking_assert (res);
385 return res;
388 /* Set the value numbering info for a given SSA name to a given
389 value. */
391 static inline void
392 VN_INFO_SET (tree name, vn_ssa_aux_t value)
394 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
397 /* Initialize the value numbering info for a given SSA name.
398 This should be called just once for every SSA name. */
400 vn_ssa_aux_t
401 VN_INFO_GET (tree name)
403 vn_ssa_aux_t newinfo;
405 gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
406 || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
407 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
408 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
409 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
410 vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
411 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
412 return newinfo;
416 /* Return the vn_kind the expression computed by the stmt should be
417 associated with. */
419 enum vn_kind
420 vn_get_stmt_kind (gimple *stmt)
422 switch (gimple_code (stmt))
424 case GIMPLE_CALL:
425 return VN_REFERENCE;
426 case GIMPLE_PHI:
427 return VN_PHI;
428 case GIMPLE_ASSIGN:
430 enum tree_code code = gimple_assign_rhs_code (stmt);
431 tree rhs1 = gimple_assign_rhs1 (stmt);
432 switch (get_gimple_rhs_class (code))
434 case GIMPLE_UNARY_RHS:
435 case GIMPLE_BINARY_RHS:
436 case GIMPLE_TERNARY_RHS:
437 return VN_NARY;
438 case GIMPLE_SINGLE_RHS:
439 switch (TREE_CODE_CLASS (code))
441 case tcc_reference:
442 /* VOP-less references can go through unary case. */
443 if ((code == REALPART_EXPR
444 || code == IMAGPART_EXPR
445 || code == VIEW_CONVERT_EXPR
446 || code == BIT_FIELD_REF)
447 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
448 return VN_NARY;
450 /* Fallthrough. */
451 case tcc_declaration:
452 return VN_REFERENCE;
454 case tcc_constant:
455 return VN_CONSTANT;
457 default:
458 if (code == ADDR_EXPR)
459 return (is_gimple_min_invariant (rhs1)
460 ? VN_CONSTANT : VN_REFERENCE);
461 else if (code == CONSTRUCTOR)
462 return VN_NARY;
463 return VN_NONE;
465 default:
466 return VN_NONE;
469 default:
470 return VN_NONE;
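/* Illustrative mapping for the classification above (examples assumed,
   not from the original sources):

     _1 = a_2 + b_3;            <- GIMPLE_BINARY_RHS      -> VN_NARY
     _4 = MEM[(int *)p_5];      <- tcc_reference          -> VN_REFERENCE
     _6 = REALPART_EXPR <c_7>;  <- VOP-less ref on SSA    -> VN_NARY
     _8 = 42;                   <- tcc_constant           -> VN_CONSTANT
     x_9 = PHI <_1, _4>;        <- GIMPLE_PHI              -> VN_PHI  */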
474 /* Lookup a value id for CONSTANT and return it. If it does not
475 exist returns 0. */
477 unsigned int
478 get_constant_value_id (tree constant)
480 vn_constant_s **slot;
481 struct vn_constant_s vc;
483 vc.hashcode = vn_hash_constant_with_type (constant);
484 vc.constant = constant;
485 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
486 if (slot)
487 return (*slot)->value_id;
488 return 0;
491 /* Lookup a value id for CONSTANT, and if it does not exist, create a
492 new one and return it. If it does exist, return it. */
494 unsigned int
495 get_or_alloc_constant_value_id (tree constant)
497 vn_constant_s **slot;
498 struct vn_constant_s vc;
499 vn_constant_t vcp;
501 vc.hashcode = vn_hash_constant_with_type (constant);
502 vc.constant = constant;
503 slot = constant_to_value_id->find_slot (&vc, INSERT);
504 if (*slot)
505 return (*slot)->value_id;
507 vcp = XNEW (struct vn_constant_s);
508 vcp->hashcode = vc.hashcode;
509 vcp->constant = constant;
510 vcp->value_id = get_next_value_id ();
511 *slot = vcp;
512 bitmap_set_bit (constant_value_ids, vcp->value_id);
513 return vcp->value_id;
516 /* Return true if V is a value id for a constant. */
518 bool
519 value_id_constant_p (unsigned int v)
521 return bitmap_bit_p (constant_value_ids, v);
524 /* Compute the hash for a reference operand VRO1. */
526 static void
527 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
529 hstate.add_int (vro1->opcode);
530 if (vro1->op0)
531 inchash::add_expr (vro1->op0, hstate);
532 if (vro1->op1)
533 inchash::add_expr (vro1->op1, hstate);
534 if (vro1->op2)
535 inchash::add_expr (vro1->op2, hstate);
538 /* Compute a hash for the reference operation VR1 and return it. */
540 static hashval_t
541 vn_reference_compute_hash (const vn_reference_t vr1)
543 inchash::hash hstate;
544 hashval_t result;
545 int i;
546 vn_reference_op_t vro;
547 HOST_WIDE_INT off = -1;
548 bool deref = false;
550 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
552 if (vro->opcode == MEM_REF)
553 deref = true;
554 else if (vro->opcode != ADDR_EXPR)
555 deref = false;
556 if (vro->off != -1)
558 if (off == -1)
559 off = 0;
560 off += vro->off;
562 else
564 if (off != -1
565 && off != 0)
566 hstate.add_int (off);
567 off = -1;
568 if (deref
569 && vro->opcode == ADDR_EXPR)
571 if (vro->op0)
573 tree op = TREE_OPERAND (vro->op0, 0);
574 hstate.add_int (TREE_CODE (op));
575 inchash::add_expr (op, hstate);
578 else
579 vn_reference_op_compute_hash (vro, hstate);
582 result = hstate.end ();
583 /* ??? We would ICE later if we hash instead of adding that in. */
584 if (vr1->vuse)
585 result += SSA_NAME_VERSION (vr1->vuse);
587 return result;
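/* Illustrative note (layout assumed, 4-byte int): for struct S { int a;
   int f; } s, the operand lists for s.f and MEM[(int *)&s + 4] differ,
   yet both accumulate the constant offset 4 up to the ADDR_EXPR and then
   hash the underlying decl s, so they land in the same bucket and the
   exact comparison is deferred to vn_reference_eq.  */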
590 /* Return true if reference operations VR1 and VR2 are equivalent. This
591 means they have the same set of operands and vuses. */
593 bool
594 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
596 unsigned i, j;
598 /* Early out if this is not a hash collision. */
599 if (vr1->hashcode != vr2->hashcode)
600 return false;
602 /* The VOP needs to be the same. */
603 if (vr1->vuse != vr2->vuse)
604 return false;
606 /* If the operands are the same we are done. */
607 if (vr1->operands == vr2->operands)
608 return true;
610 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
611 return false;
613 if (INTEGRAL_TYPE_P (vr1->type)
614 && INTEGRAL_TYPE_P (vr2->type))
616 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
617 return false;
619 else if (INTEGRAL_TYPE_P (vr1->type)
620 && (TYPE_PRECISION (vr1->type)
621 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
622 return false;
623 else if (INTEGRAL_TYPE_P (vr2->type)
624 && (TYPE_PRECISION (vr2->type)
625 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
626 return false;
628 i = 0;
629 j = 0;
630 do
632 HOST_WIDE_INT off1 = 0, off2 = 0;
633 vn_reference_op_t vro1, vro2;
634 vn_reference_op_s tem1, tem2;
635 bool deref1 = false, deref2 = false;
636 for (; vr1->operands.iterate (i, &vro1); i++)
638 if (vro1->opcode == MEM_REF)
639 deref1 = true;
640 /* Do not look through a storage order barrier. */
641 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
642 return false;
643 if (vro1->off == -1)
644 break;
645 off1 += vro1->off;
647 for (; vr2->operands.iterate (j, &vro2); j++)
649 if (vro2->opcode == MEM_REF)
650 deref2 = true;
651 /* Do not look through a storage order barrier. */
652 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
653 return false;
654 if (vro2->off == -1)
655 break;
656 off2 += vro2->off;
658 if (off1 != off2)
659 return false;
660 if (deref1 && vro1->opcode == ADDR_EXPR)
662 memset (&tem1, 0, sizeof (tem1));
663 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
664 tem1.type = TREE_TYPE (tem1.op0);
665 tem1.opcode = TREE_CODE (tem1.op0);
666 vro1 = &tem1;
667 deref1 = false;
669 if (deref2 && vro2->opcode == ADDR_EXPR)
671 memset (&tem2, 0, sizeof (tem2));
672 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
673 tem2.type = TREE_TYPE (tem2.op0);
674 tem2.opcode = TREE_CODE (tem2.op0);
675 vro2 = &tem2;
676 deref2 = false;
678 if (deref1 != deref2)
679 return false;
680 if (!vn_reference_op_eq (vro1, vro2))
681 return false;
682 ++j;
683 ++i;
685 while (vr1->operands.length () != i
686 || vr2->operands.length () != j);
688 return true;
691 /* Copy the operations present in load/store REF into RESULT, a vector of
692 vn_reference_op_s's. */
694 static void
695 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
697 if (TREE_CODE (ref) == TARGET_MEM_REF)
699 vn_reference_op_s temp;
701 result->reserve (3);
703 memset (&temp, 0, sizeof (temp));
704 temp.type = TREE_TYPE (ref);
705 temp.opcode = TREE_CODE (ref);
706 temp.op0 = TMR_INDEX (ref);
707 temp.op1 = TMR_STEP (ref);
708 temp.op2 = TMR_OFFSET (ref);
709 temp.off = -1;
710 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
711 temp.base = MR_DEPENDENCE_BASE (ref);
712 result->quick_push (temp);
714 memset (&temp, 0, sizeof (temp));
715 temp.type = NULL_TREE;
716 temp.opcode = ERROR_MARK;
717 temp.op0 = TMR_INDEX2 (ref);
718 temp.off = -1;
719 result->quick_push (temp);
721 memset (&temp, 0, sizeof (temp));
722 temp.type = NULL_TREE;
723 temp.opcode = TREE_CODE (TMR_BASE (ref));
724 temp.op0 = TMR_BASE (ref);
725 temp.off = -1;
726 result->quick_push (temp);
727 return;
730 /* For non-calls, store the information that makes up the address. */
731 tree orig = ref;
732 while (ref)
734 vn_reference_op_s temp;
736 memset (&temp, 0, sizeof (temp));
737 temp.type = TREE_TYPE (ref);
738 temp.opcode = TREE_CODE (ref);
739 temp.off = -1;
741 switch (temp.opcode)
743 case MODIFY_EXPR:
744 temp.op0 = TREE_OPERAND (ref, 1);
745 break;
746 case WITH_SIZE_EXPR:
747 temp.op0 = TREE_OPERAND (ref, 1);
748 temp.off = 0;
749 break;
750 case MEM_REF:
751 /* The base address gets its own vn_reference_op_s structure. */
752 temp.op0 = TREE_OPERAND (ref, 1);
754 offset_int off = mem_ref_offset (ref);
755 if (wi::fits_shwi_p (off))
756 temp.off = off.to_shwi ();
758 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
759 temp.base = MR_DEPENDENCE_BASE (ref);
760 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
761 break;
762 case BIT_FIELD_REF:
763 /* Record bits, position and storage order. */
764 temp.op0 = TREE_OPERAND (ref, 1);
765 temp.op1 = TREE_OPERAND (ref, 2);
766 if (tree_fits_shwi_p (TREE_OPERAND (ref, 2)))
768 HOST_WIDE_INT off = tree_to_shwi (TREE_OPERAND (ref, 2));
769 if (off % BITS_PER_UNIT == 0)
770 temp.off = off / BITS_PER_UNIT;
772 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
773 break;
774 case COMPONENT_REF:
775 /* The field decl is enough to unambiguously specify the field,
776 a matching type is not necessary and a mismatching type
777 is always a spurious difference. */
778 temp.type = NULL_TREE;
779 temp.op0 = TREE_OPERAND (ref, 1);
780 temp.op1 = TREE_OPERAND (ref, 2);
782 tree this_offset = component_ref_field_offset (ref);
783 if (this_offset
784 && TREE_CODE (this_offset) == INTEGER_CST)
786 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
787 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
789 offset_int off
790 = (wi::to_offset (this_offset)
791 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
792 if (wi::fits_shwi_p (off)
793 Prohibit value-numbering zero offset components
794 of addresses the same before the pass folding
795 __builtin_object_size had a chance to run
796 (checking cfun->after_inlining does the
797 trick here). */
798 && (TREE_CODE (orig) != ADDR_EXPR
799 || off != 0
800 || cfun->after_inlining))
801 temp.off = off.to_shwi ();
805 break;
806 case ARRAY_RANGE_REF:
807 case ARRAY_REF:
808 /* Record index as operand. */
809 temp.op0 = TREE_OPERAND (ref, 1);
810 /* Always record lower bounds and element size. */
811 temp.op1 = array_ref_low_bound (ref);
812 temp.op2 = array_ref_element_size (ref);
813 if (TREE_CODE (temp.op0) == INTEGER_CST
814 && TREE_CODE (temp.op1) == INTEGER_CST
815 && TREE_CODE (temp.op2) == INTEGER_CST)
817 offset_int off = ((wi::to_offset (temp.op0)
818 - wi::to_offset (temp.op1))
819 * wi::to_offset (temp.op2));
820 if (wi::fits_shwi_p (off))
821 temp.off = off.to_shwi();
823 break;
824 case VAR_DECL:
825 if (DECL_HARD_REGISTER (ref))
827 temp.op0 = ref;
828 break;
830 /* Fallthru. */
831 case PARM_DECL:
832 case CONST_DECL:
833 case RESULT_DECL:
834 /* Canonicalize decls to MEM[&decl] which is what we end up with
835 when valueizing MEM[ptr] with ptr = &decl. */
836 temp.opcode = MEM_REF;
837 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
838 temp.off = 0;
839 result->safe_push (temp);
840 temp.opcode = ADDR_EXPR;
841 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
842 temp.type = TREE_TYPE (temp.op0);
843 temp.off = -1;
844 break;
845 case STRING_CST:
846 case INTEGER_CST:
847 case COMPLEX_CST:
848 case VECTOR_CST:
849 case REAL_CST:
850 case FIXED_CST:
851 case CONSTRUCTOR:
852 case SSA_NAME:
853 temp.op0 = ref;
854 break;
855 case ADDR_EXPR:
856 if (is_gimple_min_invariant (ref))
858 temp.op0 = ref;
859 break;
861 break;
862 /* These are only interesting for their operands, their
863 existence, and their type. They will never be the last
864 ref in the chain of references (IE they require an
865 operand), so we don't have to put anything
866 for op* as it will be handled by the iteration */
867 case REALPART_EXPR:
868 temp.off = 0;
869 break;
870 case VIEW_CONVERT_EXPR:
871 temp.off = 0;
872 temp.reverse = storage_order_barrier_p (ref);
873 break;
874 case IMAGPART_EXPR:
875 /* This is only interesting for its constant offset. */
876 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
877 break;
878 default:
879 gcc_unreachable ();
881 result->safe_push (temp);
883 if (REFERENCE_CLASS_P (ref)
884 || TREE_CODE (ref) == MODIFY_EXPR
885 || TREE_CODE (ref) == WITH_SIZE_EXPR
886 || (TREE_CODE (ref) == ADDR_EXPR
887 && !is_gimple_min_invariant (ref)))
888 ref = TREE_OPERAND (ref, 0);
889 else
890 ref = NULL_TREE;
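/* Illustrative decomposition (names assumed): for the reference
   s.a[i_1] the walk from the outermost expression inwards yields
   roughly

     ARRAY_REF      op0 = i_1, op1 = low bound, op2 = element size, off = -1
     COMPONENT_REF  op0 = FIELD_DECL a, off = byte offset of a (or -1)
     MEM_REF        op0 = 0, off = 0
     ADDR_EXPR      op0 = &s, off = -1

   where the trailing MEM_REF/ADDR_EXPR pair is the canonical MEM[&s]
   form used for bare decls.  */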
894 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
895 operands in *OPS, the reference alias set SET and the reference type TYPE.
896 Return true if something useful was produced. */
898 bool
899 ao_ref_init_from_vn_reference (ao_ref *ref,
900 alias_set_type set, tree type,
901 vec<vn_reference_op_s> ops)
903 vn_reference_op_t op;
904 unsigned i;
905 tree base = NULL_TREE;
906 tree *op0_p = &base;
907 offset_int offset = 0;
908 offset_int max_size;
909 offset_int size = -1;
910 tree size_tree = NULL_TREE;
911 alias_set_type base_alias_set = -1;
913 /* First get the final access size from just the outermost expression. */
914 op = &ops[0];
915 if (op->opcode == COMPONENT_REF)
916 size_tree = DECL_SIZE (op->op0);
917 else if (op->opcode == BIT_FIELD_REF)
918 size_tree = op->op0;
919 else
921 machine_mode mode = TYPE_MODE (type);
922 if (mode == BLKmode)
923 size_tree = TYPE_SIZE (type);
924 else
925 size = int (GET_MODE_BITSIZE (mode));
927 if (size_tree != NULL_TREE
928 && TREE_CODE (size_tree) == INTEGER_CST)
929 size = wi::to_offset (size_tree);
931 /* Initially, maxsize is the same as the accessed element size.
932 In the following it will only grow (or become -1). */
933 max_size = size;
935 /* Compute cumulative bit-offset for nested component-refs and array-refs,
936 and find the ultimate containing object. */
937 FOR_EACH_VEC_ELT (ops, i, op)
939 switch (op->opcode)
941 /* These may be in the reference ops, but we cannot do anything
942 sensible with them here. */
943 case ADDR_EXPR:
944 /* Apart from ADDR_EXPR arguments to MEM_REF. */
945 if (base != NULL_TREE
946 && TREE_CODE (base) == MEM_REF
947 && op->op0
948 && DECL_P (TREE_OPERAND (op->op0, 0)))
950 vn_reference_op_t pop = &ops[i-1];
951 base = TREE_OPERAND (op->op0, 0);
952 if (pop->off == -1)
954 max_size = -1;
955 offset = 0;
957 else
958 offset += pop->off * BITS_PER_UNIT;
959 op0_p = NULL;
960 break;
962 /* Fallthru. */
963 case CALL_EXPR:
964 return false;
966 /* Record the base objects. */
967 case MEM_REF:
968 base_alias_set = get_deref_alias_set (op->op0);
969 *op0_p = build2 (MEM_REF, op->type,
970 NULL_TREE, op->op0);
971 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
972 MR_DEPENDENCE_BASE (*op0_p) = op->base;
973 op0_p = &TREE_OPERAND (*op0_p, 0);
974 break;
976 case VAR_DECL:
977 case PARM_DECL:
978 case RESULT_DECL:
979 case SSA_NAME:
980 *op0_p = op->op0;
981 op0_p = NULL;
982 break;
984 /* And now the usual component-reference style ops. */
985 case BIT_FIELD_REF:
986 offset += wi::to_offset (op->op1);
987 break;
989 case COMPONENT_REF:
991 tree field = op->op0;
992 /* We do not have a complete COMPONENT_REF tree here so we
993 cannot use component_ref_field_offset. Do the interesting
994 parts manually. */
995 tree this_offset = DECL_FIELD_OFFSET (field);
997 if (op->op1 || TREE_CODE (this_offset) != INTEGER_CST)
998 max_size = -1;
999 else
1001 offset_int woffset = (wi::to_offset (this_offset)
1002 << LOG2_BITS_PER_UNIT);
1003 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1004 offset += woffset;
1006 break;
1009 case ARRAY_RANGE_REF:
1010 case ARRAY_REF:
1011 /* We recorded the lower bound and the element size. */
1012 if (TREE_CODE (op->op0) != INTEGER_CST
1013 || TREE_CODE (op->op1) != INTEGER_CST
1014 || TREE_CODE (op->op2) != INTEGER_CST)
1015 max_size = -1;
1016 else
1018 offset_int woffset
1019 = wi::sext (wi::to_offset (op->op0) - wi::to_offset (op->op1),
1020 TYPE_PRECISION (TREE_TYPE (op->op0)));
1021 woffset *= wi::to_offset (op->op2);
1022 woffset <<= LOG2_BITS_PER_UNIT;
1023 offset += woffset;
1025 break;
1027 case REALPART_EXPR:
1028 break;
1030 case IMAGPART_EXPR:
1031 offset += size;
1032 break;
1034 case VIEW_CONVERT_EXPR:
1035 break;
1037 case STRING_CST:
1038 case INTEGER_CST:
1039 case COMPLEX_CST:
1040 case VECTOR_CST:
1041 case REAL_CST:
1042 case CONSTRUCTOR:
1043 case CONST_DECL:
1044 return false;
1046 default:
1047 return false;
1051 if (base == NULL_TREE)
1052 return false;
1054 ref->ref = NULL_TREE;
1055 ref->base = base;
1056 ref->ref_alias_set = set;
1057 if (base_alias_set != -1)
1058 ref->base_alias_set = base_alias_set;
1059 else
1060 ref->base_alias_set = get_alias_set (base);
1061 /* We discount volatiles from value-numbering elsewhere. */
1062 ref->volatile_p = false;
1064 if (!wi::fits_shwi_p (size) || wi::neg_p (size))
1066 ref->offset = 0;
1067 ref->size = -1;
1068 ref->max_size = -1;
1069 return true;
1072 ref->size = size.to_shwi ();
1074 if (!wi::fits_shwi_p (offset))
1076 ref->offset = 0;
1077 ref->max_size = -1;
1078 return true;
1081 ref->offset = offset.to_shwi ();
1083 if (!wi::fits_shwi_p (max_size) || wi::neg_p (max_size))
1084 ref->max_size = -1;
1085 else
1086 ref->max_size = max_size.to_shwi ();
1088 return true;
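/* Illustrative example (layout assumed): for the valueized operands of
   MEM[(int *)&s + 8] the MEM_REF's ADDR_EXPR argument lets the base
   collapse to the decl s, the constant byte offset 8 becomes
   offset = 64 bits, size and max_size come from the int mode (32 bits),
   and ref->ref stays NULL_TREE since no full reference tree is
   rebuilt.  */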
1091 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1092 vn_reference_op_s's. */
1094 static void
1095 copy_reference_ops_from_call (gcall *call,
1096 vec<vn_reference_op_s> *result)
1098 vn_reference_op_s temp;
1099 unsigned i;
1100 tree lhs = gimple_call_lhs (call);
1101 int lr;
1103 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1104 different. By adding the lhs here in the vector, we ensure that the
1105 hashcode is different, guaranteeing a different value number. */
1106 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1108 memset (&temp, 0, sizeof (temp));
1109 temp.opcode = MODIFY_EXPR;
1110 temp.type = TREE_TYPE (lhs);
1111 temp.op0 = lhs;
1112 temp.off = -1;
1113 result->safe_push (temp);
1116 /* Copy the type, opcode, function, static chain and EH region, if any. */
1117 memset (&temp, 0, sizeof (temp));
1118 temp.type = gimple_call_return_type (call);
1119 temp.opcode = CALL_EXPR;
1120 temp.op0 = gimple_call_fn (call);
1121 temp.op1 = gimple_call_chain (call);
1122 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1123 temp.op2 = size_int (lr);
1124 temp.off = -1;
1125 if (gimple_call_with_bounds_p (call))
1126 temp.with_bounds = 1;
1127 result->safe_push (temp);
1129 /* Copy the call arguments. As they can be references as well,
1130 just chain them together. */
1131 for (i = 0; i < gimple_call_num_args (call); ++i)
1133 tree callarg = gimple_call_arg (call, i);
1134 copy_reference_ops_from_ref (callarg, result);
1138 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1139 *I_P to point to the last element of the replacement. */
1140 static bool
1141 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1142 unsigned int *i_p)
1144 unsigned int i = *i_p;
1145 vn_reference_op_t op = &(*ops)[i];
1146 vn_reference_op_t mem_op = &(*ops)[i - 1];
1147 tree addr_base;
1148 HOST_WIDE_INT addr_offset = 0;
1150 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1151 from .foo.bar to the preceding MEM_REF offset and replace the
1152 address with &OBJ. */
1153 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1154 &addr_offset);
1155 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1156 if (addr_base != TREE_OPERAND (op->op0, 0))
1158 offset_int off = offset_int::from (mem_op->op0, SIGNED);
1159 off += addr_offset;
1160 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1161 op->op0 = build_fold_addr_expr (addr_base);
1162 if (tree_fits_shwi_p (mem_op->op0))
1163 mem_op->off = tree_to_shwi (mem_op->op0);
1164 else
1165 mem_op->off = -1;
1166 return true;
1168 return false;
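/* Illustrative example (field layout assumed): given the operand pair
   for MEM[&s.f, 0] with f at byte offset 8, the ADDR_EXPR operand is
   rewritten to &s and the MEM_REF offset becomes 8, i.e. the pair now
   describes MEM[&s, 8] with a bare decl address.  */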
1171 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1172 *I_P to point to the last element of the replacement. */
1173 static bool
1174 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1175 unsigned int *i_p)
1177 unsigned int i = *i_p;
1178 vn_reference_op_t op = &(*ops)[i];
1179 vn_reference_op_t mem_op = &(*ops)[i - 1];
1180 gimple *def_stmt;
1181 enum tree_code code;
1182 offset_int off;
1184 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1185 if (!is_gimple_assign (def_stmt))
1186 return false;
1188 code = gimple_assign_rhs_code (def_stmt);
1189 if (code != ADDR_EXPR
1190 && code != POINTER_PLUS_EXPR)
1191 return false;
1193 off = offset_int::from (mem_op->op0, SIGNED);
1195 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1196 from .foo.bar to the preceding MEM_REF offset and replace the
1197 address with &OBJ. */
1198 if (code == ADDR_EXPR)
1200 tree addr, addr_base;
1201 HOST_WIDE_INT addr_offset;
1203 addr = gimple_assign_rhs1 (def_stmt);
1204 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1205 &addr_offset);
1206 /* If that didn't work because the address isn't invariant propagate
1207 the reference tree from the address operation in case the current
1208 dereference isn't offsetted. */
1209 if (!addr_base
1210 && *i_p == ops->length () - 1
1211 && off == 0
1212 /* This makes us disable this transform for PRE where the
1213 reference ops might be also used for code insertion which
1214 is invalid. */
1215 && default_vn_walk_kind == VN_WALKREWRITE)
1217 auto_vec<vn_reference_op_s, 32> tem;
1218 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1219 ops->pop ();
1220 ops->pop ();
1221 ops->safe_splice (tem);
1222 --*i_p;
1223 return true;
1225 if (!addr_base
1226 || TREE_CODE (addr_base) != MEM_REF)
1227 return false;
1229 off += addr_offset;
1230 off += mem_ref_offset (addr_base);
1231 op->op0 = TREE_OPERAND (addr_base, 0);
1233 else
1235 tree ptr, ptroff;
1236 ptr = gimple_assign_rhs1 (def_stmt);
1237 ptroff = gimple_assign_rhs2 (def_stmt);
1238 if (TREE_CODE (ptr) != SSA_NAME
1239 || TREE_CODE (ptroff) != INTEGER_CST)
1240 return false;
1242 off += wi::to_offset (ptroff);
1243 op->op0 = ptr;
1246 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1247 if (tree_fits_shwi_p (mem_op->op0))
1248 mem_op->off = tree_to_shwi (mem_op->op0);
1249 else
1250 mem_op->off = -1;
1251 if (TREE_CODE (op->op0) == SSA_NAME)
1252 op->op0 = SSA_VAL (op->op0);
1253 if (TREE_CODE (op->op0) != SSA_NAME)
1254 op->opcode = TREE_CODE (op->op0);
1256 /* And recurse. */
1257 if (TREE_CODE (op->op0) == SSA_NAME)
1258 vn_reference_maybe_forwprop_address (ops, i_p);
1259 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1260 vn_reference_fold_indirect (ops, i_p);
1261 return true;
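/* Illustrative example (names assumed): for

     p_1 = &a;
     q_2 = p_1 + 16;
     ... = MEM[q_2, 4];

   the POINTER_PLUS_EXPR case folds the constant 16 into the MEM_REF
   offset and replaces q_2 by p_1; since p_1 has been value-numbered to
   &a, the SSA_VAL substitution turns the operand into an ADDR_EXPR and
   the reference ends up as MEM[&a, 20].  */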
1264 /* Optimize the reference REF to a constant if possible or return
1265 NULL_TREE if not. */
1267 tree
1268 fully_constant_vn_reference_p (vn_reference_t ref)
1270 vec<vn_reference_op_s> operands = ref->operands;
1271 vn_reference_op_t op;
1273 /* Try to simplify the translated expression if it is
1274 a call to a builtin function with at most two arguments. */
1275 op = &operands[0];
1276 if (op->opcode == CALL_EXPR
1277 && TREE_CODE (op->op0) == ADDR_EXPR
1278 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1279 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1280 && operands.length () >= 2
1281 && operands.length () <= 3)
1283 vn_reference_op_t arg0, arg1 = NULL;
1284 bool anyconst = false;
1285 arg0 = &operands[1];
1286 if (operands.length () > 2)
1287 arg1 = &operands[2];
1288 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1289 || (arg0->opcode == ADDR_EXPR
1290 && is_gimple_min_invariant (arg0->op0)))
1291 anyconst = true;
1292 if (arg1
1293 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1294 || (arg1->opcode == ADDR_EXPR
1295 && is_gimple_min_invariant (arg1->op0))))
1296 anyconst = true;
1297 if (anyconst)
1299 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1300 arg1 ? 2 : 1,
1301 arg0->op0,
1302 arg1 ? arg1->op0 : NULL);
1303 if (folded
1304 && TREE_CODE (folded) == NOP_EXPR)
1305 folded = TREE_OPERAND (folded, 0);
1306 if (folded
1307 && is_gimple_min_invariant (folded))
1308 return folded;
1312 /* Simplify reads from constants or constant initializers. */
1313 else if (BITS_PER_UNIT == 8
1314 && is_gimple_reg_type (ref->type)
1315 && (!INTEGRAL_TYPE_P (ref->type)
1316 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1318 HOST_WIDE_INT off = 0;
1319 HOST_WIDE_INT size;
1320 if (INTEGRAL_TYPE_P (ref->type))
1321 size = TYPE_PRECISION (ref->type);
1322 else
1323 size = tree_to_shwi (TYPE_SIZE (ref->type));
1324 if (size % BITS_PER_UNIT != 0
1325 || size > MAX_BITSIZE_MODE_ANY_MODE)
1326 return NULL_TREE;
1327 size /= BITS_PER_UNIT;
1328 unsigned i;
1329 for (i = 0; i < operands.length (); ++i)
1331 if (operands[i].off == -1)
1332 return NULL_TREE;
1333 off += operands[i].off;
1334 if (operands[i].opcode == MEM_REF)
1336 ++i;
1337 break;
1340 vn_reference_op_t base = &operands[--i];
1341 tree ctor = error_mark_node;
1342 tree decl = NULL_TREE;
1343 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1344 ctor = base->op0;
1345 else if (base->opcode == MEM_REF
1346 && base[1].opcode == ADDR_EXPR
1347 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1348 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
1350 decl = TREE_OPERAND (base[1].op0, 0);
1351 ctor = ctor_for_folding (decl);
1353 if (ctor == NULL_TREE)
1354 return build_zero_cst (ref->type);
1355 else if (ctor != error_mark_node)
1357 if (decl)
1359 tree res = fold_ctor_reference (ref->type, ctor,
1360 off * BITS_PER_UNIT,
1361 size * BITS_PER_UNIT, decl);
1362 if (res)
1364 STRIP_USELESS_TYPE_CONVERSION (res);
1365 if (is_gimple_min_invariant (res))
1366 return res;
1369 else
1371 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1372 int len = native_encode_expr (ctor, buf, size, off);
1373 if (len > 0)
1374 return native_interpret_expr (ref->type, buf, len);
1379 return NULL_TREE;
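/* Illustrative example (declarations assumed): for

     static const int tab[4] = { 1, 2, 3, 5 };
     ... = tab[2];

   the operands resolve to the decl tab at byte offset 8, ctor_for_folding
   yields the initializer, and fold_ctor_reference (or the native
   encode/interpret pair) returns the constant 3.  */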
1382 /* Return true if OPS contain a storage order barrier. */
1384 static bool
1385 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1387 vn_reference_op_t op;
1388 unsigned i;
1390 FOR_EACH_VEC_ELT (ops, i, op)
1391 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1392 return true;
1394 return false;
1397 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1398 structures into their value numbers. This is done in-place, and
1399 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1400 whether any operands were valueized. */
1402 static vec<vn_reference_op_s>
1403 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1405 vn_reference_op_t vro;
1406 unsigned int i;
1408 *valueized_anything = false;
1410 FOR_EACH_VEC_ELT (orig, i, vro)
1412 if (vro->opcode == SSA_NAME
1413 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1415 tree tem = SSA_VAL (vro->op0);
1416 if (tem != vro->op0)
1418 *valueized_anything = true;
1419 vro->op0 = tem;
1421 /* If it transforms from an SSA_NAME to a constant, update
1422 the opcode. */
1423 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1424 vro->opcode = TREE_CODE (vro->op0);
1426 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1428 tree tem = SSA_VAL (vro->op1);
1429 if (tem != vro->op1)
1431 *valueized_anything = true;
1432 vro->op1 = tem;
1435 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1437 tree tem = SSA_VAL (vro->op2);
1438 if (tem != vro->op2)
1440 *valueized_anything = true;
1441 vro->op2 = tem;
1444 /* If it transforms from an SSA_NAME to an address, fold with
1445 a preceding indirect reference. */
1446 if (i > 0
1447 && vro->op0
1448 && TREE_CODE (vro->op0) == ADDR_EXPR
1449 && orig[i - 1].opcode == MEM_REF)
1451 if (vn_reference_fold_indirect (&orig, &i))
1452 *valueized_anything = true;
1454 else if (i > 0
1455 && vro->opcode == SSA_NAME
1456 && orig[i - 1].opcode == MEM_REF)
1458 if (vn_reference_maybe_forwprop_address (&orig, &i))
1459 *valueized_anything = true;
1461 /* If it transforms a non-constant ARRAY_REF into a constant
1462 one, adjust the constant offset. */
1463 else if (vro->opcode == ARRAY_REF
1464 && vro->off == -1
1465 && TREE_CODE (vro->op0) == INTEGER_CST
1466 && TREE_CODE (vro->op1) == INTEGER_CST
1467 && TREE_CODE (vro->op2) == INTEGER_CST)
1469 offset_int off = ((wi::to_offset (vro->op0)
1470 - wi::to_offset (vro->op1))
1471 * wi::to_offset (vro->op2));
1472 if (wi::fits_shwi_p (off))
1473 vro->off = off.to_shwi ();
1477 return orig;
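/* Illustrative example (names assumed): if i_2 has been value-numbered
   to the constant 3, the ARRAY_REF operand for a[i_2] gets its op0
   replaced by 3 and the final clause recomputes the now-constant
   offset, so later offset-based lookups and ao_ref construction can
   use it.  */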
1480 static vec<vn_reference_op_s>
1481 valueize_refs (vec<vn_reference_op_s> orig)
1483 bool tem;
1484 return valueize_refs_1 (orig, &tem);
1487 static vec<vn_reference_op_s> shared_lookup_references;
1489 /* Create a vector of vn_reference_op_s structures from REF, a
1490 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1491 this function. *VALUEIZED_ANYTHING will specify whether any
1492 operands were valueized. */
1494 static vec<vn_reference_op_s>
1495 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1497 if (!ref)
1498 return vNULL;
1499 shared_lookup_references.truncate (0);
1500 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1501 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1502 valueized_anything);
1503 return shared_lookup_references;
1506 /* Create a vector of vn_reference_op_s structures from CALL, a
1507 call statement. The vector is shared among all callers of
1508 this function. */
1510 static vec<vn_reference_op_s>
1511 valueize_shared_reference_ops_from_call (gcall *call)
1513 if (!call)
1514 return vNULL;
1515 shared_lookup_references.truncate (0);
1516 copy_reference_ops_from_call (call, &shared_lookup_references);
1517 shared_lookup_references = valueize_refs (shared_lookup_references);
1518 return shared_lookup_references;
1521 /* Lookup a SCCVN reference operation VR in the current hash table.
1522 Returns the resulting value number if it exists in the hash table,
1523 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1524 vn_reference_t stored in the hashtable if something is found. */
1526 static tree
1527 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1529 vn_reference_s **slot;
1530 hashval_t hash;
1532 hash = vr->hashcode;
1533 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1534 if (!slot && current_info == optimistic_info)
1535 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1536 if (slot)
1538 if (vnresult)
1539 *vnresult = (vn_reference_t)*slot;
1540 return ((vn_reference_t)*slot)->result;
1543 return NULL_TREE;
1546 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1547 with the current VUSE and performs the expression lookup. */
1549 static void *
1550 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1551 unsigned int cnt, void *vr_)
1553 vn_reference_t vr = (vn_reference_t)vr_;
1554 vn_reference_s **slot;
1555 hashval_t hash;
1557 /* This bounds the stmt walks we perform on reference lookups
1558 to O(1) instead of O(N) where N is the number of dominating
1559 stores. */
1560 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1561 return (void *)-1;
1563 if (last_vuse_ptr)
1564 *last_vuse_ptr = vuse;
1566 /* Fixup vuse and hash. */
1567 if (vr->vuse)
1568 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1569 vr->vuse = vuse_ssa_val (vuse);
1570 if (vr->vuse)
1571 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1573 hash = vr->hashcode;
1574 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1575 if (!slot && current_info == optimistic_info)
1576 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1577 if (slot)
1578 return *slot;
1580 return NULL;
1583 /* Lookup an existing or insert a new vn_reference entry into the
1584 value table for the VUSE, SET, TYPE, OPERANDS reference which
1585 has the value VALUE which is either a constant or an SSA name. */
1587 static vn_reference_t
1588 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1589 alias_set_type set,
1590 tree type,
1591 vec<vn_reference_op_s,
1592 va_heap> operands,
1593 tree value)
1595 vn_reference_s vr1;
1596 vn_reference_t result;
1597 unsigned value_id;
1598 vr1.vuse = vuse;
1599 vr1.operands = operands;
1600 vr1.type = type;
1601 vr1.set = set;
1602 vr1.hashcode = vn_reference_compute_hash (&vr1);
1603 if (vn_reference_lookup_1 (&vr1, &result))
1604 return result;
1605 if (TREE_CODE (value) == SSA_NAME)
1606 value_id = VN_INFO (value)->value_id;
1607 else
1608 value_id = get_or_alloc_constant_value_id (value);
1609 return vn_reference_insert_pieces (vuse, set, type,
1610 operands.copy (), value, value_id);
1613 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1614 from the statement defining VUSE and if not successful tries to
1615 translate *REFP and VR_ through an aggregate copy at the definition
1616 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1617 of *REF and *VR. If only disambiguation was performed then
1618 *DISAMBIGUATE_ONLY is set to true. */
1620 static void *
1621 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1622 bool *disambiguate_only)
1624 vn_reference_t vr = (vn_reference_t)vr_;
1625 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1626 tree base = ao_ref_base (ref);
1627 HOST_WIDE_INT offset, maxsize;
1628 static vec<vn_reference_op_s>
1629 lhs_ops = vNULL;
1630 ao_ref lhs_ref;
1631 bool lhs_ref_ok = false;
1633 /* If the reference is based on a parameter that was determined to be
1634 pointing to readonly memory, it doesn't change. */
1635 if (TREE_CODE (base) == MEM_REF
1636 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1637 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1638 && bitmap_bit_p (const_parms,
1639 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1641 *disambiguate_only = true;
1642 return NULL;
1645 /* First try to disambiguate after value-replacing in the definitions LHS. */
1646 if (is_gimple_assign (def_stmt))
1648 tree lhs = gimple_assign_lhs (def_stmt);
1649 bool valueized_anything = false;
1650 /* Avoid re-allocation overhead. */
1651 lhs_ops.truncate (0);
1652 copy_reference_ops_from_ref (lhs, &lhs_ops);
1653 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1654 if (valueized_anything)
1656 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1657 get_alias_set (lhs),
1658 TREE_TYPE (lhs), lhs_ops);
1659 if (lhs_ref_ok
1660 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1662 *disambiguate_only = true;
1663 return NULL;
1666 else
1668 ao_ref_init (&lhs_ref, lhs);
1669 lhs_ref_ok = true;
1672 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1673 && gimple_call_num_args (def_stmt) <= 4)
1675 /* For builtin calls valueize its arguments and call the
1676 alias oracle again. Valueization may improve points-to
1677 info of pointers and constify size and position arguments.
1678 Originally this was motivated by PR61034 which has
1679 conditional calls to free falsely clobbering ref because
1680 of imprecise points-to info of the argument. */
1681 tree oldargs[4];
1682 bool valueized_anything = false;
1683 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1685 oldargs[i] = gimple_call_arg (def_stmt, i);
1686 if (TREE_CODE (oldargs[i]) == SSA_NAME
1687 && VN_INFO (oldargs[i])->valnum != oldargs[i])
1689 gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
1690 valueized_anything = true;
1693 if (valueized_anything)
1695 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1696 ref);
1697 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1698 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1699 if (!res)
1701 *disambiguate_only = true;
1702 return NULL;
1707 if (*disambiguate_only)
1708 return (void *)-1;
1710 offset = ref->offset;
1711 maxsize = ref->max_size;
1713 /* If we cannot constrain the size of the reference we cannot
1714 test if anything kills it. */
1715 if (maxsize == -1)
1716 return (void *)-1;
1718 /* We can't deduce anything useful from clobbers. */
1719 if (gimple_clobber_p (def_stmt))
1720 return (void *)-1;
1722 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1723 from that definition.
1724 1) Memset. */
1725 if (is_gimple_reg_type (vr->type)
1726 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1727 && integer_zerop (gimple_call_arg (def_stmt, 1))
1728 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1729 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1731 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1732 tree base2;
1733 HOST_WIDE_INT offset2, size2, maxsize2;
1734 bool reverse;
1735 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1736 &reverse);
1737 size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
1738 if ((unsigned HOST_WIDE_INT)size2 / 8
1739 == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
1740 && maxsize2 != -1
1741 && operand_equal_p (base, base2, 0)
1742 && offset2 <= offset
1743 && offset2 + size2 >= offset + maxsize)
1745 tree val = build_zero_cst (vr->type);
1746 return vn_reference_lookup_or_insert_for_pieces
1747 (vuse, vr->set, vr->type, vr->operands, val);
1751 /* 2) Assignment from an empty CONSTRUCTOR. */
1752 else if (is_gimple_reg_type (vr->type)
1753 && gimple_assign_single_p (def_stmt)
1754 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1755 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1757 tree base2;
1758 HOST_WIDE_INT offset2, size2, maxsize2;
1759 bool reverse;
1760 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1761 &offset2, &size2, &maxsize2, &reverse);
1762 if (maxsize2 != -1
1763 && operand_equal_p (base, base2, 0)
1764 && offset2 <= offset
1765 && offset2 + size2 >= offset + maxsize)
1767 tree val = build_zero_cst (vr->type);
1768 return vn_reference_lookup_or_insert_for_pieces
1769 (vuse, vr->set, vr->type, vr->operands, val);
1773 /* 3) Assignment from a constant. We can use folds native encode/interpret
1774 routines to extract the assigned bits. */
1775 else if (vn_walk_kind == VN_WALKREWRITE
1776 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1777 && ref->size == maxsize
1778 && maxsize % BITS_PER_UNIT == 0
1779 && offset % BITS_PER_UNIT == 0
1780 && is_gimple_reg_type (vr->type)
1781 && !contains_storage_order_barrier_p (vr->operands)
1782 && gimple_assign_single_p (def_stmt)
1783 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
1785 tree base2;
1786 HOST_WIDE_INT offset2, size2, maxsize2;
1787 bool reverse;
1788 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1789 &offset2, &size2, &maxsize2, &reverse);
1790 if (!reverse
1791 && maxsize2 != -1
1792 && maxsize2 == size2
1793 && size2 % BITS_PER_UNIT == 0
1794 && offset2 % BITS_PER_UNIT == 0
1795 && operand_equal_p (base, base2, 0)
1796 && offset2 <= offset
1797 && offset2 + size2 >= offset + maxsize)
1799 /* We support up to 512-bit values (for V8DFmode). */
1800 unsigned char buffer[64];
1801 int len;
1803 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1804 buffer, sizeof (buffer));
1805 if (len > 0)
1807 tree val = native_interpret_expr (vr->type,
1808 buffer
1809 + ((offset - offset2)
1810 / BITS_PER_UNIT),
1811 ref->size / BITS_PER_UNIT);
1812 if (val)
1813 return vn_reference_lookup_or_insert_for_pieces
1814 (vuse, vr->set, vr->type, vr->operands, val);
1819 /* 4) Assignment from an SSA name which definition we may be able
1820 to access pieces from. */
1821 else if (ref->size == maxsize
1822 && is_gimple_reg_type (vr->type)
1823 && !contains_storage_order_barrier_p (vr->operands)
1824 && gimple_assign_single_p (def_stmt)
1825 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1827 tree rhs1 = gimple_assign_rhs1 (def_stmt);
1828 gimple *def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
1829 if (is_gimple_assign (def_stmt2)
1830 && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1831 || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1832 && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1834 tree base2;
1835 HOST_WIDE_INT offset2, size2, maxsize2, off;
1836 bool reverse;
1837 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1838 &offset2, &size2, &maxsize2,
1839 &reverse);
1840 off = offset - offset2;
1841 if (!reverse
1842 && maxsize2 != -1
1843 && maxsize2 == size2
1844 && operand_equal_p (base, base2, 0)
1845 && offset2 <= offset
1846 && offset2 + size2 >= offset + maxsize)
1848 tree val = NULL_TREE;
1849 HOST_WIDE_INT elsz
1850 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
1851 if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
1853 if (off == 0)
1854 val = gimple_assign_rhs1 (def_stmt2);
1855 else if (off == elsz)
1856 val = gimple_assign_rhs2 (def_stmt2);
1858 else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1859 && off % elsz == 0)
1861 tree ctor = gimple_assign_rhs1 (def_stmt2);
1862 unsigned i = off / elsz;
1863 if (i < CONSTRUCTOR_NELTS (ctor))
1865 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
1866 if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
1868 if (TREE_CODE (TREE_TYPE (elt->value))
1869 != VECTOR_TYPE)
1870 val = elt->value;
1874 if (val)
1875 return vn_reference_lookup_or_insert_for_pieces
1876 (vuse, vr->set, vr->type, vr->operands, val);
1881 /* 5) For aggregate copies translate the reference through them if
1882 the copy kills ref. */
1883 else if (vn_walk_kind == VN_WALKREWRITE
1884 && gimple_assign_single_p (def_stmt)
1885 && (DECL_P (gimple_assign_rhs1 (def_stmt))
1886 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
1887 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1889 tree base2;
1890 HOST_WIDE_INT maxsize2;
1891 int i, j, k;
1892 auto_vec<vn_reference_op_s> rhs;
1893 vn_reference_op_t vro;
1894 ao_ref r;
1896 if (!lhs_ref_ok)
1897 return (void *)-1;
1899 /* See if the assignment kills REF. */
1900 base2 = ao_ref_base (&lhs_ref);
1901 maxsize2 = lhs_ref.max_size;
1902 if (maxsize2 == -1
1903 || (base != base2
1904 && (TREE_CODE (base) != MEM_REF
1905 || TREE_CODE (base2) != MEM_REF
1906 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
1907 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
1908 TREE_OPERAND (base2, 1))))
1909 || !stmt_kills_ref_p (def_stmt, ref))
1910 return (void *)-1;
1912 /* Find the common base of ref and the lhs. lhs_ops already
1913 contains valueized operands for the lhs. */
1914 i = vr->operands.length () - 1;
1915 j = lhs_ops.length () - 1;
1916 while (j >= 0 && i >= 0
1917 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
1919 i--;
1920 j--;
1923 /* ??? The innermost op should always be a MEM_REF and we already
1924 checked that the assignment to the lhs kills vr. Thus for
1925 aggregate copies using char[] types the vn_reference_op_eq
1926 may fail when comparing types for compatibility. But we really
1927 don't care here - further lookups with the rewritten operands
1928 will simply fail if we messed up types too badly. */
1929 HOST_WIDE_INT extra_off = 0;
1930 if (j == 0 && i >= 0
1931 && lhs_ops[0].opcode == MEM_REF
1932 && lhs_ops[0].off != -1)
1934 if (lhs_ops[0].off == vr->operands[i].off)
1935 i--, j--;
1936 else if (vr->operands[i].opcode == MEM_REF
1937 && vr->operands[i].off != -1)
1939 extra_off = vr->operands[i].off - lhs_ops[0].off;
1940 i--, j--;
1944 /* i now points to the first additional op.
1945 ??? LHS may not be completely contained in VR, one or more
1946 VIEW_CONVERT_EXPRs could be in its way. We could at least
1947 try handling outermost VIEW_CONVERT_EXPRs. */
1948 if (j != -1)
1949 return (void *)-1;
1951 /* Punt if the additional ops contain a storage order barrier. */
1952 for (k = i; k >= 0; k--)
1954 vro = &vr->operands[k];
1955 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
1956 return (void *)-1;
1959 /* Now re-write REF to be based on the rhs of the assignment. */
1960 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1962 /* Apply an extra offset to the inner MEM_REF of the RHS. */
1963 if (extra_off != 0)
1965 if (rhs.length () < 2
1966 || rhs[0].opcode != MEM_REF
1967 || rhs[0].off == -1)
1968 return (void *)-1;
1969 rhs[0].off += extra_off;
1970 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
1971 build_int_cst (TREE_TYPE (rhs[0].op0),
1972 extra_off));
1975 /* We need to pre-pend vr->operands[0..i] to rhs. */
1976 vec<vn_reference_op_s> old = vr->operands;
1977 if (i + 1 + rhs.length () > vr->operands.length ())
1979 vr->operands.safe_grow (i + 1 + rhs.length ());
1980 if (old == shared_lookup_references)
1981 shared_lookup_references = vr->operands;
1983 else
1984 vr->operands.truncate (i + 1 + rhs.length ());
1985 FOR_EACH_VEC_ELT (rhs, j, vro)
1986 vr->operands[i + 1 + j] = *vro;
1987 vr->operands = valueize_refs (vr->operands);
1988 if (old == shared_lookup_references)
1989 shared_lookup_references = vr->operands;
1990 vr->hashcode = vn_reference_compute_hash (vr);
1992 /* Try folding the new reference to a constant. */
1993 tree val = fully_constant_vn_reference_p (vr);
1994 if (val)
1995 return vn_reference_lookup_or_insert_for_pieces
1996 (vuse, vr->set, vr->type, vr->operands, val);
1998 /* Adjust *ref from the new operands. */
1999 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2000 return (void *)-1;
2001 /* This can happen with bitfields. */
2002 if (ref->size != r.size)
2003 return (void *)-1;
2004 *ref = r;
2006 /* Do not update last seen VUSE after translating. */
2007 last_vuse_ptr = NULL;
2009 /* Keep looking for the adjusted *REF / VR pair. */
2010 return NULL;
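      /* An illustrative sketch of this case (hypothetical GIMPLE, not
	 taken from a testcase): given

	     a = b;          <-- DEF_STMT, aggregate copy killing *REF
	     ... = a.x.y;    <-- the reference being looked up

	 the rewrite above replaces the common base "a" by the rhs "b"
	 and re-appends the remaining component ops, so the walk
	 continues looking for b.x.y instead.  */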
2013 /* 6) For memcpy copies translate the reference through them if
2014 the copy kills ref. */
2015 else if (vn_walk_kind == VN_WALKREWRITE
2016 && is_gimple_reg_type (vr->type)
2017 /* ??? Handle BCOPY as well. */
2018 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2019 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2020 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2021 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2022 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2023 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2024 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2025 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
2027 tree lhs, rhs;
2028 ao_ref r;
2029 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
2030 vn_reference_op_s op;
2031 HOST_WIDE_INT at;
2033 /* Only handle non-variable, addressable refs. */
2034 if (ref->size != maxsize
2035 || offset % BITS_PER_UNIT != 0
2036 || ref->size % BITS_PER_UNIT != 0)
2037 return (void *)-1;
2039 /* Extract a pointer base and an offset for the destination. */
2040 lhs = gimple_call_arg (def_stmt, 0);
2041 lhs_offset = 0;
2042 if (TREE_CODE (lhs) == SSA_NAME)
2044 lhs = SSA_VAL (lhs);
2045 if (TREE_CODE (lhs) == SSA_NAME)
2047 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2048 if (gimple_assign_single_p (def_stmt)
2049 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2050 lhs = gimple_assign_rhs1 (def_stmt);
2053 if (TREE_CODE (lhs) == ADDR_EXPR)
2055 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2056 &lhs_offset);
2057 if (!tem)
2058 return (void *)-1;
2059 if (TREE_CODE (tem) == MEM_REF
2060 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2062 lhs = TREE_OPERAND (tem, 0);
2063 if (TREE_CODE (lhs) == SSA_NAME)
2064 lhs = SSA_VAL (lhs);
2065 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2067 else if (DECL_P (tem))
2068 lhs = build_fold_addr_expr (tem);
2069 else
2070 return (void *)-1;
2072 if (TREE_CODE (lhs) != SSA_NAME
2073 && TREE_CODE (lhs) != ADDR_EXPR)
2074 return (void *)-1;
2076 /* Extract a pointer base and an offset for the source. */
2077 rhs = gimple_call_arg (def_stmt, 1);
2078 rhs_offset = 0;
2079 if (TREE_CODE (rhs) == SSA_NAME)
2080 rhs = SSA_VAL (rhs);
2081 if (TREE_CODE (rhs) == ADDR_EXPR)
2083 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2084 &rhs_offset);
2085 if (!tem)
2086 return (void *)-1;
2087 if (TREE_CODE (tem) == MEM_REF
2088 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2090 rhs = TREE_OPERAND (tem, 0);
2091 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2093 else if (DECL_P (tem))
2094 rhs = build_fold_addr_expr (tem);
2095 else
2096 return (void *)-1;
2098 if (TREE_CODE (rhs) != SSA_NAME
2099 && TREE_CODE (rhs) != ADDR_EXPR)
2100 return (void *)-1;
2102 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2104       /* The base of the reference and the destination of the copy have to agree.  */
2105 if ((TREE_CODE (base) != MEM_REF
2106 && !DECL_P (base))
2107 || (TREE_CODE (base) == MEM_REF
2108 && (TREE_OPERAND (base, 0) != lhs
2109 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
2110 || (DECL_P (base)
2111 && (TREE_CODE (lhs) != ADDR_EXPR
2112 || TREE_OPERAND (lhs, 0) != base)))
2113 return (void *)-1;
2115 at = offset / BITS_PER_UNIT;
2116 if (TREE_CODE (base) == MEM_REF)
2117 at += tree_to_uhwi (TREE_OPERAND (base, 1));
2118 /* If the access is completely outside of the memcpy destination
2119 area there is no aliasing. */
2120 if (lhs_offset >= at + maxsize / BITS_PER_UNIT
2121 || lhs_offset + copy_size <= at)
2122 return NULL;
2123 /* And the access has to be contained within the memcpy destination. */
2124 if (lhs_offset > at
2125 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2126 return (void *)-1;
2128 /* Make room for 2 operands in the new reference. */
2129 if (vr->operands.length () < 2)
2131 vec<vn_reference_op_s> old = vr->operands;
2132 vr->operands.safe_grow_cleared (2);
2133 if (old == shared_lookup_references
2134 && vr->operands != old)
2135 shared_lookup_references = vr->operands;
2137 else
2138 vr->operands.truncate (2);
2140 /* The looked-through reference is a simple MEM_REF. */
2141 memset (&op, 0, sizeof (op));
2142 op.type = vr->type;
2143 op.opcode = MEM_REF;
2144 op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
2145 op.off = at - lhs_offset + rhs_offset;
2146 vr->operands[0] = op;
2147 op.type = TREE_TYPE (rhs);
2148 op.opcode = TREE_CODE (rhs);
2149 op.op0 = rhs;
2150 op.off = -1;
2151 vr->operands[1] = op;
2152 vr->hashcode = vn_reference_compute_hash (vr);
2154 /* Try folding the new reference to a constant. */
2155 tree val = fully_constant_vn_reference_p (vr);
2156 if (val)
2157 return vn_reference_lookup_or_insert_for_pieces
2158 (vuse, vr->set, vr->type, vr->operands, val);
2160 /* Adjust *ref from the new operands. */
2161 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2162 return (void *)-1;
2163 /* This can happen with bitfields. */
2164 if (ref->size != r.size)
2165 return (void *)-1;
2166 *ref = r;
2168 /* Do not update last seen VUSE after translating. */
2169 last_vuse_ptr = NULL;
2171 /* Keep looking for the adjusted *REF / VR pair. */
2172 return NULL;
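      /* An illustrative sketch of this case (hypothetical GIMPLE, not
	 taken from a testcase): given

	     memcpy (&a, &b, 16);   <-- DEF_STMT covering the access
	     ... = a[1];            <-- scalar read within the copy

	 the reference is rewritten to the corresponding read from the
	 source, essentially MEM[&b + offset], using the two operands
	 built above, and the walk continues with that reference.  */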
2175 /* Bail out and stop walking. */
2176 return (void *)-1;
2179 /* Lookup a reference operation by its parts, in the current hash table.
2180 Returns the resulting value number if it exists in the hash table,
2181 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2182 vn_reference_t stored in the hashtable if something is found. */
2184 tree
2185 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2186 vec<vn_reference_op_s> operands,
2187 vn_reference_t *vnresult, vn_lookup_kind kind)
2189 struct vn_reference_s vr1;
2190 vn_reference_t tmp;
2191 tree cst;
2193 if (!vnresult)
2194 vnresult = &tmp;
2195 *vnresult = NULL;
2197 vr1.vuse = vuse_ssa_val (vuse);
2198 shared_lookup_references.truncate (0);
2199 shared_lookup_references.safe_grow (operands.length ());
2200 memcpy (shared_lookup_references.address (),
2201 operands.address (),
2202 sizeof (vn_reference_op_s)
2203 * operands.length ());
2204 vr1.operands = operands = shared_lookup_references
2205 = valueize_refs (shared_lookup_references);
2206 vr1.type = type;
2207 vr1.set = set;
2208 vr1.hashcode = vn_reference_compute_hash (&vr1);
2209 if ((cst = fully_constant_vn_reference_p (&vr1)))
2210 return cst;
2212 vn_reference_lookup_1 (&vr1, vnresult);
2213 if (!*vnresult
2214 && kind != VN_NOWALK
2215 && vr1.vuse)
2217 ao_ref r;
2218 vn_walk_kind = kind;
2219 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2220 *vnresult =
2221 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2222 vn_reference_lookup_2,
2223 vn_reference_lookup_3,
2224 vuse_ssa_val, &vr1);
2225 gcc_checking_assert (vr1.operands == shared_lookup_references);
2228 if (*vnresult)
2229 return (*vnresult)->result;
2231 return NULL_TREE;
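/* For illustration only, a hypothetical caller (names invented here)
   might use this as

     vn_reference_t res;
     tree val = vn_reference_lookup_pieces (vuse, set, type, ops, &res,
					    VN_WALKREWRITE);
     if (val)
       ... VAL is the value number of the piece-wise reference ...

   The operands are copied into SHARED_LOOKUP_REFERENCES and valueized
   there, so the caller's OPERANDS vector itself is not modified.  */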
2234 /* Lookup OP in the current hash table, and return the resulting value
2235 number if it exists in the hash table. Return NULL_TREE if it does
2236 not exist in the hash table or if the result field of the structure
2237    was NULL.  VNRESULT will be filled in with the vn_reference_t
2238 stored in the hashtable if one exists. When TBAA_P is false assume
2239 we are looking up a store and treat it as having alias-set zero. */
2241 tree
2242 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2243 vn_reference_t *vnresult, bool tbaa_p)
2245 vec<vn_reference_op_s> operands;
2246 struct vn_reference_s vr1;
2247 tree cst;
2248   bool valueized_anything;
2250 if (vnresult)
2251 *vnresult = NULL;
2253 vr1.vuse = vuse_ssa_val (vuse);
2254 vr1.operands = operands
2255     = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2256 vr1.type = TREE_TYPE (op);
2257 vr1.set = tbaa_p ? get_alias_set (op) : 0;
2258 vr1.hashcode = vn_reference_compute_hash (&vr1);
2259 if ((cst = fully_constant_vn_reference_p (&vr1)))
2260 return cst;
2262 if (kind != VN_NOWALK
2263 && vr1.vuse)
2265 vn_reference_t wvnresult;
2266 ao_ref r;
2267 /* Make sure to use a valueized reference if we valueized anything.
2268 Otherwise preserve the full reference for advanced TBAA. */
2269       if (!valueized_anything
2270 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2271 vr1.operands))
2272 ao_ref_init (&r, op);
2273 if (! tbaa_p)
2274 r.ref_alias_set = r.base_alias_set = 0;
2275 vn_walk_kind = kind;
2276 wvnresult =
2277 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2278 vn_reference_lookup_2,
2279 vn_reference_lookup_3,
2280 vuse_ssa_val, &vr1);
2281 gcc_checking_assert (vr1.operands == shared_lookup_references);
2282 if (wvnresult)
2284 if (vnresult)
2285 *vnresult = wvnresult;
2286 return wvnresult->result;
2289 return NULL_TREE;
2292 return vn_reference_lookup_1 (&vr1, vnresult);
2295 /* Lookup CALL in the current hash table and return the entry in
2296 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2298 void
2299 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2300 vn_reference_t vr)
2302 if (vnresult)
2303 *vnresult = NULL;
2305 tree vuse = gimple_vuse (call);
2307 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2308 vr->operands = valueize_shared_reference_ops_from_call (call);
2309 vr->type = gimple_expr_type (call);
2310 vr->set = 0;
2311 vr->hashcode = vn_reference_compute_hash (vr);
2312 vn_reference_lookup_1 (vr, vnresult);
2315 /* Insert OP into the current hash table with a value number of
2316 RESULT, and return the resulting reference structure we created. */
2318 static vn_reference_t
2319 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2321 vn_reference_s **slot;
2322 vn_reference_t vr1;
2323 bool tem;
2325 vr1 = current_info->references_pool->allocate ();
2326 if (TREE_CODE (result) == SSA_NAME)
2327 vr1->value_id = VN_INFO (result)->value_id;
2328 else
2329 vr1->value_id = get_or_alloc_constant_value_id (result);
2330 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2331 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2332 vr1->type = TREE_TYPE (op);
2333 vr1->set = get_alias_set (op);
2334 vr1->hashcode = vn_reference_compute_hash (vr1);
2335 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2336 vr1->result_vdef = vdef;
2338 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2339 INSERT);
2341 /* Because we lookup stores using vuses, and value number failures
2342 using the vdefs (see visit_reference_op_store for how and why),
2343 it's possible that on failure we may try to insert an already
2344 inserted store. This is not wrong, there is no ssa name for a
2345 store that we could use as a differentiator anyway. Thus, unlike
2346 the other lookup functions, you cannot gcc_assert (!*slot)
2347 here. */
2349 /* But free the old slot in case of a collision. */
2350 if (*slot)
2351 free_reference (*slot);
2353 *slot = vr1;
2354 return vr1;
2357 /* Insert a reference by its pieces into the current hash table with
2358 a value number of RESULT. Return the resulting reference
2359 structure we created. */
2361 vn_reference_t
2362 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2363 vec<vn_reference_op_s> operands,
2364 tree result, unsigned int value_id)
2367 vn_reference_s **slot;
2368 vn_reference_t vr1;
2370 vr1 = current_info->references_pool->allocate ();
2371 vr1->value_id = value_id;
2372 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2373 vr1->operands = valueize_refs (operands);
2374 vr1->type = type;
2375 vr1->set = set;
2376 vr1->hashcode = vn_reference_compute_hash (vr1);
2377 if (result && TREE_CODE (result) == SSA_NAME)
2378 result = SSA_VAL (result);
2379 vr1->result = result;
2381 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2382 INSERT);
2384 /* At this point we should have all the things inserted that we have
2385 seen before, and we should never try inserting something that
2386 already exists. */
2387 gcc_assert (!*slot);
2388 if (*slot)
2389 free_reference (*slot);
2391 *slot = vr1;
2392 return vr1;
2395 /* Compute and return the hash value for nary operation VNO1.  */
2397 static hashval_t
2398 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2400 inchash::hash hstate;
2401 unsigned i;
2403 for (i = 0; i < vno1->length; ++i)
2404 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2405 vno1->op[i] = SSA_VAL (vno1->op[i]);
2407 if (((vno1->length == 2
2408 && commutative_tree_code (vno1->opcode))
2409 || (vno1->length == 3
2410 && commutative_ternary_tree_code (vno1->opcode)))
2411 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2412 std::swap (vno1->op[0], vno1->op[1]);
2413 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2414 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2416 std::swap (vno1->op[0], vno1->op[1]);
2417 vno1->opcode = swap_tree_comparison (vno1->opcode);
2420 hstate.add_int (vno1->opcode);
2421 for (i = 0; i < vno1->length; ++i)
2422 inchash::add_expr (vno1->op[i], hstate);
2424 return hstate.end ();
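/* For example (illustrative), "a + b" and "b + a" receive the same hash
   because commutative operands are brought into a canonical order
   first, and "a < b" hashes like "b > a" because comparisons are
   canonicalized by swapping both the operands and the comparison
   code.  */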
2427 /* Compare nary operations VNO1 and VNO2 and return true if they are
2428 equivalent. */
2430 bool
2431 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2433 unsigned i;
2435 if (vno1->hashcode != vno2->hashcode)
2436 return false;
2438 if (vno1->length != vno2->length)
2439 return false;
2441 if (vno1->opcode != vno2->opcode
2442 || !types_compatible_p (vno1->type, vno2->type))
2443 return false;
2445 for (i = 0; i < vno1->length; ++i)
2446 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2447 return false;
2449 return true;
2452 /* Initialize VNO from the pieces provided. */
2454 static void
2455 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2456 enum tree_code code, tree type, tree *ops)
2458 vno->opcode = code;
2459 vno->length = length;
2460 vno->type = type;
2461 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2464 /* Initialize VNO from OP. */
2466 static void
2467 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2469 unsigned i;
2471 vno->opcode = TREE_CODE (op);
2472 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2473 vno->type = TREE_TYPE (op);
2474 for (i = 0; i < vno->length; ++i)
2475 vno->op[i] = TREE_OPERAND (op, i);
2478 /* Return the number of operands for a vn_nary ops structure from STMT. */
2480 static unsigned int
2481 vn_nary_length_from_stmt (gimple *stmt)
2483 switch (gimple_assign_rhs_code (stmt))
2485 case REALPART_EXPR:
2486 case IMAGPART_EXPR:
2487 case VIEW_CONVERT_EXPR:
2488 return 1;
2490 case BIT_FIELD_REF:
2491 return 3;
2493 case CONSTRUCTOR:
2494 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2496 default:
2497 return gimple_num_ops (stmt) - 1;
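/* E.g. (illustrative): for "x = a + b" this returns 2, for
   "x = BIT_FIELD_REF <a, 8, 0>" it returns 3, and for a CONSTRUCTOR
   rhs it returns the number of constructor elements.  */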
2501 /* Initialize VNO from STMT. */
2503 static void
2504 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2506 unsigned i;
2508 vno->opcode = gimple_assign_rhs_code (stmt);
2509 vno->type = gimple_expr_type (stmt);
2510 switch (vno->opcode)
2512 case REALPART_EXPR:
2513 case IMAGPART_EXPR:
2514 case VIEW_CONVERT_EXPR:
2515 vno->length = 1;
2516 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2517 break;
2519 case BIT_FIELD_REF:
2520 vno->length = 3;
2521 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2522 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2523 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2524 break;
2526 case CONSTRUCTOR:
2527 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2528 for (i = 0; i < vno->length; ++i)
2529 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2530 break;
2532 default:
2533 gcc_checking_assert (!gimple_assign_single_p (stmt));
2534 vno->length = gimple_num_ops (stmt) - 1;
2535 for (i = 0; i < vno->length; ++i)
2536 vno->op[i] = gimple_op (stmt, i + 1);
2540 /* Compute the hashcode for VNO and look for it in the hash table;
2541 return the resulting value number if it exists in the hash table.
2542 Return NULL_TREE if it does not exist in the hash table or if the
2543 result field of the operation is NULL. VNRESULT will contain the
2544 vn_nary_op_t from the hashtable if it exists. */
2546 static tree
2547 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2549 vn_nary_op_s **slot;
2551 if (vnresult)
2552 *vnresult = NULL;
2554 vno->hashcode = vn_nary_op_compute_hash (vno);
2555 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2556 NO_INSERT);
2557 if (!slot && current_info == optimistic_info)
2558 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2559 NO_INSERT);
2560 if (!slot)
2561 return NULL_TREE;
2562 if (vnresult)
2563 *vnresult = *slot;
2564 return (*slot)->result;
2567 /* Lookup an n-ary operation by its pieces and return the resulting value
2568 number if it exists in the hash table. Return NULL_TREE if it does
2569 not exist in the hash table or if the result field of the operation
2570 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2571 if it exists. */
2573 tree
2574 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2575 tree type, tree *ops, vn_nary_op_t *vnresult)
2577 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2578 sizeof_vn_nary_op (length));
2579 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2580 return vn_nary_op_lookup_1 (vno1, vnresult);
2583 /* Lookup OP in the current hash table, and return the resulting value
2584 number if it exists in the hash table. Return NULL_TREE if it does
2585 not exist in the hash table or if the result field of the operation
2586 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2587 if it exists. */
2589 tree
2590 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2592 vn_nary_op_t vno1
2593 = XALLOCAVAR (struct vn_nary_op_s,
2594 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2595 init_vn_nary_op_from_op (vno1, op);
2596 return vn_nary_op_lookup_1 (vno1, vnresult);
2599 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2600 value number if it exists in the hash table. Return NULL_TREE if
2601 it does not exist in the hash table. VNRESULT will contain the
2602 vn_nary_op_t from the hashtable if it exists. */
2604 tree
2605 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2607 vn_nary_op_t vno1
2608 = XALLOCAVAR (struct vn_nary_op_s,
2609 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2610 init_vn_nary_op_from_stmt (vno1, stmt);
2611 return vn_nary_op_lookup_1 (vno1, vnresult);
2614 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
2616 static tree
2617 vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops)
2619 if (!rcode.is_tree_code ())
2620 return NULL_TREE;
2621 vn_nary_op_t vnresult = NULL;
2622 return vn_nary_op_lookup_pieces (TREE_CODE_LENGTH ((tree_code) rcode),
2623 (tree_code) rcode, type, ops, &vnresult);
2626 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2628 static vn_nary_op_t
2629 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2631 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2634 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2635 obstack. */
2637 static vn_nary_op_t
2638 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2640 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2641 &current_info->nary_obstack);
2643 vno1->value_id = value_id;
2644 vno1->length = length;
2645 vno1->result = result;
2647 return vno1;
2650 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2651 VNO->HASHCODE first. */
2653 static vn_nary_op_t
2654 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2655 bool compute_hash)
2657 vn_nary_op_s **slot;
2659 if (compute_hash)
2660 vno->hashcode = vn_nary_op_compute_hash (vno);
2662 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2663 gcc_assert (!*slot);
2665 *slot = vno;
2666 return vno;
2669 /* Insert an n-ary operation into the current hash table using its
2670 pieces. Return the vn_nary_op_t structure we created and put in
2671 the hashtable. */
2673 vn_nary_op_t
2674 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2675 tree type, tree *ops,
2676 tree result, unsigned int value_id)
2678 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2679 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2680 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2683 /* Insert OP into the current hash table with a value number of
2684 RESULT. Return the vn_nary_op_t structure we created and put in
2685 the hashtable. */
2687 vn_nary_op_t
2688 vn_nary_op_insert (tree op, tree result)
2690 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2691 vn_nary_op_t vno1;
2693 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2694 init_vn_nary_op_from_op (vno1, op);
2695 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2698 /* Insert the rhs of STMT into the current hash table with a value number of
2699 RESULT. */
2701 static vn_nary_op_t
2702 vn_nary_op_insert_stmt (gimple *stmt, tree result)
2704 vn_nary_op_t vno1
2705 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2706 result, VN_INFO (result)->value_id);
2707 init_vn_nary_op_from_stmt (vno1, stmt);
2708 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2711 /* Compute a hashcode for PHI operation VP1 and return it. */
2713 static inline hashval_t
2714 vn_phi_compute_hash (vn_phi_t vp1)
2716 inchash::hash hstate (vp1->phiargs.length () > 2
2717 ? vp1->block->index : vp1->phiargs.length ());
2718 tree phi1op;
2719 tree type;
2720 edge e;
2721 edge_iterator ei;
2723 /* If all PHI arguments are constants we need to distinguish
2724 the PHI node via its type. */
2725 type = vp1->type;
2726 hstate.merge_hash (vn_hash_type (type));
2728 FOR_EACH_EDGE (e, ei, vp1->block->preds)
2730       /* Don't hash backedge values; they need to be handled as VN_TOP
2731 for optimistic value-numbering. */
2732 if (e->flags & EDGE_DFS_BACK)
2733 continue;
2735 phi1op = vp1->phiargs[e->dest_idx];
2736 if (phi1op == VN_TOP)
2737 continue;
2738 inchash::add_expr (phi1op, hstate);
2741 return hstate.end ();
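/* For instance (illustrative), for a loop-header PHI like
   x_2 = PHI <x_1(preheader), x_3(latch)> only x_1 contributes to the
   hash: the latch value arrives over a back edge and is treated as
   VN_TOP during optimistic iteration, so including it would make the
   hash unstable across iterations.  */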
2745 /* Return true if COND1 and COND2 represent the same condition, set
2746 *INVERTED_P if one needs to be inverted to make it the same as
2747 the other. */
2749 static bool
2750 cond_stmts_equal_p (gcond *cond1, gcond *cond2, bool *inverted_p)
2752 enum tree_code code1 = gimple_cond_code (cond1);
2753 enum tree_code code2 = gimple_cond_code (cond2);
2754 tree lhs1 = gimple_cond_lhs (cond1);
2755 tree lhs2 = gimple_cond_lhs (cond2);
2756 tree rhs1 = gimple_cond_rhs (cond1);
2757 tree rhs2 = gimple_cond_rhs (cond2);
2759 *inverted_p = false;
2760 if (code1 == code2)
2762 else if (code1 == swap_tree_comparison (code2))
2763 std::swap (lhs2, rhs2);
2764 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
2765 *inverted_p = true;
2766 else if (code1 == invert_tree_comparison
2767 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
2769 std::swap (lhs2, rhs2);
2770 *inverted_p = true;
2772 else
2773 return false;
2775 lhs1 = vn_valueize (lhs1);
2776 rhs1 = vn_valueize (rhs1);
2777 lhs2 = vn_valueize (lhs2);
2778 rhs2 = vn_valueize (rhs2);
2779 return ((expressions_equal_p (lhs1, lhs2)
2780 && expressions_equal_p (rhs1, rhs2))
2781 || (commutative_tree_code (code1)
2782 && expressions_equal_p (lhs1, rhs2)
2783 && expressions_equal_p (rhs1, lhs2)));
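/* As an example (illustrative), "if (a < b)" matches "if (b > a)"
   directly and matches "if (a >= b)" with *INVERTED_P set, provided
   the inversion is valid (HONOR_NANS can rule it out for floats).
   Operands are compared after valueization, so names with equal value
   numbers compare equal here.  */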
2786 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2788 static int
2789 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2791 if (vp1->hashcode != vp2->hashcode)
2792 return false;
2794 if (vp1->block != vp2->block)
2796 if (vp1->phiargs.length () != vp2->phiargs.length ())
2797 return false;
2799 switch (vp1->phiargs.length ())
2801 case 1:
2802 /* Single-arg PHIs are just copies. */
2803 break;
2805 case 2:
2807 /* Rule out backedges into the PHI. */
2808 if (vp1->block->loop_father->header == vp1->block
2809 || vp2->block->loop_father->header == vp2->block)
2810 return false;
2812 /* If the PHI nodes do not have compatible types
2813 they are not the same. */
2814 if (!types_compatible_p (vp1->type, vp2->type))
2815 return false;
2817 basic_block idom1
2818 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
2819 basic_block idom2
2820 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
2821 	/* If the immediate dominators end in switch stmts, multiple
2822 values may end up in the same PHI arg via intermediate
2823 CFG merges. */
2824 if (EDGE_COUNT (idom1->succs) != 2
2825 || EDGE_COUNT (idom2->succs) != 2)
2826 return false;
2828 /* Verify the controlling stmt is the same. */
2829 gimple *last1 = last_stmt (idom1);
2830 gimple *last2 = last_stmt (idom2);
2831 if (gimple_code (last1) != GIMPLE_COND
2832 || gimple_code (last2) != GIMPLE_COND)
2833 return false;
2834 bool inverted_p;
2835 if (! cond_stmts_equal_p (as_a <gcond *> (last1),
2836 as_a <gcond *> (last2), &inverted_p))
2837 return false;
2839 /* Get at true/false controlled edges into the PHI. */
2840 edge te1, te2, fe1, fe2;
2841 if (! extract_true_false_controlled_edges (idom1, vp1->block,
2842 &te1, &fe1)
2843 || ! extract_true_false_controlled_edges (idom2, vp2->block,
2844 &te2, &fe2))
2845 return false;
2847 	/* Swap edges if the second condition is the inverse of the
2848 first. */
2849 if (inverted_p)
2850 std::swap (te2, fe2);
2852 /* ??? Handle VN_TOP specially. */
2853 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
2854 vp2->phiargs[te2->dest_idx])
2855 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
2856 vp2->phiargs[fe2->dest_idx]))
2857 return false;
2859 return true;
2862 default:
2863 return false;
2867 /* If the PHI nodes do not have compatible types
2868 they are not the same. */
2869 if (!types_compatible_p (vp1->type, vp2->type))
2870 return false;
2872   /* Any phi in the same block will have its arguments in the
2873 same edge order, because of how we store phi nodes. */
2874 int i;
2875 tree phi1op;
2876 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2878 tree phi2op = vp2->phiargs[i];
2879 if (phi1op == VN_TOP || phi2op == VN_TOP)
2880 continue;
2881 if (!expressions_equal_p (phi1op, phi2op))
2882 return false;
2885 return true;
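/* Illustrative example (hypothetical CFG): two two-argument PHIs in
   different blocks, x_1 = PHI <a(true edge), b(false edge)> guarded by
   "if (p_3 != 0)" and y_2 = PHI <a(false edge), b(true edge)> guarded
   by "if (p_3 == 0)", compare equal: the conditions are inverses, so
   the true/false edges of the second are swapped before the arguments
   are compared.  */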
2888 static vec<tree> shared_lookup_phiargs;
2890 /* Lookup PHI in the current hash table, and return the resulting
2891 value number if it exists in the hash table. Return NULL_TREE if
2892 it does not exist in the hash table. */
2894 static tree
2895 vn_phi_lookup (gimple *phi)
2897 vn_phi_s **slot;
2898 struct vn_phi_s vp1;
2899 edge e;
2900 edge_iterator ei;
2902 shared_lookup_phiargs.truncate (0);
2903 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
2905   /* Canonicalize the SSA_NAMEs to their value numbers.  */
2906 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
2908 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
2909 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2910 shared_lookup_phiargs[e->dest_idx] = def;
2912 vp1.type = TREE_TYPE (gimple_phi_result (phi));
2913 vp1.phiargs = shared_lookup_phiargs;
2914 vp1.block = gimple_bb (phi);
2915 vp1.hashcode = vn_phi_compute_hash (&vp1);
2916 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2917 NO_INSERT);
2918 if (!slot && current_info == optimistic_info)
2919 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2920 NO_INSERT);
2921 if (!slot)
2922 return NULL_TREE;
2923 return (*slot)->result;
2926 /* Insert PHI into the current hash table with a value number of
2927 RESULT. */
2929 static vn_phi_t
2930 vn_phi_insert (gimple *phi, tree result)
2932 vn_phi_s **slot;
2933 vn_phi_t vp1 = current_info->phis_pool->allocate ();
2934 vec<tree> args = vNULL;
2935 edge e;
2936 edge_iterator ei;
2938 args.safe_grow (gimple_phi_num_args (phi));
2940   /* Canonicalize the SSA_NAMEs to their value numbers.  */
2941 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
2943 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
2944 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2945 args[e->dest_idx] = def;
2947 vp1->value_id = VN_INFO (result)->value_id;
2948 vp1->type = TREE_TYPE (gimple_phi_result (phi));
2949 vp1->phiargs = args;
2950 vp1->block = gimple_bb (phi);
2951 vp1->result = result;
2952 vp1->hashcode = vn_phi_compute_hash (vp1);
2954 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
2956 /* Because we iterate over phi operations more than once, it's
2957      possible the slot might already exist here, hence no assert.  */
2958 *slot = vp1;
2959 return vp1;
2963 /* Print set of components in strongly connected component SCC to OUT. */
2965 static void
2966 print_scc (FILE *out, vec<tree> scc)
2968 tree var;
2969 unsigned int i;
2971 fprintf (out, "SCC consists of:");
2972 FOR_EACH_VEC_ELT (scc, i, var)
2974 fprintf (out, " ");
2975 print_generic_expr (out, var, 0);
2977 fprintf (out, "\n");
2980 /* Return true if BB1 is dominated by BB2 taking into account edges
2981 that are not executable. */
2983 static bool
2984 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
2986 edge_iterator ei;
2987 edge e;
2989 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
2990 return true;
2992   /* Before iterating we'd like to know if there exists an
2993      (executable) path from bb2 to bb1 at all; if not we can
2994 directly return false. For now simply iterate once. */
2996 /* Iterate to the single executable bb1 predecessor. */
2997 if (EDGE_COUNT (bb1->preds) > 1)
2999 edge prede = NULL;
3000 FOR_EACH_EDGE (e, ei, bb1->preds)
3001 if (e->flags & EDGE_EXECUTABLE)
3003 if (prede)
3005 prede = NULL;
3006 break;
3008 prede = e;
3010 if (prede)
3012 bb1 = prede->src;
3014 /* Re-do the dominance check with changed bb1. */
3015 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3016 return true;
3020 /* Iterate to the single executable bb2 successor. */
3021 edge succe = NULL;
3022 FOR_EACH_EDGE (e, ei, bb2->succs)
3023 if (e->flags & EDGE_EXECUTABLE)
3025 if (succe)
3027 succe = NULL;
3028 break;
3030 succe = e;
3032 if (succe)
3034 /* Verify the reached block is only reached through succe.
3035 If there is only one edge we can spare us the dominator
3036 check and iterate directly. */
3037 if (EDGE_COUNT (succe->dest->preds) > 1)
3039 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3040 if (e != succe
3041 && (e->flags & EDGE_EXECUTABLE))
3043 succe = NULL;
3044 break;
3047 if (succe)
3049 bb2 = succe->dest;
3051 /* Re-do the dominance check with changed bb2. */
3052 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3053 return true;
3057 /* We could now iterate updating bb1 / bb2. */
3058 return false;
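/* Sketch of the idea (no new behavior): BB2 may fail the plain
   dominance test only because of edges already proven not executable.
   If BB1 has a single executable predecessor, or BB2 a single
   executable successor, we can step across that edge and retry the
   dominance query; as noted above this currently iterates only once
   in each direction.  */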
3061 /* Set the value number of FROM to TO, return true if it has changed
3062 as a result. */
3064 static inline bool
3065 set_ssa_val_to (tree from, tree to)
3067 tree currval = SSA_VAL (from);
3068 HOST_WIDE_INT toff, coff;
3070   /* The only things we allow as value numbers are ssa_names
3071 and invariants. So assert that here. We don't allow VN_TOP
3072 as visiting a stmt should produce a value-number other than
3073 that.
3074 ??? Still VN_TOP can happen for unreachable code, so force
3075 it to varying in that case. Not all code is prepared to
3076 get VN_TOP on valueization. */
3077 if (to == VN_TOP)
3079 if (dump_file && (dump_flags & TDF_DETAILS))
3080 fprintf (dump_file, "Forcing value number to varying on "
3081 "receiving VN_TOP\n");
3082 to = from;
3085 gcc_assert (to != NULL_TREE
3086 && ((TREE_CODE (to) == SSA_NAME
3087 && (to == from || SSA_VAL (to) == to))
3088 || is_gimple_min_invariant (to)));
3090 if (from != to)
3092 if (currval == from)
3094 if (dump_file && (dump_flags & TDF_DETAILS))
3096 fprintf (dump_file, "Not changing value number of ");
3097 print_generic_expr (dump_file, from, 0);
3098 fprintf (dump_file, " from VARYING to ");
3099 print_generic_expr (dump_file, to, 0);
3100 fprintf (dump_file, "\n");
3102 return false;
3104 else if (TREE_CODE (to) == SSA_NAME
3105 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3106 to = from;
3109 if (dump_file && (dump_flags & TDF_DETAILS))
3111 fprintf (dump_file, "Setting value number of ");
3112 print_generic_expr (dump_file, from, 0);
3113 fprintf (dump_file, " to ");
3114 print_generic_expr (dump_file, to, 0);
3117 if (currval != to
3118 && !operand_equal_p (currval, to, 0)
3119 /* ??? For addresses involving volatile objects or types operand_equal_p
3120 does not reliably detect ADDR_EXPRs as equal. We know we are only
3121 getting invariant gimple addresses here, so can use
3122 get_addr_base_and_unit_offset to do this comparison. */
3123 && !(TREE_CODE (currval) == ADDR_EXPR
3124 && TREE_CODE (to) == ADDR_EXPR
3125 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3126 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3127 && coff == toff))
3129 /* If we equate two SSA names we have to make the side-band info
3130 of the leader conservative (and remember whatever original value
3131 was present). */
3132 if (TREE_CODE (to) == SSA_NAME)
3134 if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3135 && SSA_NAME_RANGE_INFO (to))
3137 if (SSA_NAME_IS_DEFAULT_DEF (to)
3138 || dominated_by_p_w_unex
3139 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3140 gimple_bb (SSA_NAME_DEF_STMT (to))))
3141 /* Keep the info from the dominator. */
3143 else if (SSA_NAME_IS_DEFAULT_DEF (from)
3144 || dominated_by_p_w_unex
3145 (gimple_bb (SSA_NAME_DEF_STMT (to)),
3146 gimple_bb (SSA_NAME_DEF_STMT (from))))
3148 /* Save old info. */
3149 if (! VN_INFO (to)->info.range_info)
3151 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3152 VN_INFO (to)->range_info_anti_range_p
3153 = SSA_NAME_ANTI_RANGE_P (to);
3155 /* Use that from the dominator. */
3156 SSA_NAME_RANGE_INFO (to) = SSA_NAME_RANGE_INFO (from);
3157 SSA_NAME_ANTI_RANGE_P (to) = SSA_NAME_ANTI_RANGE_P (from);
3159 else
3161 /* Save old info. */
3162 if (! VN_INFO (to)->info.range_info)
3164 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3165 VN_INFO (to)->range_info_anti_range_p
3166 = SSA_NAME_ANTI_RANGE_P (to);
3168 /* Rather than allocating memory and unioning the info
3169 just clear it. */
3170 SSA_NAME_RANGE_INFO (to) = NULL;
3173 else if (POINTER_TYPE_P (TREE_TYPE (to))
3174 && SSA_NAME_PTR_INFO (to))
3176 if (SSA_NAME_IS_DEFAULT_DEF (to)
3177 || dominated_by_p_w_unex
3178 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3179 gimple_bb (SSA_NAME_DEF_STMT (to))))
3180 /* Keep the info from the dominator. */
3182 else if (SSA_NAME_IS_DEFAULT_DEF (from)
3183 || dominated_by_p_w_unex
3184 (gimple_bb (SSA_NAME_DEF_STMT (to)),
3185 gimple_bb (SSA_NAME_DEF_STMT (from))))
3187 /* Save old info. */
3188 if (! VN_INFO (to)->info.ptr_info)
3189 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3190 /* Use that from the dominator. */
3191 SSA_NAME_PTR_INFO (to) = SSA_NAME_PTR_INFO (from);
3193 else if (! SSA_NAME_PTR_INFO (from)
3194 /* Handle the case of trivially equivalent info. */
3195 || memcmp (SSA_NAME_PTR_INFO (to),
3196 SSA_NAME_PTR_INFO (from),
3197 sizeof (ptr_info_def)) != 0)
3199 /* Save old info. */
3200 if (! VN_INFO (to)->info.ptr_info)
3201 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3202 /* Rather than allocating memory and unioning the info
3203 just clear it. */
3204 SSA_NAME_PTR_INFO (to) = NULL;
3209 VN_INFO (from)->valnum = to;
3210 if (dump_file && (dump_flags & TDF_DETAILS))
3211 fprintf (dump_file, " (changed)\n");
3212 return true;
3214 if (dump_file && (dump_flags & TDF_DETAILS))
3215 fprintf (dump_file, "\n");
3216 return false;
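/* Summary of the side-band handling above (descriptive only): when two
   SSA names are unified, the leader keeps the range / points-to info
   belonging to whichever definition dominates the other; if neither
   dominates, the leader's original info is stashed in VN_INFO and
   cleared, which is conservative but avoids computing a union of the
   two.  */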
3219 /* Mark as processed all the definitions in the defining stmt of USE, or
3220 the USE itself. */
3222 static void
3223 mark_use_processed (tree use)
3225 ssa_op_iter iter;
3226 def_operand_p defp;
3227 gimple *stmt = SSA_NAME_DEF_STMT (use);
3229 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3231 VN_INFO (use)->use_processed = true;
3232 return;
3235 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3237 tree def = DEF_FROM_PTR (defp);
3239 VN_INFO (def)->use_processed = true;
3243 /* Set all definitions in STMT to value number to themselves.
3244 Return true if a value number changed. */
3246 static bool
3247 defs_to_varying (gimple *stmt)
3249 bool changed = false;
3250 ssa_op_iter iter;
3251 def_operand_p defp;
3253 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3255 tree def = DEF_FROM_PTR (defp);
3256 changed |= set_ssa_val_to (def, def);
3258 return changed;
3261 /* Visit a copy between LHS and RHS, return true if the value number
3262 changed. */
3264 static bool
3265 visit_copy (tree lhs, tree rhs)
3267 /* Valueize. */
3268 rhs = SSA_VAL (rhs);
3270 return set_ssa_val_to (lhs, rhs);
3273 /* Visit a nary operator RHS, value number it, and return true if the
3274 value number of LHS has changed as a result. */
3276 static bool
3277 visit_nary_op (tree lhs, gimple *stmt)
3279 bool changed = false;
3280 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3282 if (result)
3283 changed = set_ssa_val_to (lhs, result);
3284 else
3286 changed = set_ssa_val_to (lhs, lhs);
3287 vn_nary_op_insert_stmt (stmt, lhs);
3290 return changed;
3293 /* Visit a call STMT storing into LHS. Return true if the value number
3294 of the LHS has changed as a result. */
3296 static bool
3297 visit_reference_op_call (tree lhs, gcall *stmt)
3299 bool changed = false;
3300 struct vn_reference_s vr1;
3301 vn_reference_t vnresult = NULL;
3302 tree vdef = gimple_vdef (stmt);
3304 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3305 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3306 lhs = NULL_TREE;
3308 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3309 if (vnresult)
3311 if (vnresult->result_vdef && vdef)
3312 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3314 if (!vnresult->result && lhs)
3315 vnresult->result = lhs;
3317 if (vnresult->result && lhs)
3318 changed |= set_ssa_val_to (lhs, vnresult->result);
3320 else
3322 vn_reference_t vr2;
3323 vn_reference_s **slot;
3324 if (vdef)
3325 changed |= set_ssa_val_to (vdef, vdef);
3326 if (lhs)
3327 changed |= set_ssa_val_to (lhs, lhs);
3328 vr2 = current_info->references_pool->allocate ();
3329 vr2->vuse = vr1.vuse;
3330 /* As we are not walking the virtual operand chain we know the
3331 shared_lookup_references are still original so we can re-use
3332 them here. */
3333 vr2->operands = vr1.operands.copy ();
3334 vr2->type = vr1.type;
3335 vr2->set = vr1.set;
3336 vr2->hashcode = vr1.hashcode;
3337 vr2->result = lhs;
3338 vr2->result_vdef = vdef;
3339 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3340 INSERT);
3341 gcc_assert (!*slot);
3342 *slot = vr2;
3345 return changed;
3348 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3349 and return true if the value number of the LHS has changed as a result. */
3351 static bool
3352 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3354 bool changed = false;
3355 tree last_vuse;
3356 tree result;
3358 last_vuse = gimple_vuse (stmt);
3359 last_vuse_ptr = &last_vuse;
3360 result = vn_reference_lookup (op, gimple_vuse (stmt),
3361 default_vn_walk_kind, NULL, true);
3362 last_vuse_ptr = NULL;
3364 /* We handle type-punning through unions by value-numbering based
3365 on offset and size of the access. Be prepared to handle a
3366 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
3367 if (result
3368 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3370 /* We will be setting the value number of lhs to the value number
3371 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3372 So first simplify and lookup this expression to see if it
3373 is already available. */
3374 mprts_hook = vn_lookup_simplify_result;
3375 code_helper rcode = VIEW_CONVERT_EXPR;
3376 tree ops[3] = { result };
3377 bool res = gimple_resimplify1 (NULL, &rcode, TREE_TYPE (op), ops,
3378 vn_valueize);
3379 mprts_hook = NULL;
3380 gimple *new_stmt = NULL;
3381 if (res
3382 && gimple_simplified_result_is_gimple_val (rcode, ops))
3383 /* The expression is already available. */
3384 result = ops[0];
3385 else
3387 tree val = vn_lookup_simplify_result (rcode, TREE_TYPE (op), ops);
3388 if (!val)
3390 gimple_seq stmts = NULL;
3391 result = maybe_push_res_to_seq (rcode, TREE_TYPE (op), ops,
3392 &stmts);
3393 if (result)
3395 gcc_assert (gimple_seq_singleton_p (stmts));
3396 new_stmt = gimple_seq_first_stmt (stmts);
3399 else
3400 /* The expression is already available. */
3401 result = val;
3403 if (new_stmt)
3405 /* The expression is not yet available, value-number lhs to
3406 the new SSA_NAME we created. */
3407 /* Initialize value-number information properly. */
3408 VN_INFO_GET (result)->valnum = result;
3409 VN_INFO (result)->value_id = get_next_value_id ();
3410 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
3411 new_stmt);
3412 VN_INFO (result)->needs_insertion = true;
3413 /* As all "inserted" statements are singleton SCCs, insert
3414 to the valid table. This is strictly needed to
3415 avoid re-generating new value SSA_NAMEs for the same
3416 expression during SCC iteration over and over (the
3417 optimistic table gets cleared after each iteration).
3418 We do not need to insert into the optimistic table, as
3419 lookups there will fall back to the valid table. */
3420 if (current_info == optimistic_info)
3422 current_info = valid_info;
3423 vn_nary_op_insert_stmt (new_stmt, result);
3424 current_info = optimistic_info;
3426 else
3427 vn_nary_op_insert_stmt (new_stmt, result);
3428 if (dump_file && (dump_flags & TDF_DETAILS))
3430 fprintf (dump_file, "Inserting name ");
3431 print_generic_expr (dump_file, result, 0);
3432 fprintf (dump_file, " for expression ");
3433 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
3434 fprintf (dump_file, "\n");
3439 if (result)
3440 changed = set_ssa_val_to (lhs, result);
3441 else
3443 changed = set_ssa_val_to (lhs, lhs);
3444 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3447 return changed;
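/* An illustrative example of the type-punning handling above: with
   union { int i; float f; } u, a load of u.f right after a store to
   u.i can reuse the stored value; because the types differ the load is
   value-numbered to VIEW_CONVERT_EXPR <float> of that value, inserting
   a new SSA name for the expression if it is not available yet.  */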
3451 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3452 and return true if the value number of the LHS has changed as a result. */
3454 static bool
3455 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3457 bool changed = false;
3458 vn_reference_t vnresult = NULL;
3459 tree result, assign;
3460 bool resultsame = false;
3461 tree vuse = gimple_vuse (stmt);
3462 tree vdef = gimple_vdef (stmt);
3464 if (TREE_CODE (op) == SSA_NAME)
3465 op = SSA_VAL (op);
3467 /* First we want to lookup using the *vuses* from the store and see
3468      if the last store to this location with the same address
3469 had the same value.
3471 The vuses represent the memory state before the store. If the
3472      memory state, address, and value of the store are the same as the
3473 last store to this location, then this store will produce the
3474 same memory state as that store.
3476 In this case the vdef versions for this store are value numbered to those
3477 vuse versions, since they represent the same memory state after
3478 this store.
3480 Otherwise, the vdefs for the store are used when inserting into
3481 the table, since the store generates a new memory state. */
3483 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL, false);
3485 if (result)
3487 if (TREE_CODE (result) == SSA_NAME)
3488 result = SSA_VAL (result);
3489 resultsame = expressions_equal_p (result, op);
3492 if ((!result || !resultsame)
3493 /* Only perform the following when being called from PRE
3494 which embeds tail merging. */
3495 && default_vn_walk_kind == VN_WALK)
3497 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3498 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3499 if (vnresult)
3501 VN_INFO (vdef)->use_processed = true;
3502 return set_ssa_val_to (vdef, vnresult->result_vdef);
3506 if (!result || !resultsame)
3508 if (dump_file && (dump_flags & TDF_DETAILS))
3510 fprintf (dump_file, "No store match\n");
3511 fprintf (dump_file, "Value numbering store ");
3512 print_generic_expr (dump_file, lhs, 0);
3513 fprintf (dump_file, " to ");
3514 print_generic_expr (dump_file, op, 0);
3515 fprintf (dump_file, "\n");
3517 /* Have to set value numbers before insert, since insert is
3518 going to valueize the references in-place. */
3519 if (vdef)
3521 changed |= set_ssa_val_to (vdef, vdef);
3524 /* Do not insert structure copies into the tables. */
3525 if (is_gimple_min_invariant (op)
3526 || is_gimple_reg (op))
3527 vn_reference_insert (lhs, op, vdef, NULL);
3529 /* Only perform the following when being called from PRE
3530 which embeds tail merging. */
3531 if (default_vn_walk_kind == VN_WALK)
3533 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3534 vn_reference_insert (assign, lhs, vuse, vdef);
3537 else
3539 /* We had a match, so value number the vdef to have the value
3540 number of the vuse it came from. */
3542 if (dump_file && (dump_flags & TDF_DETAILS))
3543 	fprintf (dump_file, "Store matched earlier value, "
3544 "value numbering store vdefs to matching vuses.\n");
3546 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3549 return changed;
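/* A small illustration of the above: for two stores

     *p_1 = x_2;
     ...            <-- no intervening aliasing store
     *p_1 = x_2;

   the second store finds the first via its vuse with the same address
   and value, so its vdef is value-numbered to its vuse, marking the
   store as not changing the memory state.  */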
3552 /* Visit and value number PHI, return true if the value number
3553 changed. */
3555 static bool
3556 visit_phi (gimple *phi)
3558 bool changed = false;
3559 tree result;
3560 tree sameval = VN_TOP;
3561 bool allsame = true;
3562 unsigned n_executable = 0;
3564 /* TODO: We could check for this in init_sccvn, and replace this
3565 with a gcc_assert. */
3566 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3567 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3569 /* See if all non-TOP arguments have the same value. TOP is
3570 equivalent to everything, so we can ignore it. */
3571 edge_iterator ei;
3572 edge e;
3573 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3574 if (e->flags & EDGE_EXECUTABLE)
3576 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3578 ++n_executable;
3579 if (TREE_CODE (def) == SSA_NAME)
3580 def = SSA_VAL (def);
3581 if (def == VN_TOP)
3582 continue;
3583 if (sameval == VN_TOP)
3584 sameval = def;
3585 else if (!expressions_equal_p (def, sameval))
3587 allsame = false;
3588 break;
3592 /* If none of the edges was executable or all incoming values are
3593      undefined, keep the value-number at VN_TOP.  If only a single edge
3594      is executable, use its value.  */
3595 if (sameval == VN_TOP
3596 || n_executable == 1)
3597 return set_ssa_val_to (PHI_RESULT (phi), sameval);
3599 /* First see if it is equivalent to a phi node in this block. We prefer
3600 this as it allows IV elimination - see PRs 66502 and 67167. */
3601 result = vn_phi_lookup (phi);
3602 if (result)
3603 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3604 /* Otherwise all value numbered to the same value, the phi node has that
3605 value. */
3606 else if (allsame)
3607 changed = set_ssa_val_to (PHI_RESULT (phi), sameval);
3608 else
3610 vn_phi_insert (phi, PHI_RESULT (phi));
3611 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3614 return changed;
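/* Illustrative cases for the above: a PHI whose executable arguments
   all valueize to 5 gets value number 5; a PHI with a single
   executable incoming edge acts as a copy of that argument; and a PHI
   that vn_phi_lookup finds equivalent to an earlier one reuses that
   PHI's value number.  */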
3617 /* Try to simplify RHS using equivalences and constant folding. */
3619 static tree
3620 try_to_simplify (gassign *stmt)
3622 enum tree_code code = gimple_assign_rhs_code (stmt);
3623 tree tem;
3625   /* For stores we can end up simplifying an SSA_NAME rhs.  Just return
3626      in this case; there is no point in doing extra work.  */
3627 if (code == SSA_NAME)
3628 return NULL_TREE;
3630 /* First try constant folding based on our current lattice. */
3631 mprts_hook = vn_lookup_simplify_result;
3632 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3633 mprts_hook = NULL;
3634 if (tem
3635 && (TREE_CODE (tem) == SSA_NAME
3636 || is_gimple_min_invariant (tem)))
3637 return tem;
3639 return NULL_TREE;
3642 /* Visit and value number USE, return true if the value number
3643 changed. */
3645 static bool
3646 visit_use (tree use)
3648 bool changed = false;
3649 gimple *stmt = SSA_NAME_DEF_STMT (use);
3651 mark_use_processed (use);
3653 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3654 if (dump_file && (dump_flags & TDF_DETAILS)
3655 && !SSA_NAME_IS_DEFAULT_DEF (use))
3657 fprintf (dump_file, "Value numbering ");
3658 print_generic_expr (dump_file, use, 0);
3659 fprintf (dump_file, " stmt = ");
3660 print_gimple_stmt (dump_file, stmt, 0, 0);
3663 /* Handle uninitialized uses. */
3664 if (SSA_NAME_IS_DEFAULT_DEF (use))
3665 changed = set_ssa_val_to (use, use);
3666 else if (gimple_code (stmt) == GIMPLE_PHI)
3667 changed = visit_phi (stmt);
3668 else if (gimple_has_volatile_ops (stmt))
3669 changed = defs_to_varying (stmt);
3670 else if (gassign *ass = dyn_cast <gassign *> (stmt))
3672 enum tree_code code = gimple_assign_rhs_code (ass);
3673 tree lhs = gimple_assign_lhs (ass);
3674 tree rhs1 = gimple_assign_rhs1 (ass);
3675 tree simplified;
3677 /* Shortcut for copies. Simplifying copies is pointless,
3678 since we copy the expression and value they represent. */
3679 if (code == SSA_NAME
3680 && TREE_CODE (lhs) == SSA_NAME)
3682 changed = visit_copy (lhs, rhs1);
3683 goto done;
3685 simplified = try_to_simplify (ass);
3686 if (simplified)
3688 if (dump_file && (dump_flags & TDF_DETAILS))
3690 fprintf (dump_file, "RHS ");
3691 print_gimple_expr (dump_file, ass, 0, 0);
3692 fprintf (dump_file, " simplified to ");
3693 print_generic_expr (dump_file, simplified, 0);
3694 fprintf (dump_file, "\n");
3697 /* Setting value numbers to constants will occasionally
3698 screw up phi congruence because constants are not
3699 uniquely associated with a single ssa name that can be
3700 looked up. */
3701 if (simplified
3702 && is_gimple_min_invariant (simplified)
3703 && TREE_CODE (lhs) == SSA_NAME)
3705 changed = set_ssa_val_to (lhs, simplified);
3706 goto done;
3708 else if (simplified
3709 && TREE_CODE (simplified) == SSA_NAME
3710 && TREE_CODE (lhs) == SSA_NAME)
3712 changed = visit_copy (lhs, simplified);
3713 goto done;
3716 if ((TREE_CODE (lhs) == SSA_NAME
3717 /* We can substitute SSA_NAMEs that are live over
3718 abnormal edges with their constant value. */
3719 && !(gimple_assign_copy_p (ass)
3720 && is_gimple_min_invariant (rhs1))
3721 && !(simplified
3722 && is_gimple_min_invariant (simplified))
3723 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3724 /* Stores or copies from SSA_NAMEs that are live over
3725 abnormal edges are a problem. */
3726 || (code == SSA_NAME
3727 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3728 changed = defs_to_varying (ass);
3729 else if (REFERENCE_CLASS_P (lhs)
3730 || DECL_P (lhs))
3731 changed = visit_reference_op_store (lhs, rhs1, ass);
3732 else if (TREE_CODE (lhs) == SSA_NAME)
3734 if ((gimple_assign_copy_p (ass)
3735 && is_gimple_min_invariant (rhs1))
3736 || (simplified
3737 && is_gimple_min_invariant (simplified)))
3739 if (simplified)
3740 changed = set_ssa_val_to (lhs, simplified);
3741 else
3742 changed = set_ssa_val_to (lhs, rhs1);
3744 else
3746 /* Visit the original statement. */
3747 switch (vn_get_stmt_kind (ass))
3749 case VN_NARY:
3750 changed = visit_nary_op (lhs, ass);
3751 break;
3752 case VN_REFERENCE:
3753 changed = visit_reference_op_load (lhs, rhs1, ass);
3754 break;
3755 default:
3756 changed = defs_to_varying (ass);
3757 break;
3761 else
3762 changed = defs_to_varying (ass);
3764 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
3766 tree lhs = gimple_call_lhs (call_stmt);
3767 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3769 /* Try constant folding based on our current lattice. */
3770 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
3771 vn_valueize);
3772 if (simplified)
3774 if (dump_file && (dump_flags & TDF_DETAILS))
3776 fprintf (dump_file, "call ");
3777 print_gimple_expr (dump_file, call_stmt, 0, 0);
3778 fprintf (dump_file, " simplified to ");
3779 print_generic_expr (dump_file, simplified, 0);
3780 fprintf (dump_file, "\n");
3783 /* Setting value numbers to constants will occasionally
3784 screw up phi congruence because constants are not
3785 uniquely associated with a single ssa name that can be
3786 looked up. */
3787 if (simplified
3788 && is_gimple_min_invariant (simplified))
3790 changed = set_ssa_val_to (lhs, simplified);
3791 if (gimple_vdef (call_stmt))
3792 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
3793 SSA_VAL (gimple_vuse (call_stmt)));
3794 goto done;
3796 else if (simplified
3797 && TREE_CODE (simplified) == SSA_NAME)
3799 changed = visit_copy (lhs, simplified);
3800 if (gimple_vdef (call_stmt))
3801 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
3802 SSA_VAL (gimple_vuse (call_stmt)));
3803 goto done;
3805 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3807 changed = defs_to_varying (call_stmt);
3808 goto done;
3812 if (!gimple_call_internal_p (call_stmt)
3813 && (/* Calls to the same function with the same vuse
3814 and the same operands do not necessarily return the same
3815 value, unless they're pure or const. */
3816 gimple_call_flags (call_stmt) & (ECF_PURE | ECF_CONST)
3817 /* If calls have a vdef, subsequent calls won't have
3818 the same incoming vuse. So, if 2 calls with vdef have the
3819 same vuse, we know they're not subsequent.
3820 	     We can value number two non-subsequent calls to the same
3821 	     function with the same vuse and the same operands as equal,
3822 	     because there is no code in the program that can
3823 	     compare the two values...  */
3824 || (gimple_vdef (call_stmt)
3825 /* ... unless the call returns a pointer which does
3826 not alias with anything else. In which case the
3827 		     information that the values are distinct is encoded
3828 in the IL. */
3829 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
3830 /* Only perform the following when being called from PRE
3831 which embeds tail merging. */
3832 && default_vn_walk_kind == VN_WALK)))
3833 changed = visit_reference_op_call (lhs, call_stmt);
3834 else
3835 changed = defs_to_varying (call_stmt);
3837 else
3838 changed = defs_to_varying (stmt);
3839 done:
3840 return changed;
3843 /* Compare two operands by reverse postorder index */
3845 static int
3846 compare_ops (const void *pa, const void *pb)
3848 const tree opa = *((const tree *)pa);
3849 const tree opb = *((const tree *)pb);
3850 gimple *opstmta = SSA_NAME_DEF_STMT (opa);
3851 gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
3852 basic_block bba;
3853 basic_block bbb;
3855 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3856 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3857 else if (gimple_nop_p (opstmta))
3858 return -1;
3859 else if (gimple_nop_p (opstmtb))
3860 return 1;
3862 bba = gimple_bb (opstmta);
3863 bbb = gimple_bb (opstmtb);
3865 if (!bba && !bbb)
3866 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3867 else if (!bba)
3868 return -1;
3869 else if (!bbb)
3870 return 1;
3872 if (bba == bbb)
3874 if (gimple_code (opstmta) == GIMPLE_PHI
3875 && gimple_code (opstmtb) == GIMPLE_PHI)
3876 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3877 else if (gimple_code (opstmta) == GIMPLE_PHI)
3878 return -1;
3879 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3880 return 1;
3881 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3882 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3883 else
3884 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3886 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3889 /* Sort an array containing members of a strongly connected component
3890 SCC so that the members are ordered by RPO number.
3891 This means that when the sort is complete, iterating through the
3892 array will give you the members in RPO order. */
3894 static void
3895 sort_scc (vec<tree> scc)
3897 scc.qsort (compare_ops);
3900 /* Insert the no longer used nary ONARY to the hash INFO. */
3902 static void
3903 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3905 size_t size = sizeof_vn_nary_op (onary->length);
3906 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3907 &info->nary_obstack);
3908 memcpy (nary, onary, size);
3909 vn_nary_op_insert_into (nary, info->nary, false);
3912 /* Insert the no longer used phi OPHI to the hash INFO. */
3914 static void
3915 copy_phi (vn_phi_t ophi, vn_tables_t info)
3917 vn_phi_t phi = info->phis_pool->allocate ();
3918 vn_phi_s **slot;
3919 memcpy (phi, ophi, sizeof (*phi));
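/* The phi argument vector now belongs to the copy; give OPHI a fresh empty
one so the arguments are not released together with the optimistic entry. */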
3920 ophi->phiargs.create (0);
3921 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
3922 gcc_assert (!*slot);
3923 *slot = phi;
3926 /* Insert the no longer used reference OREF into the reference hash table of INFO. */
3928 static void
3929 copy_reference (vn_reference_t oref, vn_tables_t info)
3931 vn_reference_t ref;
3932 vn_reference_s **slot;
3933 ref = info->references_pool->allocate ();
3934 memcpy (ref, oref, sizeof (*ref));
3935 oref->operands.create (0);
3936 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
3937 if (*slot)
3938 free_reference (*slot);
3939 *slot = ref;
3942 /* Process a strongly connected component in the SSA graph. */
3944 static void
3945 process_scc (vec<tree> scc)
3947 tree var;
3948 unsigned int i;
3949 unsigned int iterations = 0;
3950 bool changed = true;
3951 vn_nary_op_iterator_type hin;
3952 vn_phi_iterator_type hip;
3953 vn_reference_iterator_type hir;
3954 vn_nary_op_t nary;
3955 vn_phi_t phi;
3956 vn_reference_t ref;
3958 /* If the SCC has a single member, just visit it. */
3959 if (scc.length () == 1)
3961 tree use = scc[0];
3962 if (VN_INFO (use)->use_processed)
3963 return;
3964 /* We need to make sure it doesn't form a cycle itself, which can
3965 happen for self-referential PHI nodes. In that case we would
3966 end up inserting an expression with VN_TOP operands into the
3967 valid table which makes us derive bogus equivalences later.
3968 The cheapest way to check this is to assume it for all PHI nodes. */
3969 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3970 /* Fallthru to iteration. */ ;
3971 else
3973 visit_use (use);
3974 return;
3978 if (dump_file && (dump_flags & TDF_DETAILS))
3979 print_scc (dump_file, scc);
3981 /* Iterate over the SCC with the optimistic table until it stops
3982 changing. */
3983 current_info = optimistic_info;
3984 while (changed)
3986 changed = false;
3987 iterations++;
3988 if (dump_file && (dump_flags & TDF_DETAILS))
3989 fprintf (dump_file, "Starting iteration %d\n", iterations);
3990 /* As we are value-numbering optimistically we have to
3991 clear the expression tables and the simplified expressions
3992 in each iteration until we converge. */
3993 optimistic_info->nary->empty ();
3994 optimistic_info->phis->empty ();
3995 optimistic_info->references->empty ();
3996 obstack_free (&optimistic_info->nary_obstack, NULL);
3997 gcc_obstack_init (&optimistic_info->nary_obstack);
3998 optimistic_info->phis_pool->release ();
3999 optimistic_info->references_pool->release ();
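/* Sanity check: at the start of an iteration no SCC member may still carry
a cached expression or be marked as needing insertion. */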
4000 FOR_EACH_VEC_ELT (scc, i, var)
4001 gcc_assert (!VN_INFO (var)->needs_insertion
4002 && VN_INFO (var)->expr == NULL);
4003 FOR_EACH_VEC_ELT (scc, i, var)
4004 changed |= visit_use (var);
4007 if (dump_file && (dump_flags & TDF_DETAILS))
4008 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4009 statistics_histogram_event (cfun, "SCC iterations", iterations);
4011 /* Finally, copy the contents of the no longer used optimistic
4012 table to the valid table. */
4013 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
4014 copy_nary (nary, valid_info);
4015 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
4016 copy_phi (phi, valid_info);
4017 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
4018 ref, vn_reference_t, hir)
4019 copy_reference (ref, valid_info);
4021 current_info = valid_info;
4025 /* Pop the components of the found SCC for NAME off the SCC stack
4026 and process them. Returns true if all went well, false if
4027 we run into resource limits. */
4029 static bool
4030 extract_and_process_scc_for_name (tree name)
4032 auto_vec<tree> scc;
4033 tree x;
4035 /* Found an SCC, pop the components off the SCC stack and
4036 process them. */
4039 x = sccstack.pop ();
4041 VN_INFO (x)->on_sccstack = false;
4042 scc.safe_push (x);
4043 } while (x != name);
4045 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
4046 if (scc.length ()
4047 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4049 if (dump_file)
4050 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
4051 "SCC size %u exceeding %u\n", scc.length (),
4052 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4054 return false;
4057 if (scc.length () > 1)
4058 sort_scc (scc);
4060 process_scc (scc);
4062 return true;
4065 /* Depth first search on NAME to discover and process SCC's in the SSA
4066 graph.
4067 Execution of this algorithm relies on the fact that the SCC's are
4068 popped off the stack in topological order.
4069 Returns true if successful, false if we stopped processing SCC's due
4070 to resource constraints. */
4072 static bool
4073 DFS (tree name)
4075 vec<ssa_op_iter> itervec = vNULL;
4076 vec<tree> namevec = vNULL;
4077 use_operand_p usep = NULL;
4078 gimple *defstmt;
4079 tree use;
4080 ssa_op_iter iter;
4082 start_over:
4083 /* SCC info */
4084 VN_INFO (name)->dfsnum = next_dfs_num++;
4085 VN_INFO (name)->visited = true;
4086 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4088 sccstack.safe_push (name);
4089 VN_INFO (name)->on_sccstack = true;
4090 defstmt = SSA_NAME_DEF_STMT (name);
4092 /* Recursively DFS on our operands, looking for SCC's. */
4093 if (!gimple_nop_p (defstmt))
4095 /* Push a new iterator. */
4096 if (gphi *phi = dyn_cast <gphi *> (defstmt))
4097 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4098 else
4099 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4101 else
4102 clear_and_done_ssa_iter (&iter);
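/* For a default definition there are no operands to walk; the cleared
iterator is immediately done and we fall through to SCC extraction below. */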
4104 while (1)
4106 /* If we are done processing uses of a name, go up the stack
4107 of iterators and process SCCs as we found them. */
4108 if (op_iter_done (&iter))
4110 /* See if we found an SCC. */
4111 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4112 if (!extract_and_process_scc_for_name (name))
4114 namevec.release ();
4115 itervec.release ();
4116 return false;
4119 /* Check if we are done. */
4120 if (namevec.is_empty ())
4122 namevec.release ();
4123 itervec.release ();
4124 return true;
4127 /* Restore the last use walker and continue walking there. */
4128 use = name;
4129 name = namevec.pop ();
4130 memcpy (&iter, &itervec.last (),
4131 sizeof (ssa_op_iter));
4132 itervec.pop ();
4133 goto continue_walking;
4136 use = USE_FROM_PTR (usep);
4138 /* Since we handle phi nodes, we will sometimes get
4139 invariants in the use expression. */
4140 if (TREE_CODE (use) == SSA_NAME)
4142 if (! (VN_INFO (use)->visited))
4144 /* Recurse by pushing the current use walking state on
4145 the stack and starting over. */
4146 itervec.safe_push (iter);
4147 namevec.safe_push (name);
4148 name = use;
4149 goto start_over;
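/* Tarjan-style low-link update: NAME's low value absorbs the low value of
the use just finished and, if USE was visited earlier and still sits on the
SCC stack, its DFS number as well. */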
4151 continue_walking:
4152 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4153 VN_INFO (use)->low);
4155 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4156 && VN_INFO (use)->on_sccstack)
4158 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4159 VN_INFO (name)->low);
4163 usep = op_iter_next_use (&iter);
4167 /* Allocate a value number table. */
4169 static void
4170 allocate_vn_table (vn_tables_t table)
4172 table->phis = new vn_phi_table_type (23);
4173 table->nary = new vn_nary_op_table_type (23);
4174 table->references = new vn_reference_table_type (23);
4176 gcc_obstack_init (&table->nary_obstack);
4177 table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4178 table->references_pool = new object_allocator<vn_reference_s>
4179 ("VN references");
4182 /* Free a value number table. */
4184 static void
4185 free_vn_table (vn_tables_t table)
4187 delete table->phis;
4188 table->phis = NULL;
4189 delete table->nary;
4190 table->nary = NULL;
4191 delete table->references;
4192 table->references = NULL;
4193 obstack_free (&table->nary_obstack, NULL);
4194 delete table->phis_pool;
4195 delete table->references_pool;
4198 static void
4199 init_scc_vn (void)
4201 size_t i;
4202 int j;
4203 int *rpo_numbers_temp;
4205 calculate_dominance_info (CDI_DOMINATORS);
4206 mark_dfs_back_edges ();
4208 sccstack.create (0);
4209 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4211 constant_value_ids = BITMAP_ALLOC (NULL);
4213 next_dfs_num = 1;
4214 next_value_id = 1;
4216 vn_ssa_aux_table.create (num_ssa_names + 1);
4217 /* The create call above doesn't actually grow the vector to the right
4218 size; it only preallocates the space, so grow it explicitly here. */
4219 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4220 gcc_obstack_init (&vn_ssa_aux_obstack);
4222 shared_lookup_phiargs.create (0);
4223 shared_lookup_references.create (0);
4224 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4225 rpo_numbers_temp =
4226 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4227 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4229 /* rpo_numbers_temp holds the RPO ordering: rpo_numbers_temp[i] == bb->index
4230 means that the i'th block in RPO order is bb. We want to map block
4231 indices to RPO numbers, so we need to invert this array. */
4232 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4233 rpo_numbers[rpo_numbers_temp[j]] = j;
4235 XDELETE (rpo_numbers_temp);
4237 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
4239 renumber_gimple_stmt_uids ();
4241 /* Create the valid and optimistic value numbering tables. */
4242 valid_info = XCNEW (struct vn_tables_s);
4243 allocate_vn_table (valid_info);
4244 optimistic_info = XCNEW (struct vn_tables_s);
4245 allocate_vn_table (optimistic_info);
4246 current_info = valid_info;
4248 /* Create the VN_INFO structures, and initialize value numbers to
4249 TOP or VARYING for parameters. */
4250 for (i = 1; i < num_ssa_names; i++)
4252 tree name = ssa_name (i);
4253 if (!name)
4254 continue;
4256 VN_INFO_GET (name)->valnum = VN_TOP;
4257 VN_INFO (name)->needs_insertion = false;
4258 VN_INFO (name)->expr = NULL;
4259 VN_INFO (name)->value_id = 0;
4261 if (!SSA_NAME_IS_DEFAULT_DEF (name))
4262 continue;
4264 switch (TREE_CODE (SSA_NAME_VAR (name)))
4266 case VAR_DECL:
4267 /* Undefined vars keep TOP. */
4268 break;
4270 case PARM_DECL:
4271 /* Parameters are VARYING but we can record a condition
4272 if we know it is a non-NULL pointer. */
4273 VN_INFO (name)->visited = true;
4274 VN_INFO (name)->valnum = name;
4275 if (POINTER_TYPE_P (TREE_TYPE (name))
4276 && nonnull_arg_p (SSA_NAME_VAR (name)))
4278 tree ops[2];
4279 ops[0] = name;
4280 ops[1] = build_int_cst (TREE_TYPE (name), 0);
4281 vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4282 boolean_true_node, 0);
4283 if (dump_file && (dump_flags & TDF_DETAILS))
4285 fprintf (dump_file, "Recording ");
4286 print_generic_expr (dump_file, name, TDF_SLIM);
4287 fprintf (dump_file, " != 0\n");
4290 break;
4292 case RESULT_DECL:
4293 /* If the result is passed by invisible reference the default
4294 def is initialized, otherwise it's uninitialized. */
4295 if (DECL_BY_REFERENCE (SSA_NAME_VAR (name)))
4297 VN_INFO (name)->visited = true;
4298 VN_INFO (name)->valnum = name;
4300 break;
4302 default:
4303 gcc_unreachable ();
4308 /* Restore SSA info that has been reset on value leaders. */
4310 void
4311 scc_vn_restore_ssa_info (void)
4313 for (unsigned i = 0; i < num_ssa_names; i++)
4315 tree name = ssa_name (i);
4316 if (name
4317 && has_VN_INFO (name))
4319 if (VN_INFO (name)->needs_insertion)
4320 ;
4321 else if (POINTER_TYPE_P (TREE_TYPE (name))
4322 && VN_INFO (name)->info.ptr_info)
4323 SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4324 else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4325 && VN_INFO (name)->info.range_info)
4327 SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4328 SSA_NAME_ANTI_RANGE_P (name)
4329 = VN_INFO (name)->range_info_anti_range_p;
4335 void
4336 free_scc_vn (void)
4338 size_t i;
4340 delete constant_to_value_id;
4341 constant_to_value_id = NULL;
4342 BITMAP_FREE (constant_value_ids);
4343 shared_lookup_phiargs.release ();
4344 shared_lookup_references.release ();
4345 XDELETEVEC (rpo_numbers);
4347 for (i = 0; i < num_ssa_names; i++)
4349 tree name = ssa_name (i);
4350 if (name
4351 && has_VN_INFO (name)
4352 && VN_INFO (name)->needs_insertion)
4353 release_ssa_name (name);
4355 obstack_free (&vn_ssa_aux_obstack, NULL);
4356 vn_ssa_aux_table.release ();
4358 sccstack.release ();
4359 free_vn_table (valid_info);
4360 XDELETE (valid_info);
4361 free_vn_table (optimistic_info);
4362 XDELETE (optimistic_info);
4364 BITMAP_FREE (const_parms);
4367 /* Set *ID according to RESULT. */
4369 static void
4370 set_value_id_for_result (tree result, unsigned int *id)
4372 if (result && TREE_CODE (result) == SSA_NAME)
4373 *id = VN_INFO (result)->value_id;
4374 else if (result && is_gimple_min_invariant (result))
4375 *id = get_or_alloc_constant_value_id (result);
4376 else
4377 *id = get_next_value_id ();
4380 /* Set the value ids in the valid hash tables. */
4382 static void
4383 set_hashtable_value_ids (void)
4385 vn_nary_op_iterator_type hin;
4386 vn_phi_iterator_type hip;
4387 vn_reference_iterator_type hir;
4388 vn_nary_op_t vno;
4389 vn_reference_t vr;
4390 vn_phi_t vp;
4392 /* Now set the value ids of the things we had put in the hash
4393 table. */
4395 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4396 set_value_id_for_result (vno->result, &vno->value_id);
4398 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4399 set_value_id_for_result (vp->result, &vp->value_id);
4401 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4402 hir)
4403 set_value_id_for_result (vr->result, &vr->value_id);
4406 class sccvn_dom_walker : public dom_walker
4408 public:
4409 sccvn_dom_walker ()
4410 : dom_walker (CDI_DOMINATORS, true), fail (false), cond_stack (vNULL) {}
4411 ~sccvn_dom_walker ();
4413 virtual edge before_dom_children (basic_block);
4414 virtual void after_dom_children (basic_block);
4416 void record_cond (basic_block,
4417 enum tree_code code, tree lhs, tree rhs, bool value);
4418 void record_conds (basic_block,
4419 enum tree_code code, tree lhs, tree rhs, bool value);
4421 bool fail;
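/* Temporarily recorded conditions, paired with the basic block they were
recorded for and the table entry they replaced (if any), so that
after_dom_children can undo them. */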
4422 vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4423 cond_stack;
4426 sccvn_dom_walker::~sccvn_dom_walker ()
4428 cond_stack.release ();
4431 /* Record a temporary condition for the BB and its dominated blocks. */
4433 void
4434 sccvn_dom_walker::record_cond (basic_block bb,
4435 enum tree_code code, tree lhs, tree rhs,
4436 bool value)
4438 tree ops[2] = { lhs, rhs };
4439 vn_nary_op_t old = NULL;
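/* If the expression is already in the table, stash the old entry so the
temporary condition takes precedence; after_dom_children restores it. */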
4440 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4441 current_info->nary->remove_elt_with_hash (old, old->hashcode);
4442 vn_nary_op_t cond
4443 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4444 value
4445 ? boolean_true_node
4446 : boolean_false_node, 0);
4447 if (dump_file && (dump_flags & TDF_DETAILS))
4449 fprintf (dump_file, "Recording temporarily ");
4450 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4451 fprintf (dump_file, " %s ", get_tree_code_name (code));
4452 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4453 fprintf (dump_file, " == %s%s\n",
4454 value ? "true" : "false",
4455 old ? " (old entry saved)" : "");
4457 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4460 /* Record temporary conditions for the BB and its dominated blocks
4461 according to LHS CODE RHS == VALUE and the conditions it implies. */
4463 void
4464 sccvn_dom_walker::record_conds (basic_block bb,
4465 enum tree_code code, tree lhs, tree rhs,
4466 bool value)
4468 /* Record the original condition. */
4469 record_cond (bb, code, lhs, rhs, value);
4471 if (!value)
4472 return;
4474 /* Record implied conditions if the condition is true. Note that
4475 the inversion is already recorded. */
4476 switch (code)
4478 case LT_EXPR:
4479 case GT_EXPR:
4480 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4481 record_cond (bb, NE_EXPR, lhs, rhs, true);
4482 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4483 break;
4485 case EQ_EXPR:
4486 record_cond (bb, LE_EXPR, lhs, rhs, true);
4487 record_cond (bb, GE_EXPR, lhs, rhs, true);
4488 record_cond (bb, LT_EXPR, lhs, rhs, false);
4489 record_cond (bb, GT_EXPR, lhs, rhs, false);
4490 break;
4492 default:
4493 break;
4497 /* Restore expressions and values derived from conditionals. */
4499 void
4500 sccvn_dom_walker::after_dom_children (basic_block bb)
4502 while (!cond_stack.is_empty ()
4503 && cond_stack.last ().first == bb)
4505 vn_nary_op_t cond = cond_stack.last ().second.first;
4506 vn_nary_op_t old = cond_stack.last ().second.second;
4507 current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4508 if (old)
4509 vn_nary_op_insert_into (old, current_info->nary, false);
4510 cond_stack.pop ();
4514 /* Value number all statements in BB. */
4516 edge
4517 sccvn_dom_walker::before_dom_children (basic_block bb)
4519 edge e;
4520 edge_iterator ei;
4522 if (fail)
4523 return NULL;
4525 if (dump_file && (dump_flags & TDF_DETAILS))
4526 fprintf (dump_file, "Visiting BB %d\n", bb->index);
4528 /* If we have a single predecessor record the equivalence from a
4529 possible condition on the predecessor edge. */
4530 edge pred_e = NULL;
4531 FOR_EACH_EDGE (e, ei, bb->preds)
4533 /* Ignore simple backedges into this block so that conditions can still
4534 be recorded in loop headers. */
4535 if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
4536 continue;
4537 if (! pred_e)
4538 pred_e = e;
4539 else
4541 pred_e = NULL;
4542 break;
4545 if (pred_e)
4547 /* Check if there are multiple executable successor edges in
4548 the source block. Otherwise there is no additional info
4549 to be recorded. */
4550 edge e2;
4551 FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4552 if (e2 != pred_e
4553 && e2->flags & EDGE_EXECUTABLE)
4554 break;
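/* E2 is non-NULL here only if another executable successor edge was found,
i.e. the condition in pred_e->src actually selects between edges. */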
4555 if (e2 && (e2->flags & EDGE_EXECUTABLE))
4557 gimple *stmt = last_stmt (pred_e->src);
4558 if (stmt
4559 && gimple_code (stmt) == GIMPLE_COND)
4561 enum tree_code code = gimple_cond_code (stmt);
4562 tree lhs = gimple_cond_lhs (stmt);
4563 tree rhs = gimple_cond_rhs (stmt);
4564 record_conds (bb, code, lhs, rhs,
4565 (pred_e->flags & EDGE_TRUE_VALUE) != 0);
4566 code = invert_tree_comparison (code, HONOR_NANS (lhs));
4567 if (code != ERROR_MARK)
4568 record_conds (bb, code, lhs, rhs,
4569 (pred_e->flags & EDGE_TRUE_VALUE) == 0);
4574 /* Value-number all defs in the basic-block. */
4575 for (gphi_iterator gsi = gsi_start_phis (bb);
4576 !gsi_end_p (gsi); gsi_next (&gsi))
4578 gphi *phi = gsi.phi ();
4579 tree res = PHI_RESULT (phi);
4580 if (!VN_INFO (res)->visited
4581 && !DFS (res))
4583 fail = true;
4584 return NULL;
4587 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4588 !gsi_end_p (gsi); gsi_next (&gsi))
4590 ssa_op_iter i;
4591 tree op;
4592 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4593 if (!VN_INFO (op)->visited
4594 && !DFS (op))
4596 fail = true;
4597 return NULL;
4601 /* Finally look at the last stmt. */
4602 gimple *stmt = last_stmt (bb);
4603 if (!stmt)
4604 return NULL;
4606 enum gimple_code code = gimple_code (stmt);
4607 if (code != GIMPLE_COND
4608 && code != GIMPLE_SWITCH
4609 && code != GIMPLE_GOTO)
4610 return NULL;
4612 if (dump_file && (dump_flags & TDF_DETAILS))
4614 fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
4615 print_gimple_stmt (dump_file, stmt, 0, 0);
4618 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4619 if value-numbering can prove they are not reachable. Handling
4620 computed gotos is also possible. */
4621 tree val;
4622 switch (code)
4624 case GIMPLE_COND:
4626 tree lhs = vn_valueize (gimple_cond_lhs (stmt));
4627 tree rhs = vn_valueize (gimple_cond_rhs (stmt));
4628 val = gimple_simplify (gimple_cond_code (stmt),
4629 boolean_type_node, lhs, rhs,
4630 NULL, vn_valueize);
4631 /* If that didn't simplify to a constant see if we have recorded
4632 temporary expressions from taken edges. */
4633 if (!val || TREE_CODE (val) != INTEGER_CST)
4635 tree ops[2];
4636 ops[0] = lhs;
4637 ops[1] = rhs;
4638 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
4639 boolean_type_node, ops, NULL);
4641 break;
4643 case GIMPLE_SWITCH:
4644 val = gimple_switch_index (as_a <gswitch *> (stmt));
4645 break;
4646 case GIMPLE_GOTO:
4647 val = gimple_goto_dest (stmt);
4648 break;
4649 default:
4650 gcc_unreachable ();
4652 if (!val)
4653 return NULL;
4655 edge taken = find_taken_edge (bb, vn_valueize (val));
4656 if (!taken)
4657 return NULL;
4659 if (dump_file && (dump_flags & TDF_DETAILS))
4660 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4661 "not executable\n", bb->index, bb->index, taken->dest->index);
4663 return taken;
4666 /* Do SCCVN. Returns true if it finished, false if we bailed out
4667 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
4668 how we use alias-oracle walking during the VN process. */
4670 bool
4671 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4673 size_t i;
4675 default_vn_walk_kind = default_vn_walk_kind_;
4677 init_scc_vn ();
4679 /* Collect pointers we know point to readonly memory. */
4680 const_parms = BITMAP_ALLOC (NULL);
4681 tree fnspec = lookup_attribute ("fn spec",
4682 TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
4683 if (fnspec)
4685 fnspec = TREE_VALUE (TREE_VALUE (fnspec));
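/* In a "fn spec" string character 0 describes the return value and
character I describes argument I; 'R' and 'r' indicate the pointed-to
memory is only read, which is why I starts at 1 below. */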
4686 i = 1;
4687 for (tree arg = DECL_ARGUMENTS (cfun->decl);
4688 arg; arg = DECL_CHAIN (arg), ++i)
4690 if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
4691 break;
4692 if (TREE_STRING_POINTER (fnspec)[i] == 'R'
4693 || TREE_STRING_POINTER (fnspec)[i] == 'r')
4695 tree name = ssa_default_def (cfun, arg);
4696 if (name)
4697 bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
4702 /* Walk all blocks in dominator order, value-numbering stmts
4703 SSA defs and decide whether outgoing edges are not executable. */
4704 sccvn_dom_walker walker;
4705 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4706 if (walker.fail)
4708 free_scc_vn ();
4709 return false;
4712 /* Initialize the value ids and prune out remaining VN_TOPs
4713 from dead code. */
4714 for (i = 1; i < num_ssa_names; ++i)
4716 tree name = ssa_name (i);
4717 vn_ssa_aux_t info;
4718 if (!name)
4719 continue;
4720 info = VN_INFO (name);
4721 if (!info->visited)
4722 info->valnum = name;
4723 if (info->valnum == name
4724 || info->valnum == VN_TOP)
4725 info->value_id = get_next_value_id ();
4726 else if (is_gimple_min_invariant (info->valnum))
4727 info->value_id = get_or_alloc_constant_value_id (info->valnum);
4730 /* Propagate value ids from the SSA names' value numbers. */
4731 for (i = 1; i < num_ssa_names; ++i)
4733 tree name = ssa_name (i);
4734 vn_ssa_aux_t info;
4735 if (!name)
4736 continue;
4737 info = VN_INFO (name);
4738 if (TREE_CODE (info->valnum) == SSA_NAME
4739 && info->valnum != name
4740 && info->value_id != VN_INFO (info->valnum)->value_id)
4741 info->value_id = VN_INFO (info->valnum)->value_id;
4744 set_hashtable_value_ids ();
4746 if (dump_file && (dump_flags & TDF_DETAILS))
4748 fprintf (dump_file, "Value numbers:\n");
4749 for (i = 0; i < num_ssa_names; i++)
4751 tree name = ssa_name (i);
4752 if (name
4753 && VN_INFO (name)->visited
4754 && SSA_VAL (name) != name)
4756 print_generic_expr (dump_file, name, 0);
4757 fprintf (dump_file, " = ");
4758 print_generic_expr (dump_file, SSA_VAL (name), 0);
4759 fprintf (dump_file, "\n");
4764 return true;
4767 /* Return the maximum value id we have ever seen. */
4769 unsigned int
4770 get_max_value_id (void)
4772 return next_value_id;
4775 /* Return the next unique value id. */
4777 unsigned int
4778 get_next_value_id (void)
4780 return next_value_id++;
4784 /* Compare two expressions E1 and E2 and return true if they are equal. */
4786 bool
4787 expressions_equal_p (tree e1, tree e2)
4789 /* The obvious case. */
4790 if (e1 == e2)
4791 return true;
4793 /* If either one is VN_TOP consider them equal. */
4794 if (e1 == VN_TOP || e2 == VN_TOP)
4795 return true;
4797 /* If only one of them is null, they cannot be equal. */
4798 if (!e1 || !e2)
4799 return false;
4801 /* Now perform the actual comparison. */
4802 if (TREE_CODE (e1) == TREE_CODE (e2)
4803 && operand_equal_p (e1, e2, OEP_PURE_SAME))
4804 return true;
4806 return false;
4810 /* Return true if the nary operation NARY may trap. This is a copy
4811 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
4813 bool
4814 vn_nary_may_trap (vn_nary_op_t nary)
4816 tree type;
4817 tree rhs2 = NULL_TREE;
4818 bool honor_nans = false;
4819 bool honor_snans = false;
4820 bool fp_operation = false;
4821 bool honor_trapv = false;
4822 bool handled, ret;
4823 unsigned i;
4825 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4826 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4827 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4829 type = nary->type;
4830 fp_operation = FLOAT_TYPE_P (type);
4831 if (fp_operation)
4833 honor_nans = flag_trapping_math && !flag_finite_math_only;
4834 honor_snans = flag_signaling_nans != 0;
4836 else if (INTEGRAL_TYPE_P (type)
4837 && TYPE_OVERFLOW_TRAPS (type))
4838 honor_trapv = true;
4840 if (nary->length >= 2)
4841 rhs2 = nary->op[1];
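/* The second operand lets the helper prove, e.g., that a division by a
nonzero constant cannot trap. */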
4842 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4843 honor_trapv,
4844 honor_nans, honor_snans, rhs2,
4845 &handled);
4846 if (handled
4847 && ret)
4848 return true;
4850 for (i = 0; i < nary->length; ++i)
4851 if (tree_could_trap_p (nary->op[i]))
4852 return true;
4854 return false;