Configury changes for obstack optimization
[official-gcc.git] / gcc / tree-ssa-sccvn.c
blob9585f905438b5ff07dae1e4782fd93b3d052347d
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2015 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "emit-rtl.h"
33 #include "cgraph.h"
34 #include "gimple-pretty-print.h"
35 #include "alias.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "cfganal.h"
39 #include "tree-inline.h"
40 #include "internal-fn.h"
41 #include "gimple-fold.h"
42 #include "tree-eh.h"
43 #include "gimplify.h"
44 #include "flags.h"
45 #include "dojump.h"
46 #include "explow.h"
47 #include "calls.h"
48 #include "varasm.h"
49 #include "stmt.h"
50 #include "expr.h"
51 #include "tree-dfa.h"
52 #include "tree-ssa.h"
53 #include "dumpfile.h"
54 #include "cfgloop.h"
55 #include "params.h"
56 #include "tree-ssa-propagate.h"
57 #include "tree-ssa-sccvn.h"
58 #include "tree-cfg.h"
59 #include "domwalk.h"
60 #include "gimple-iterator.h"
61 #include "gimple-match.h"
63 /* This algorithm is based on the SCC algorithm presented by Keith
64 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
65 (http://citeseer.ist.psu.edu/41805.html). In
66 straight line code, it is equivalent to a regular hash based value
67 numbering that is performed in reverse postorder.
69 For code with cycles, there are two alternatives, both of which
70 require keeping the hashtables separate from the actual list of
71 value numbers for SSA names.
73 1. Iterate value numbering in an RPO walk of the blocks, removing
74 all the entries from the hashtable after each iteration (but
75 keeping the SSA name->value number mapping between iterations).
76 Iterate until it does not change.
78 2. Perform value numbering as part of an SCC walk on the SSA graph,
79 iterating only the cycles in the SSA graph until they do not change
80 (using a separate, optimistic hashtable for value numbering the SCC
81 operands).
83 The second is not just faster in practice (because most SSA graph
84 cycles do not involve all the variables in the graph), it also has
85 some nice properties.
87 One of these nice properties is that when we pop an SCC off the
88 stack, we are guaranteed to have processed all the operands coming from
89 *outside of that SCC*, so we do not need to do anything special to
90 ensure they have value numbers.
92 Another nice property is that the SCC walk is done as part of a DFS
93 of the SSA graph, which makes it easy to perform combining and
94 simplifying operations at the same time.
96 The code below is deliberately written in a way that makes it easy
97 to separate the SCC walk from the other work it does.
99 In order to propagate constants through the code, we track which
100 expressions contain constants, and use those while folding. In
101 theory, we could also track expressions whose value numbers are
102 replaced, in case we end up folding based on expression
103 identities.
105 In order to value number memory, we assign value numbers to vuses.
106 This enables us to note that, for example, stores to the same
107 address of the same value from the same starting memory states are
108 equivalent.
109 TODO:
111 1. We can iterate only the changing portions of the SCC's, but
112 I have not seen an SCC big enough for this to be a win.
113 2. If you differentiate between phi nodes for loops and phi nodes
114 for if-then-else, you can properly consider phi nodes in different
115 blocks for equivalence.
116 3. We could value number vuses in more cases, particularly, whole
117 structure copies.
121 static tree *last_vuse_ptr;
122 static vn_lookup_kind vn_walk_kind;
123 static vn_lookup_kind default_vn_walk_kind;
124 bitmap const_parms;
126 /* vn_nary_op hashtable helpers. */
128 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
130 typedef vn_nary_op_s *compare_type;
131 static inline hashval_t hash (const vn_nary_op_s *);
132 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
135 /* Return the computed hashcode for nary operation P1. */
137 inline hashval_t
138 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
140 return vno1->hashcode;
143 /* Compare nary operations P1 and P2 and return true if they are
144 equivalent. */
146 inline bool
147 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
149 return vn_nary_op_eq (vno1, vno2);
152 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
153 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
156 /* vn_phi hashtable helpers. */
158 static int
159 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
161 struct vn_phi_hasher : pointer_hash <vn_phi_s>
163 static inline hashval_t hash (const vn_phi_s *);
164 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
165 static inline void remove (vn_phi_s *);
168 /* Return the computed hashcode for phi operation P1. */
170 inline hashval_t
171 vn_phi_hasher::hash (const vn_phi_s *vp1)
173 return vp1->hashcode;
176 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
178 inline bool
179 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
181 return vn_phi_eq (vp1, vp2);
184 /* Free a phi operation structure VP. */
186 inline void
187 vn_phi_hasher::remove (vn_phi_s *phi)
189 phi->phiargs.release ();
192 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
193 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
196 /* Compare two reference operands P1 and P2 for equality. Return true if
197 they are equal, and false otherwise. */
199 static int
200 vn_reference_op_eq (const void *p1, const void *p2)
202 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
203 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
205 return (vro1->opcode == vro2->opcode
206 /* We do not care for differences in type qualification. */
207 && (vro1->type == vro2->type
208 || (vro1->type && vro2->type
209 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
210 TYPE_MAIN_VARIANT (vro2->type))))
211 && expressions_equal_p (vro1->op0, vro2->op0)
212 && expressions_equal_p (vro1->op1, vro2->op1)
213 && expressions_equal_p (vro1->op2, vro2->op2));
216 /* Free a reference operation structure VP. */
218 static inline void
219 free_reference (vn_reference_s *vr)
221 vr->operands.release ();
225 /* vn_reference hashtable helpers. */
227 struct vn_reference_hasher : pointer_hash <vn_reference_s>
229 static inline hashval_t hash (const vn_reference_s *);
230 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
231 static inline void remove (vn_reference_s *);
234 /* Return the hashcode for a given reference operation P1. */
236 inline hashval_t
237 vn_reference_hasher::hash (const vn_reference_s *vr1)
239 return vr1->hashcode;
242 inline bool
243 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
245 return vn_reference_eq (v, c);
248 inline void
249 vn_reference_hasher::remove (vn_reference_s *v)
251 free_reference (v);
254 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
255 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
258 /* The set of hashtables and alloc_pool's for their items. */
260 typedef struct vn_tables_s
262 vn_nary_op_table_type *nary;
263 vn_phi_table_type *phis;
264 vn_reference_table_type *references;
265 struct obstack nary_obstack;
266 object_allocator<vn_phi_s> *phis_pool;
267 object_allocator<vn_reference_s> *references_pool;
268 } *vn_tables_t;
271 /* vn_constant hashtable helpers. */
273 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
275 static inline hashval_t hash (const vn_constant_s *);
276 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
279 /* Hash table hash function for vn_constant_t. */
281 inline hashval_t
282 vn_constant_hasher::hash (const vn_constant_s *vc1)
284 return vc1->hashcode;
287 /* Hash table equality function for vn_constant_t. */
289 inline bool
290 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
292 if (vc1->hashcode != vc2->hashcode)
293 return false;
295 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
298 static hash_table<vn_constant_hasher> *constant_to_value_id;
299 static bitmap constant_value_ids;
302 /* Valid hashtables storing information we have proven to be
303 correct. */
305 static vn_tables_t valid_info;
307 /* Optimistic hashtables storing information we are making assumptions about
308 during iterations. */
310 static vn_tables_t optimistic_info;
312 /* Pointer to the set of hashtables that is currently being used.
313 Should always point to either the optimistic_info, or the
314 valid_info. */
316 static vn_tables_t current_info;
319 /* Reverse post order index for each basic block. */
321 static int *rpo_numbers;
323 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
325 /* Return the SSA value of the VUSE x, supporting released VDEFs
326 during elimination which will value-number the VDEF to the
327 associated VUSE (but not substitute in the whole lattice). */
329 static inline tree
330 vuse_ssa_val (tree x)
332 if (!x)
333 return NULL_TREE;
337 x = SSA_VAL (x);
339 while (SSA_NAME_IN_FREE_LIST (x));
341 return x;
344 /* This represents the top of the VN lattice, which is the universal
345 value. */
347 tree VN_TOP;
349 /* Unique counter for our value ids. */
351 static unsigned int next_value_id;
353 /* Next DFS number and the stack for strongly connected component
354 detection. */
356 static unsigned int next_dfs_num;
357 static vec<tree> sccstack;
361 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
362 are allocated on an obstack for locality reasons, and to free them
363 without looping over the vec. */
365 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
366 static struct obstack vn_ssa_aux_obstack;
368 /* Return whether there is value numbering information for a given SSA name. */
370 bool
371 has_VN_INFO (tree name)
373 if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
374 return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
375 return false;
378 /* Return the value numbering information for a given SSA name. */
380 vn_ssa_aux_t
381 VN_INFO (tree name)
383 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
384 gcc_checking_assert (res);
385 return res;
388 /* Set the value numbering info for a given SSA name to a given
389 value. */
391 static inline void
392 VN_INFO_SET (tree name, vn_ssa_aux_t value)
394 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
397 /* Initialize the value numbering info for a given SSA name.
398 This should be called just once for every SSA name. */
400 vn_ssa_aux_t
401 VN_INFO_GET (tree name)
403 vn_ssa_aux_t newinfo;
405 gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
406 || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
407 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
408 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
409 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
410 vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
411 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
412 return newinfo;
416 /* Return the vn_kind the expression computed by the stmt should be
417 associated with. */
419 enum vn_kind
420 vn_get_stmt_kind (gimple *stmt)
422 switch (gimple_code (stmt))
424 case GIMPLE_CALL:
425 return VN_REFERENCE;
426 case GIMPLE_PHI:
427 return VN_PHI;
428 case GIMPLE_ASSIGN:
430 enum tree_code code = gimple_assign_rhs_code (stmt);
431 tree rhs1 = gimple_assign_rhs1 (stmt);
432 switch (get_gimple_rhs_class (code))
434 case GIMPLE_UNARY_RHS:
435 case GIMPLE_BINARY_RHS:
436 case GIMPLE_TERNARY_RHS:
437 return VN_NARY;
438 case GIMPLE_SINGLE_RHS:
439 switch (TREE_CODE_CLASS (code))
441 case tcc_reference:
442 /* VOP-less references can go through unary case. */
443 if ((code == REALPART_EXPR
444 || code == IMAGPART_EXPR
445 || code == VIEW_CONVERT_EXPR
446 || code == BIT_FIELD_REF)
447 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
448 return VN_NARY;
450 /* Fallthrough. */
451 case tcc_declaration:
452 return VN_REFERENCE;
454 case tcc_constant:
455 return VN_CONSTANT;
457 default:
458 if (code == ADDR_EXPR)
459 return (is_gimple_min_invariant (rhs1)
460 ? VN_CONSTANT : VN_REFERENCE);
461 else if (code == CONSTRUCTOR)
462 return VN_NARY;
463 return VN_NONE;
465 default:
466 return VN_NONE;
469 default:
470 return VN_NONE;
474 /* Lookup a value id for CONSTANT and return it. If it does not
475 exist returns 0. */
477 unsigned int
478 get_constant_value_id (tree constant)
480 vn_constant_s **slot;
481 struct vn_constant_s vc;
483 vc.hashcode = vn_hash_constant_with_type (constant);
484 vc.constant = constant;
485 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
486 if (slot)
487 return (*slot)->value_id;
488 return 0;
491 /* Lookup a value id for CONSTANT, and if it does not exist, create a
492 new one and return it. If it does exist, return it. */
494 unsigned int
495 get_or_alloc_constant_value_id (tree constant)
497 vn_constant_s **slot;
498 struct vn_constant_s vc;
499 vn_constant_t vcp;
501 vc.hashcode = vn_hash_constant_with_type (constant);
502 vc.constant = constant;
503 slot = constant_to_value_id->find_slot (&vc, INSERT);
504 if (*slot)
505 return (*slot)->value_id;
507 vcp = XNEW (struct vn_constant_s);
508 vcp->hashcode = vc.hashcode;
509 vcp->constant = constant;
510 vcp->value_id = get_next_value_id ();
511 *slot = vcp;
512 bitmap_set_bit (constant_value_ids, vcp->value_id);
513 return vcp->value_id;
516 /* Return true if V is a value id for a constant. */
518 bool
519 value_id_constant_p (unsigned int v)
521 return bitmap_bit_p (constant_value_ids, v);
524 /* Compute the hash for a reference operand VRO1. */
526 static void
527 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
529 hstate.add_int (vro1->opcode);
530 if (vro1->op0)
531 inchash::add_expr (vro1->op0, hstate);
532 if (vro1->op1)
533 inchash::add_expr (vro1->op1, hstate);
534 if (vro1->op2)
535 inchash::add_expr (vro1->op2, hstate);
538 /* Compute a hash for the reference operation VR1 and return it. */
540 static hashval_t
541 vn_reference_compute_hash (const vn_reference_t vr1)
543 inchash::hash hstate;
544 hashval_t result;
545 int i;
546 vn_reference_op_t vro;
547 HOST_WIDE_INT off = -1;
548 bool deref = false;
550 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
552 if (vro->opcode == MEM_REF)
553 deref = true;
554 else if (vro->opcode != ADDR_EXPR)
555 deref = false;
556 if (vro->off != -1)
558 if (off == -1)
559 off = 0;
560 off += vro->off;
562 else
564 if (off != -1
565 && off != 0)
566 hstate.add_int (off);
567 off = -1;
568 if (deref
569 && vro->opcode == ADDR_EXPR)
571 if (vro->op0)
573 tree op = TREE_OPERAND (vro->op0, 0);
574 hstate.add_int (TREE_CODE (op));
575 inchash::add_expr (op, hstate);
578 else
579 vn_reference_op_compute_hash (vro, hstate);
582 result = hstate.end ();
583 /* ??? We would ICE later if we hash instead of adding that in. */
584 if (vr1->vuse)
585 result += SSA_NAME_VERSION (vr1->vuse);
587 return result;
590 /* Return true if reference operations VR1 and VR2 are equivalent. This
591 means they have the same set of operands and vuses. */
593 bool
594 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
596 unsigned i, j;
598 /* Early out if this is not a hash collision. */
599 if (vr1->hashcode != vr2->hashcode)
600 return false;
602 /* The VOP needs to be the same. */
603 if (vr1->vuse != vr2->vuse)
604 return false;
606 /* If the operands are the same we are done. */
607 if (vr1->operands == vr2->operands)
608 return true;
610 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
611 return false;
613 if (INTEGRAL_TYPE_P (vr1->type)
614 && INTEGRAL_TYPE_P (vr2->type))
616 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
617 return false;
619 else if (INTEGRAL_TYPE_P (vr1->type)
620 && (TYPE_PRECISION (vr1->type)
621 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
622 return false;
623 else if (INTEGRAL_TYPE_P (vr2->type)
624 && (TYPE_PRECISION (vr2->type)
625 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
626 return false;
628 i = 0;
629 j = 0;
632 HOST_WIDE_INT off1 = 0, off2 = 0;
633 vn_reference_op_t vro1, vro2;
634 vn_reference_op_s tem1, tem2;
635 bool deref1 = false, deref2 = false;
636 for (; vr1->operands.iterate (i, &vro1); i++)
638 if (vro1->opcode == MEM_REF)
639 deref1 = true;
640 /* Do not look through a storage order barrier. */
641 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
642 return false;
643 if (vro1->off == -1)
644 break;
645 off1 += vro1->off;
647 for (; vr2->operands.iterate (j, &vro2); j++)
649 if (vro2->opcode == MEM_REF)
650 deref2 = true;
651 /* Do not look through a storage order barrier. */
652 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
653 return false;
654 if (vro2->off == -1)
655 break;
656 off2 += vro2->off;
658 if (off1 != off2)
659 return false;
660 if (deref1 && vro1->opcode == ADDR_EXPR)
662 memset (&tem1, 0, sizeof (tem1));
663 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
664 tem1.type = TREE_TYPE (tem1.op0);
665 tem1.opcode = TREE_CODE (tem1.op0);
666 vro1 = &tem1;
667 deref1 = false;
669 if (deref2 && vro2->opcode == ADDR_EXPR)
671 memset (&tem2, 0, sizeof (tem2));
672 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
673 tem2.type = TREE_TYPE (tem2.op0);
674 tem2.opcode = TREE_CODE (tem2.op0);
675 vro2 = &tem2;
676 deref2 = false;
678 if (deref1 != deref2)
679 return false;
680 if (!vn_reference_op_eq (vro1, vro2))
681 return false;
682 ++j;
683 ++i;
685 while (vr1->operands.length () != i
686 || vr2->operands.length () != j);
688 return true;
691 /* Copy the operations present in load/store REF into RESULT, a vector of
692 vn_reference_op_s's. */
694 static void
695 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
697 if (TREE_CODE (ref) == TARGET_MEM_REF)
699 vn_reference_op_s temp;
701 result->reserve (3);
703 memset (&temp, 0, sizeof (temp));
704 temp.type = TREE_TYPE (ref);
705 temp.opcode = TREE_CODE (ref);
706 temp.op0 = TMR_INDEX (ref);
707 temp.op1 = TMR_STEP (ref);
708 temp.op2 = TMR_OFFSET (ref);
709 temp.off = -1;
710 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
711 temp.base = MR_DEPENDENCE_BASE (ref);
712 result->quick_push (temp);
714 memset (&temp, 0, sizeof (temp));
715 temp.type = NULL_TREE;
716 temp.opcode = ERROR_MARK;
717 temp.op0 = TMR_INDEX2 (ref);
718 temp.off = -1;
719 result->quick_push (temp);
721 memset (&temp, 0, sizeof (temp));
722 temp.type = NULL_TREE;
723 temp.opcode = TREE_CODE (TMR_BASE (ref));
724 temp.op0 = TMR_BASE (ref);
725 temp.off = -1;
726 result->quick_push (temp);
727 return;
730 /* For non-calls, store the information that makes up the address. */
731 tree orig = ref;
732 while (ref)
734 vn_reference_op_s temp;
736 memset (&temp, 0, sizeof (temp));
737 temp.type = TREE_TYPE (ref);
738 temp.opcode = TREE_CODE (ref);
739 temp.off = -1;
741 switch (temp.opcode)
743 case MODIFY_EXPR:
744 temp.op0 = TREE_OPERAND (ref, 1);
745 break;
746 case WITH_SIZE_EXPR:
747 temp.op0 = TREE_OPERAND (ref, 1);
748 temp.off = 0;
749 break;
750 case MEM_REF:
751 /* The base address gets its own vn_reference_op_s structure. */
752 temp.op0 = TREE_OPERAND (ref, 1);
753 if (tree_fits_shwi_p (TREE_OPERAND (ref, 1)))
754 temp.off = tree_to_shwi (TREE_OPERAND (ref, 1));
755 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
756 temp.base = MR_DEPENDENCE_BASE (ref);
757 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
758 break;
759 case BIT_FIELD_REF:
760 /* Record bits, position and storage order. */
761 temp.op0 = TREE_OPERAND (ref, 1);
762 temp.op1 = TREE_OPERAND (ref, 2);
763 if (tree_fits_shwi_p (TREE_OPERAND (ref, 2)))
765 HOST_WIDE_INT off = tree_to_shwi (TREE_OPERAND (ref, 2));
766 if (off % BITS_PER_UNIT == 0)
767 temp.off = off / BITS_PER_UNIT;
769 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
770 break;
771 case COMPONENT_REF:
772 /* The field decl is enough to unambiguously specify the field,
773 a matching type is not necessary and a mismatching type
774 is always a spurious difference. */
775 temp.type = NULL_TREE;
776 temp.op0 = TREE_OPERAND (ref, 1);
777 temp.op1 = TREE_OPERAND (ref, 2);
779 tree this_offset = component_ref_field_offset (ref);
780 if (this_offset
781 && TREE_CODE (this_offset) == INTEGER_CST)
783 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
784 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
786 offset_int off
787 = (wi::to_offset (this_offset)
788 + wi::lrshift (wi::to_offset (bit_offset),
789 LOG2_BITS_PER_UNIT));
790 if (wi::fits_shwi_p (off)
791 /* Probibit value-numbering zero offset components
792 of addresses the same before the pass folding
793 __builtin_object_size had a chance to run
794 (checking cfun->after_inlining does the
795 trick here). */
796 && (TREE_CODE (orig) != ADDR_EXPR
797 || off != 0
798 || cfun->after_inlining))
799 temp.off = off.to_shwi ();
803 break;
804 case ARRAY_RANGE_REF:
805 case ARRAY_REF:
806 /* Record index as operand. */
807 temp.op0 = TREE_OPERAND (ref, 1);
808 /* Always record lower bounds and element size. */
809 temp.op1 = array_ref_low_bound (ref);
810 temp.op2 = array_ref_element_size (ref);
811 if (TREE_CODE (temp.op0) == INTEGER_CST
812 && TREE_CODE (temp.op1) == INTEGER_CST
813 && TREE_CODE (temp.op2) == INTEGER_CST)
815 offset_int off = ((wi::to_offset (temp.op0)
816 - wi::to_offset (temp.op1))
817 * wi::to_offset (temp.op2));
818 if (wi::fits_shwi_p (off))
819 temp.off = off.to_shwi();
821 break;
822 case VAR_DECL:
823 if (DECL_HARD_REGISTER (ref))
825 temp.op0 = ref;
826 break;
828 /* Fallthru. */
829 case PARM_DECL:
830 case CONST_DECL:
831 case RESULT_DECL:
832 /* Canonicalize decls to MEM[&decl] which is what we end up with
833 when valueizing MEM[ptr] with ptr = &decl. */
834 temp.opcode = MEM_REF;
835 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
836 temp.off = 0;
837 result->safe_push (temp);
838 temp.opcode = ADDR_EXPR;
839 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
840 temp.type = TREE_TYPE (temp.op0);
841 temp.off = -1;
842 break;
843 case STRING_CST:
844 case INTEGER_CST:
845 case COMPLEX_CST:
846 case VECTOR_CST:
847 case REAL_CST:
848 case FIXED_CST:
849 case CONSTRUCTOR:
850 case SSA_NAME:
851 temp.op0 = ref;
852 break;
853 case ADDR_EXPR:
854 if (is_gimple_min_invariant (ref))
856 temp.op0 = ref;
857 break;
859 break;
860 /* These are only interesting for their operands, their
861 existence, and their type. They will never be the last
862 ref in the chain of references (IE they require an
863 operand), so we don't have to put anything
864 for op* as it will be handled by the iteration */
865 case REALPART_EXPR:
866 temp.off = 0;
867 break;
868 case VIEW_CONVERT_EXPR:
869 temp.off = 0;
870 temp.reverse = storage_order_barrier_p (ref);
871 break;
872 case IMAGPART_EXPR:
873 /* This is only interesting for its constant offset. */
874 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
875 break;
876 default:
877 gcc_unreachable ();
879 result->safe_push (temp);
881 if (REFERENCE_CLASS_P (ref)
882 || TREE_CODE (ref) == MODIFY_EXPR
883 || TREE_CODE (ref) == WITH_SIZE_EXPR
884 || (TREE_CODE (ref) == ADDR_EXPR
885 && !is_gimple_min_invariant (ref)))
886 ref = TREE_OPERAND (ref, 0);
887 else
888 ref = NULL_TREE;
892 /* Build a alias-oracle reference abstraction in *REF from the vn_reference
893 operands in *OPS, the reference alias set SET and the reference type TYPE.
894 Return true if something useful was produced. */
896 bool
897 ao_ref_init_from_vn_reference (ao_ref *ref,
898 alias_set_type set, tree type,
899 vec<vn_reference_op_s> ops)
901 vn_reference_op_t op;
902 unsigned i;
903 tree base = NULL_TREE;
904 tree *op0_p = &base;
905 offset_int offset = 0;
906 offset_int max_size;
907 offset_int size = -1;
908 tree size_tree = NULL_TREE;
909 alias_set_type base_alias_set = -1;
911 /* First get the final access size from just the outermost expression. */
912 op = &ops[0];
913 if (op->opcode == COMPONENT_REF)
914 size_tree = DECL_SIZE (op->op0);
915 else if (op->opcode == BIT_FIELD_REF)
916 size_tree = op->op0;
917 else
919 machine_mode mode = TYPE_MODE (type);
920 if (mode == BLKmode)
921 size_tree = TYPE_SIZE (type);
922 else
923 size = int (GET_MODE_BITSIZE (mode));
925 if (size_tree != NULL_TREE
926 && TREE_CODE (size_tree) == INTEGER_CST)
927 size = wi::to_offset (size_tree);
929 /* Initially, maxsize is the same as the accessed element size.
930 In the following it will only grow (or become -1). */
931 max_size = size;
933 /* Compute cumulative bit-offset for nested component-refs and array-refs,
934 and find the ultimate containing object. */
935 FOR_EACH_VEC_ELT (ops, i, op)
937 switch (op->opcode)
939 /* These may be in the reference ops, but we cannot do anything
940 sensible with them here. */
941 case ADDR_EXPR:
942 /* Apart from ADDR_EXPR arguments to MEM_REF. */
943 if (base != NULL_TREE
944 && TREE_CODE (base) == MEM_REF
945 && op->op0
946 && DECL_P (TREE_OPERAND (op->op0, 0)))
948 vn_reference_op_t pop = &ops[i-1];
949 base = TREE_OPERAND (op->op0, 0);
950 if (pop->off == -1)
952 max_size = -1;
953 offset = 0;
955 else
956 offset += pop->off * BITS_PER_UNIT;
957 op0_p = NULL;
958 break;
960 /* Fallthru. */
961 case CALL_EXPR:
962 return false;
964 /* Record the base objects. */
965 case MEM_REF:
966 base_alias_set = get_deref_alias_set (op->op0);
967 *op0_p = build2 (MEM_REF, op->type,
968 NULL_TREE, op->op0);
969 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
970 MR_DEPENDENCE_BASE (*op0_p) = op->base;
971 op0_p = &TREE_OPERAND (*op0_p, 0);
972 break;
974 case VAR_DECL:
975 case PARM_DECL:
976 case RESULT_DECL:
977 case SSA_NAME:
978 *op0_p = op->op0;
979 op0_p = NULL;
980 break;
982 /* And now the usual component-reference style ops. */
983 case BIT_FIELD_REF:
984 offset += wi::to_offset (op->op1);
985 break;
987 case COMPONENT_REF:
989 tree field = op->op0;
990 /* We do not have a complete COMPONENT_REF tree here so we
991 cannot use component_ref_field_offset. Do the interesting
992 parts manually. */
993 tree this_offset = DECL_FIELD_OFFSET (field);
995 if (op->op1 || TREE_CODE (this_offset) != INTEGER_CST)
996 max_size = -1;
997 else
999 offset_int woffset = wi::lshift (wi::to_offset (this_offset),
1000 LOG2_BITS_PER_UNIT);
1001 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1002 offset += woffset;
1004 break;
1007 case ARRAY_RANGE_REF:
1008 case ARRAY_REF:
1009 /* We recorded the lower bound and the element size. */
1010 if (TREE_CODE (op->op0) != INTEGER_CST
1011 || TREE_CODE (op->op1) != INTEGER_CST
1012 || TREE_CODE (op->op2) != INTEGER_CST)
1013 max_size = -1;
1014 else
1016 offset_int woffset
1017 = wi::sext (wi::to_offset (op->op0) - wi::to_offset (op->op1),
1018 TYPE_PRECISION (TREE_TYPE (op->op0)));
1019 woffset *= wi::to_offset (op->op2);
1020 woffset = wi::lshift (woffset, LOG2_BITS_PER_UNIT);
1021 offset += woffset;
1023 break;
1025 case REALPART_EXPR:
1026 break;
1028 case IMAGPART_EXPR:
1029 offset += size;
1030 break;
1032 case VIEW_CONVERT_EXPR:
1033 break;
1035 case STRING_CST:
1036 case INTEGER_CST:
1037 case COMPLEX_CST:
1038 case VECTOR_CST:
1039 case REAL_CST:
1040 case CONSTRUCTOR:
1041 case CONST_DECL:
1042 return false;
1044 default:
1045 return false;
1049 if (base == NULL_TREE)
1050 return false;
1052 ref->ref = NULL_TREE;
1053 ref->base = base;
1054 ref->ref_alias_set = set;
1055 if (base_alias_set != -1)
1056 ref->base_alias_set = base_alias_set;
1057 else
1058 ref->base_alias_set = get_alias_set (base);
1059 /* We discount volatiles from value-numbering elsewhere. */
1060 ref->volatile_p = false;
1062 if (!wi::fits_shwi_p (size) || wi::neg_p (size))
1064 ref->offset = 0;
1065 ref->size = -1;
1066 ref->max_size = -1;
1067 return true;
1070 ref->size = size.to_shwi ();
1072 if (!wi::fits_shwi_p (offset))
1074 ref->offset = 0;
1075 ref->max_size = -1;
1076 return true;
1079 ref->offset = offset.to_shwi ();
1081 if (!wi::fits_shwi_p (max_size) || wi::neg_p (max_size))
1082 ref->max_size = -1;
1083 else
1084 ref->max_size = max_size.to_shwi ();
1086 return true;
1089 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1090 vn_reference_op_s's. */
1092 static void
1093 copy_reference_ops_from_call (gcall *call,
1094 vec<vn_reference_op_s> *result)
1096 vn_reference_op_s temp;
1097 unsigned i;
1098 tree lhs = gimple_call_lhs (call);
1099 int lr;
1101 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1102 different. By adding the lhs here in the vector, we ensure that the
1103 hashcode is different, guaranteeing a different value number. */
1104 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1106 memset (&temp, 0, sizeof (temp));
1107 temp.opcode = MODIFY_EXPR;
1108 temp.type = TREE_TYPE (lhs);
1109 temp.op0 = lhs;
1110 temp.off = -1;
1111 result->safe_push (temp);
1114 /* Copy the type, opcode, function, static chain and EH region, if any. */
1115 memset (&temp, 0, sizeof (temp));
1116 temp.type = gimple_call_return_type (call);
1117 temp.opcode = CALL_EXPR;
1118 temp.op0 = gimple_call_fn (call);
1119 temp.op1 = gimple_call_chain (call);
1120 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1121 temp.op2 = size_int (lr);
1122 temp.off = -1;
1123 if (gimple_call_with_bounds_p (call))
1124 temp.with_bounds = 1;
1125 result->safe_push (temp);
1127 /* Copy the call arguments. As they can be references as well,
1128 just chain them together. */
1129 for (i = 0; i < gimple_call_num_args (call); ++i)
1131 tree callarg = gimple_call_arg (call, i);
1132 copy_reference_ops_from_ref (callarg, result);
1136 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1137 *I_P to point to the last element of the replacement. */
1138 static bool
1139 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1140 unsigned int *i_p)
1142 unsigned int i = *i_p;
1143 vn_reference_op_t op = &(*ops)[i];
1144 vn_reference_op_t mem_op = &(*ops)[i - 1];
1145 tree addr_base;
1146 HOST_WIDE_INT addr_offset = 0;
1148 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1149 from .foo.bar to the preceding MEM_REF offset and replace the
1150 address with &OBJ. */
1151 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1152 &addr_offset);
1153 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1154 if (addr_base != TREE_OPERAND (op->op0, 0))
1156 offset_int off = offset_int::from (mem_op->op0, SIGNED);
1157 off += addr_offset;
1158 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1159 op->op0 = build_fold_addr_expr (addr_base);
1160 if (tree_fits_shwi_p (mem_op->op0))
1161 mem_op->off = tree_to_shwi (mem_op->op0);
1162 else
1163 mem_op->off = -1;
1164 return true;
1166 return false;
1169 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1170 *I_P to point to the last element of the replacement. */
1171 static bool
1172 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1173 unsigned int *i_p)
1175 unsigned int i = *i_p;
1176 vn_reference_op_t op = &(*ops)[i];
1177 vn_reference_op_t mem_op = &(*ops)[i - 1];
1178 gimple *def_stmt;
1179 enum tree_code code;
1180 offset_int off;
1182 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1183 if (!is_gimple_assign (def_stmt))
1184 return false;
1186 code = gimple_assign_rhs_code (def_stmt);
1187 if (code != ADDR_EXPR
1188 && code != POINTER_PLUS_EXPR)
1189 return false;
1191 off = offset_int::from (mem_op->op0, SIGNED);
1193 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1194 from .foo.bar to the preceding MEM_REF offset and replace the
1195 address with &OBJ. */
1196 if (code == ADDR_EXPR)
1198 tree addr, addr_base;
1199 HOST_WIDE_INT addr_offset;
1201 addr = gimple_assign_rhs1 (def_stmt);
1202 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1203 &addr_offset);
1204 /* If that didn't work because the address isn't invariant propagate
1205 the reference tree from the address operation in case the current
1206 dereference isn't offsetted. */
1207 if (!addr_base
1208 && *i_p == ops->length () - 1
1209 && off == 0
1210 /* This makes us disable this transform for PRE where the
1211 reference ops might be also used for code insertion which
1212 is invalid. */
1213 && default_vn_walk_kind == VN_WALKREWRITE)
1215 auto_vec<vn_reference_op_s, 32> tem;
1216 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1217 ops->pop ();
1218 ops->pop ();
1219 ops->safe_splice (tem);
1220 --*i_p;
1221 return true;
1223 if (!addr_base
1224 || TREE_CODE (addr_base) != MEM_REF)
1225 return false;
1227 off += addr_offset;
1228 off += mem_ref_offset (addr_base);
1229 op->op0 = TREE_OPERAND (addr_base, 0);
1231 else
1233 tree ptr, ptroff;
1234 ptr = gimple_assign_rhs1 (def_stmt);
1235 ptroff = gimple_assign_rhs2 (def_stmt);
1236 if (TREE_CODE (ptr) != SSA_NAME
1237 || TREE_CODE (ptroff) != INTEGER_CST)
1238 return false;
1240 off += wi::to_offset (ptroff);
1241 op->op0 = ptr;
1244 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1245 if (tree_fits_shwi_p (mem_op->op0))
1246 mem_op->off = tree_to_shwi (mem_op->op0);
1247 else
1248 mem_op->off = -1;
1249 if (TREE_CODE (op->op0) == SSA_NAME)
1250 op->op0 = SSA_VAL (op->op0);
1251 if (TREE_CODE (op->op0) != SSA_NAME)
1252 op->opcode = TREE_CODE (op->op0);
1254 /* And recurse. */
1255 if (TREE_CODE (op->op0) == SSA_NAME)
1256 vn_reference_maybe_forwprop_address (ops, i_p);
1257 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1258 vn_reference_fold_indirect (ops, i_p);
1259 return true;
1262 /* Optimize the reference REF to a constant if possible or return
1263 NULL_TREE if not. */
1265 tree
1266 fully_constant_vn_reference_p (vn_reference_t ref)
1268 vec<vn_reference_op_s> operands = ref->operands;
1269 vn_reference_op_t op;
1271 /* Try to simplify the translated expression if it is
1272 a call to a builtin function with at most two arguments. */
1273 op = &operands[0];
1274 if (op->opcode == CALL_EXPR
1275 && TREE_CODE (op->op0) == ADDR_EXPR
1276 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1277 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1278 && operands.length () >= 2
1279 && operands.length () <= 3)
1281 vn_reference_op_t arg0, arg1 = NULL;
1282 bool anyconst = false;
1283 arg0 = &operands[1];
1284 if (operands.length () > 2)
1285 arg1 = &operands[2];
1286 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1287 || (arg0->opcode == ADDR_EXPR
1288 && is_gimple_min_invariant (arg0->op0)))
1289 anyconst = true;
1290 if (arg1
1291 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1292 || (arg1->opcode == ADDR_EXPR
1293 && is_gimple_min_invariant (arg1->op0))))
1294 anyconst = true;
1295 if (anyconst)
1297 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1298 arg1 ? 2 : 1,
1299 arg0->op0,
1300 arg1 ? arg1->op0 : NULL);
1301 if (folded
1302 && TREE_CODE (folded) == NOP_EXPR)
1303 folded = TREE_OPERAND (folded, 0);
1304 if (folded
1305 && is_gimple_min_invariant (folded))
1306 return folded;
1310 /* Simplify reads from constants or constant initializers. */
1311 else if (BITS_PER_UNIT == 8
1312 && is_gimple_reg_type (ref->type)
1313 && (!INTEGRAL_TYPE_P (ref->type)
1314 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1316 HOST_WIDE_INT off = 0;
1317 HOST_WIDE_INT size;
1318 if (INTEGRAL_TYPE_P (ref->type))
1319 size = TYPE_PRECISION (ref->type);
1320 else
1321 size = tree_to_shwi (TYPE_SIZE (ref->type));
1322 if (size % BITS_PER_UNIT != 0
1323 || size > MAX_BITSIZE_MODE_ANY_MODE)
1324 return NULL_TREE;
1325 size /= BITS_PER_UNIT;
1326 unsigned i;
1327 for (i = 0; i < operands.length (); ++i)
1329 if (operands[i].off == -1)
1330 return NULL_TREE;
1331 off += operands[i].off;
1332 if (operands[i].opcode == MEM_REF)
1334 ++i;
1335 break;
1338 vn_reference_op_t base = &operands[--i];
1339 tree ctor = error_mark_node;
1340 tree decl = NULL_TREE;
1341 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1342 ctor = base->op0;
1343 else if (base->opcode == MEM_REF
1344 && base[1].opcode == ADDR_EXPR
1345 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1346 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
1348 decl = TREE_OPERAND (base[1].op0, 0);
1349 ctor = ctor_for_folding (decl);
1351 if (ctor == NULL_TREE)
1352 return build_zero_cst (ref->type);
1353 else if (ctor != error_mark_node)
1355 if (decl)
1357 tree res = fold_ctor_reference (ref->type, ctor,
1358 off * BITS_PER_UNIT,
1359 size * BITS_PER_UNIT, decl);
1360 if (res)
1362 STRIP_USELESS_TYPE_CONVERSION (res);
1363 if (is_gimple_min_invariant (res))
1364 return res;
1367 else
1369 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1370 if (native_encode_expr (ctor, buf, size, off) > 0)
1371 return native_interpret_expr (ref->type, buf, size);
1376 return NULL_TREE;
1379 /* Return true if OPS contain a storage order barrier. */
1381 static bool
1382 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1384 vn_reference_op_t op;
1385 unsigned i;
1387 FOR_EACH_VEC_ELT (ops, i, op)
1388 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1389 return true;
1391 return false;
1394 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1395 structures into their value numbers. This is done in-place, and
1396 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1397 whether any operands were valueized. */
1399 static vec<vn_reference_op_s>
1400 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1402 vn_reference_op_t vro;
1403 unsigned int i;
1405 *valueized_anything = false;
1407 FOR_EACH_VEC_ELT (orig, i, vro)
1409 if (vro->opcode == SSA_NAME
1410 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1412 tree tem = SSA_VAL (vro->op0);
1413 if (tem != vro->op0)
1415 *valueized_anything = true;
1416 vro->op0 = tem;
1418 /* If it transforms from an SSA_NAME to a constant, update
1419 the opcode. */
1420 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1421 vro->opcode = TREE_CODE (vro->op0);
1423 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1425 tree tem = SSA_VAL (vro->op1);
1426 if (tem != vro->op1)
1428 *valueized_anything = true;
1429 vro->op1 = tem;
1432 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1434 tree tem = SSA_VAL (vro->op2);
1435 if (tem != vro->op2)
1437 *valueized_anything = true;
1438 vro->op2 = tem;
1441 /* If it transforms from an SSA_NAME to an address, fold with
1442 a preceding indirect reference. */
1443 if (i > 0
1444 && vro->op0
1445 && TREE_CODE (vro->op0) == ADDR_EXPR
1446 && orig[i - 1].opcode == MEM_REF)
1448 if (vn_reference_fold_indirect (&orig, &i))
1449 *valueized_anything = true;
1451 else if (i > 0
1452 && vro->opcode == SSA_NAME
1453 && orig[i - 1].opcode == MEM_REF)
1455 if (vn_reference_maybe_forwprop_address (&orig, &i))
1456 *valueized_anything = true;
1458 /* If it transforms a non-constant ARRAY_REF into a constant
1459 one, adjust the constant offset. */
1460 else if (vro->opcode == ARRAY_REF
1461 && vro->off == -1
1462 && TREE_CODE (vro->op0) == INTEGER_CST
1463 && TREE_CODE (vro->op1) == INTEGER_CST
1464 && TREE_CODE (vro->op2) == INTEGER_CST)
1466 offset_int off = ((wi::to_offset (vro->op0)
1467 - wi::to_offset (vro->op1))
1468 * wi::to_offset (vro->op2));
1469 if (wi::fits_shwi_p (off))
1470 vro->off = off.to_shwi ();
1474 return orig;
1477 static vec<vn_reference_op_s>
1478 valueize_refs (vec<vn_reference_op_s> orig)
1480 bool tem;
1481 return valueize_refs_1 (orig, &tem);
1484 static vec<vn_reference_op_s> shared_lookup_references;
1486 /* Create a vector of vn_reference_op_s structures from REF, a
1487 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1488 this function. *VALUEIZED_ANYTHING will specify whether any
1489 operands were valueized. */
1491 static vec<vn_reference_op_s>
1492 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1494 if (!ref)
1495 return vNULL;
1496 shared_lookup_references.truncate (0);
1497 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1498 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1499 valueized_anything);
1500 return shared_lookup_references;
1503 /* Create a vector of vn_reference_op_s structures from CALL, a
1504 call statement. The vector is shared among all callers of
1505 this function. */
1507 static vec<vn_reference_op_s>
1508 valueize_shared_reference_ops_from_call (gcall *call)
1510 if (!call)
1511 return vNULL;
1512 shared_lookup_references.truncate (0);
1513 copy_reference_ops_from_call (call, &shared_lookup_references);
1514 shared_lookup_references = valueize_refs (shared_lookup_references);
1515 return shared_lookup_references;
1518 /* Lookup a SCCVN reference operation VR in the current hash table.
1519 Returns the resulting value number if it exists in the hash table,
1520 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1521 vn_reference_t stored in the hashtable if something is found. */
1523 static tree
1524 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1526 vn_reference_s **slot;
1527 hashval_t hash;
1529 hash = vr->hashcode;
1530 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1531 if (!slot && current_info == optimistic_info)
1532 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1533 if (slot)
1535 if (vnresult)
1536 *vnresult = (vn_reference_t)*slot;
1537 return ((vn_reference_t)*slot)->result;
1540 return NULL_TREE;
1543 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1544 with the current VUSE and performs the expression lookup. */
1546 static void *
1547 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1548 unsigned int cnt, void *vr_)
1550 vn_reference_t vr = (vn_reference_t)vr_;
1551 vn_reference_s **slot;
1552 hashval_t hash;
1554 /* This bounds the stmt walks we perform on reference lookups
1555 to O(1) instead of O(N) where N is the number of dominating
1556 stores. */
1557 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1558 return (void *)-1;
1560 if (last_vuse_ptr)
1561 *last_vuse_ptr = vuse;
1563 /* Fixup vuse and hash. */
1564 if (vr->vuse)
1565 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1566 vr->vuse = vuse_ssa_val (vuse);
1567 if (vr->vuse)
1568 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1570 hash = vr->hashcode;
1571 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1572 if (!slot && current_info == optimistic_info)
1573 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1574 if (slot)
1575 return *slot;
1577 return NULL;
1580 /* Lookup an existing or insert a new vn_reference entry into the
1581 value table for the VUSE, SET, TYPE, OPERANDS reference which
1582 has the value VALUE which is either a constant or an SSA name. */
1584 static vn_reference_t
1585 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1586 alias_set_type set,
1587 tree type,
1588 vec<vn_reference_op_s,
1589 va_heap> operands,
1590 tree value)
1592 vn_reference_s vr1;
1593 vn_reference_t result;
1594 unsigned value_id;
1595 vr1.vuse = vuse;
1596 vr1.operands = operands;
1597 vr1.type = type;
1598 vr1.set = set;
1599 vr1.hashcode = vn_reference_compute_hash (&vr1);
1600 if (vn_reference_lookup_1 (&vr1, &result))
1601 return result;
1602 if (TREE_CODE (value) == SSA_NAME)
1603 value_id = VN_INFO (value)->value_id;
1604 else
1605 value_id = get_or_alloc_constant_value_id (value);
1606 return vn_reference_insert_pieces (vuse, set, type,
1607 operands.copy (), value, value_id);
1610 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1611 from the statement defining VUSE and if not successful tries to
1612 translate *REFP and VR_ through an aggregate copy at the definition
1613 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1614 of *REF and *VR. If only disambiguation was performed then
1615 *DISAMBIGUATE_ONLY is set to true. */
1617 static void *
1618 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1619 bool *disambiguate_only)
1621 vn_reference_t vr = (vn_reference_t)vr_;
1622 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1623 tree base = ao_ref_base (ref);
1624 HOST_WIDE_INT offset, maxsize;
1625 static vec<vn_reference_op_s>
1626 lhs_ops = vNULL;
1627 ao_ref lhs_ref;
1628 bool lhs_ref_ok = false;
1630 /* If the reference is based on a parameter that was determined as
1631 pointing to readonly memory it doesn't change. */
1632 if (TREE_CODE (base) == MEM_REF
1633 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1634 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1635 && bitmap_bit_p (const_parms,
1636 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1638 *disambiguate_only = true;
1639 return NULL;
1642 /* First try to disambiguate after value-replacing in the definitions LHS. */
1643 if (is_gimple_assign (def_stmt))
1645 tree lhs = gimple_assign_lhs (def_stmt);
1646 bool valueized_anything = false;
1647 /* Avoid re-allocation overhead. */
1648 lhs_ops.truncate (0);
1649 copy_reference_ops_from_ref (lhs, &lhs_ops);
1650 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1651 if (valueized_anything)
1653 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1654 get_alias_set (lhs),
1655 TREE_TYPE (lhs), lhs_ops);
1656 if (lhs_ref_ok
1657 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1659 *disambiguate_only = true;
1660 return NULL;
1663 else
1665 ao_ref_init (&lhs_ref, lhs);
1666 lhs_ref_ok = true;
1669 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1670 && gimple_call_num_args (def_stmt) <= 4)
1672 /* For builtin calls valueize its arguments and call the
1673 alias oracle again. Valueization may improve points-to
1674 info of pointers and constify size and position arguments.
1675 Originally this was motivated by PR61034 which has
1676 conditional calls to free falsely clobbering ref because
1677 of imprecise points-to info of the argument. */
1678 tree oldargs[4];
1679 bool valueized_anything = false;
1680 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1682 oldargs[i] = gimple_call_arg (def_stmt, i);
1683 if (TREE_CODE (oldargs[i]) == SSA_NAME
1684 && VN_INFO (oldargs[i])->valnum != oldargs[i])
1686 gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
1687 valueized_anything = true;
1690 if (valueized_anything)
1692 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1693 ref);
1694 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1695 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1696 if (!res)
1698 *disambiguate_only = true;
1699 return NULL;
1704 if (*disambiguate_only)
1705 return (void *)-1;
1707 offset = ref->offset;
1708 maxsize = ref->max_size;
1710 /* If we cannot constrain the size of the reference we cannot
1711 test if anything kills it. */
1712 if (maxsize == -1)
1713 return (void *)-1;
1715 /* We can't deduce anything useful from clobbers. */
1716 if (gimple_clobber_p (def_stmt))
1717 return (void *)-1;
1719 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1720 from that definition.
1721 1) Memset. */
1722 if (is_gimple_reg_type (vr->type)
1723 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1724 && integer_zerop (gimple_call_arg (def_stmt, 1))
1725 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1726 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1728 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1729 tree base2;
1730 HOST_WIDE_INT offset2, size2, maxsize2;
1731 bool reverse;
1732 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1733 &reverse);
1734 size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
1735 if ((unsigned HOST_WIDE_INT)size2 / 8
1736 == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
1737 && maxsize2 != -1
1738 && operand_equal_p (base, base2, 0)
1739 && offset2 <= offset
1740 && offset2 + size2 >= offset + maxsize)
1742 tree val = build_zero_cst (vr->type);
1743 return vn_reference_lookup_or_insert_for_pieces
1744 (vuse, vr->set, vr->type, vr->operands, val);
1748 /* 2) Assignment from an empty CONSTRUCTOR. */
1749 else if (is_gimple_reg_type (vr->type)
1750 && gimple_assign_single_p (def_stmt)
1751 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1752 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1754 tree base2;
1755 HOST_WIDE_INT offset2, size2, maxsize2;
1756 bool reverse;
1757 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1758 &offset2, &size2, &maxsize2, &reverse);
1759 if (maxsize2 != -1
1760 && operand_equal_p (base, base2, 0)
1761 && offset2 <= offset
1762 && offset2 + size2 >= offset + maxsize)
1764 tree val = build_zero_cst (vr->type);
1765 return vn_reference_lookup_or_insert_for_pieces
1766 (vuse, vr->set, vr->type, vr->operands, val);
1770 /* 3) Assignment from a constant. We can use folds native encode/interpret
1771 routines to extract the assigned bits. */
1772 else if (vn_walk_kind == VN_WALKREWRITE
1773 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1774 && ref->size == maxsize
1775 && maxsize % BITS_PER_UNIT == 0
1776 && offset % BITS_PER_UNIT == 0
1777 && is_gimple_reg_type (vr->type)
1778 && !contains_storage_order_barrier_p (vr->operands)
1779 && gimple_assign_single_p (def_stmt)
1780 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
1782 tree base2;
1783 HOST_WIDE_INT offset2, size2, maxsize2;
1784 bool reverse;
1785 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1786 &offset2, &size2, &maxsize2, &reverse);
1787 if (!reverse
1788 && maxsize2 != -1
1789 && maxsize2 == size2
1790 && size2 % BITS_PER_UNIT == 0
1791 && offset2 % BITS_PER_UNIT == 0
1792 && operand_equal_p (base, base2, 0)
1793 && offset2 <= offset
1794 && offset2 + size2 >= offset + maxsize)
1796 /* We support up to 512-bit values (for V8DFmode). */
1797 unsigned char buffer[64];
1798 int len;
1800 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1801 buffer, sizeof (buffer));
1802 if (len > 0)
1804 tree val = native_interpret_expr (vr->type,
1805 buffer
1806 + ((offset - offset2)
1807 / BITS_PER_UNIT),
1808 ref->size / BITS_PER_UNIT);
1809 if (val)
1810 return vn_reference_lookup_or_insert_for_pieces
1811 (vuse, vr->set, vr->type, vr->operands, val);
1816 /* 4) Assignment from an SSA name which definition we may be able
1817 to access pieces from. */
1818 else if (ref->size == maxsize
1819 && is_gimple_reg_type (vr->type)
1820 && !contains_storage_order_barrier_p (vr->operands)
1821 && gimple_assign_single_p (def_stmt)
1822 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1824 tree rhs1 = gimple_assign_rhs1 (def_stmt);
1825 gimple *def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
1826 if (is_gimple_assign (def_stmt2)
1827 && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1828 || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1829 && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1831 tree base2;
1832 HOST_WIDE_INT offset2, size2, maxsize2, off;
1833 bool reverse;
1834 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1835 &offset2, &size2, &maxsize2,
1836 &reverse);
1837 off = offset - offset2;
1838 if (!reverse
1839 && maxsize2 != -1
1840 && maxsize2 == size2
1841 && operand_equal_p (base, base2, 0)
1842 && offset2 <= offset
1843 && offset2 + size2 >= offset + maxsize)
1845 tree val = NULL_TREE;
1846 HOST_WIDE_INT elsz
1847 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
1848 if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
1850 if (off == 0)
1851 val = gimple_assign_rhs1 (def_stmt2);
1852 else if (off == elsz)
1853 val = gimple_assign_rhs2 (def_stmt2);
1855 else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1856 && off % elsz == 0)
1858 tree ctor = gimple_assign_rhs1 (def_stmt2);
1859 unsigned i = off / elsz;
1860 if (i < CONSTRUCTOR_NELTS (ctor))
1862 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
1863 if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
1865 if (TREE_CODE (TREE_TYPE (elt->value))
1866 != VECTOR_TYPE)
1867 val = elt->value;
1871 if (val)
1872 return vn_reference_lookup_or_insert_for_pieces
1873 (vuse, vr->set, vr->type, vr->operands, val);
1878 /* 5) For aggregate copies translate the reference through them if
1879 the copy kills ref. */
1880 else if (vn_walk_kind == VN_WALKREWRITE
1881 && gimple_assign_single_p (def_stmt)
1882 && (DECL_P (gimple_assign_rhs1 (def_stmt))
1883 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
1884 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1886 tree base2;
1887 HOST_WIDE_INT maxsize2;
1888 int i, j, k;
1889 auto_vec<vn_reference_op_s> rhs;
1890 vn_reference_op_t vro;
1891 ao_ref r;
1893 if (!lhs_ref_ok)
1894 return (void *)-1;
1896 /* See if the assignment kills REF. */
1897 base2 = ao_ref_base (&lhs_ref);
1898 maxsize2 = lhs_ref.max_size;
1899 if (maxsize2 == -1
1900 || (base != base2
1901 && (TREE_CODE (base) != MEM_REF
1902 || TREE_CODE (base2) != MEM_REF
1903 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
1904 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
1905 TREE_OPERAND (base2, 1))))
1906 || !stmt_kills_ref_p (def_stmt, ref))
1907 return (void *)-1;
1909 /* Find the common base of ref and the lhs. lhs_ops already
1910 contains valueized operands for the lhs. */
1911 i = vr->operands.length () - 1;
1912 j = lhs_ops.length () - 1;
1913 while (j >= 0 && i >= 0
1914 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
1916 i--;
1917 j--;
1920 /* ??? The innermost op should always be a MEM_REF and we already
1921 checked that the assignment to the lhs kills vr. Thus for
1922 aggregate copies using char[] types the vn_reference_op_eq
1923 may fail when comparing types for compatibility. But we really
1924 don't care here - further lookups with the rewritten operands
1925 will simply fail if we messed up types too badly. */
1926 HOST_WIDE_INT extra_off = 0;
1927 if (j == 0 && i >= 0
1928 && lhs_ops[0].opcode == MEM_REF
1929 && lhs_ops[0].off != -1)
1931 if (lhs_ops[0].off == vr->operands[i].off)
1932 i--, j--;
1933 else if (vr->operands[i].opcode == MEM_REF
1934 && vr->operands[i].off != -1)
1936 extra_off = vr->operands[i].off - lhs_ops[0].off;
1937 i--, j--;
1941 /* i now points to the first additional op.
1942 ??? LHS may not be completely contained in VR, one or more
1943 VIEW_CONVERT_EXPRs could be in its way. We could at least
1944 try handling outermost VIEW_CONVERT_EXPRs. */
1945 if (j != -1)
1946 return (void *)-1;
1948 /* Punt if the additional ops contain a storage order barrier. */
1949 for (k = i; k >= 0; k--)
1951 vro = &vr->operands[k];
1952 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
1953 return (void *)-1;
1956 /* Now re-write REF to be based on the rhs of the assignment. */
1957 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1959 /* Apply an extra offset to the inner MEM_REF of the RHS. */
1960 if (extra_off != 0)
1962 if (rhs.length () < 2
1963 || rhs[0].opcode != MEM_REF
1964 || rhs[0].off == -1)
1965 return (void *)-1;
1966 rhs[0].off += extra_off;
1967 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
1968 build_int_cst (TREE_TYPE (rhs[0].op0),
1969 extra_off));
1972 /* We need to pre-pend vr->operands[0..i] to rhs. */
1973 vec<vn_reference_op_s> old = vr->operands;
1974 if (i + 1 + rhs.length () > vr->operands.length ())
1976 vr->operands.safe_grow (i + 1 + rhs.length ());
1977 if (old == shared_lookup_references)
1978 shared_lookup_references = vr->operands;
1980 else
1981 vr->operands.truncate (i + 1 + rhs.length ());
1982 FOR_EACH_VEC_ELT (rhs, j, vro)
1983 vr->operands[i + 1 + j] = *vro;
1984 vr->operands = valueize_refs (vr->operands);
1985 if (old == shared_lookup_references)
1986 shared_lookup_references = vr->operands;
1987 vr->hashcode = vn_reference_compute_hash (vr);
1989 /* Try folding the new reference to a constant. */
1990 tree val = fully_constant_vn_reference_p (vr);
1991 if (val)
1992 return vn_reference_lookup_or_insert_for_pieces
1993 (vuse, vr->set, vr->type, vr->operands, val);
1995 /* Adjust *ref from the new operands. */
1996 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1997 return (void *)-1;
1998 /* This can happen with bitfields. */
1999 if (ref->size != r.size)
2000 return (void *)-1;
2001 *ref = r;
2003 /* Do not update last seen VUSE after translating. */
2004 last_vuse_ptr = NULL;
2006 /* Keep looking for the adjusted *REF / VR pair. */
2007 return NULL;
2010 /* 6) For memcpy copies translate the reference through them if
2011 the copy kills ref. */
2012 else if (vn_walk_kind == VN_WALKREWRITE
2013 && is_gimple_reg_type (vr->type)
2014 /* ??? Handle BCOPY as well. */
2015 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2016 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2017 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2018 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2019 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2020 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2021 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2022 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
2024 tree lhs, rhs;
2025 ao_ref r;
2026 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
2027 vn_reference_op_s op;
2028 HOST_WIDE_INT at;
2030 /* Only handle non-variable, addressable refs. */
2031 if (ref->size != maxsize
2032 || offset % BITS_PER_UNIT != 0
2033 || ref->size % BITS_PER_UNIT != 0)
2034 return (void *)-1;
2036 /* Extract a pointer base and an offset for the destination. */
2037 lhs = gimple_call_arg (def_stmt, 0);
2038 lhs_offset = 0;
2039 if (TREE_CODE (lhs) == SSA_NAME)
2041 lhs = SSA_VAL (lhs);
2042 if (TREE_CODE (lhs) == SSA_NAME)
2044 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2045 if (gimple_assign_single_p (def_stmt)
2046 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2047 lhs = gimple_assign_rhs1 (def_stmt);
2050 if (TREE_CODE (lhs) == ADDR_EXPR)
2052 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2053 &lhs_offset);
2054 if (!tem)
2055 return (void *)-1;
2056 if (TREE_CODE (tem) == MEM_REF
2057 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2059 lhs = TREE_OPERAND (tem, 0);
2060 if (TREE_CODE (lhs) == SSA_NAME)
2061 lhs = SSA_VAL (lhs);
2062 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2064 else if (DECL_P (tem))
2065 lhs = build_fold_addr_expr (tem);
2066 else
2067 return (void *)-1;
2069 if (TREE_CODE (lhs) != SSA_NAME
2070 && TREE_CODE (lhs) != ADDR_EXPR)
2071 return (void *)-1;
2073 /* Extract a pointer base and an offset for the source. */
2074 rhs = gimple_call_arg (def_stmt, 1);
2075 rhs_offset = 0;
2076 if (TREE_CODE (rhs) == SSA_NAME)
2077 rhs = SSA_VAL (rhs);
2078 if (TREE_CODE (rhs) == ADDR_EXPR)
2080 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2081 &rhs_offset);
2082 if (!tem)
2083 return (void *)-1;
2084 if (TREE_CODE (tem) == MEM_REF
2085 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2087 rhs = TREE_OPERAND (tem, 0);
2088 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2090 else if (DECL_P (tem))
2091 rhs = build_fold_addr_expr (tem);
2092 else
2093 return (void *)-1;
2095 if (TREE_CODE (rhs) != SSA_NAME
2096 && TREE_CODE (rhs) != ADDR_EXPR)
2097 return (void *)-1;
2099 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2101 /* The bases of the destination and the references have to agree. */
2102 if ((TREE_CODE (base) != MEM_REF
2103 && !DECL_P (base))
2104 || (TREE_CODE (base) == MEM_REF
2105 && (TREE_OPERAND (base, 0) != lhs
2106 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
2107 || (DECL_P (base)
2108 && (TREE_CODE (lhs) != ADDR_EXPR
2109 || TREE_OPERAND (lhs, 0) != base)))
2110 return (void *)-1;
2112 at = offset / BITS_PER_UNIT;
2113 if (TREE_CODE (base) == MEM_REF)
2114 at += tree_to_uhwi (TREE_OPERAND (base, 1));
2115 /* If the access is completely outside of the memcpy destination
2116 area there is no aliasing. */
2117 if (lhs_offset >= at + maxsize / BITS_PER_UNIT
2118 || lhs_offset + copy_size <= at)
2119 return NULL;
2120 /* And the access has to be contained within the memcpy destination. */
2121 if (lhs_offset > at
2122 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2123 return (void *)-1;
2125 /* Make room for 2 operands in the new reference. */
2126 if (vr->operands.length () < 2)
2128 vec<vn_reference_op_s> old = vr->operands;
2129 vr->operands.safe_grow_cleared (2);
2130 if (old == shared_lookup_references
2131 && vr->operands != old)
2132 shared_lookup_references = vr->operands;
2134 else
2135 vr->operands.truncate (2);
2137 /* The looked-through reference is a simple MEM_REF. */
2138 memset (&op, 0, sizeof (op));
2139 op.type = vr->type;
2140 op.opcode = MEM_REF;
2141 op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
2142 op.off = at - lhs_offset + rhs_offset;
2143 vr->operands[0] = op;
2144 op.type = TREE_TYPE (rhs);
2145 op.opcode = TREE_CODE (rhs);
2146 op.op0 = rhs;
2147 op.off = -1;
2148 vr->operands[1] = op;
2149 vr->hashcode = vn_reference_compute_hash (vr);
2151 /* Adjust *ref from the new operands. */
2152 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2153 return (void *)-1;
2154 /* This can happen with bitfields. */
2155 if (ref->size != r.size)
2156 return (void *)-1;
2157 *ref = r;
2159 /* Do not update last seen VUSE after translating. */
2160 last_vuse_ptr = NULL;
2162 /* Keep looking for the adjusted *REF / VR pair. */
2163 return NULL;
2166 /* Bail out and stop walking. */
2167 return (void *)-1;
2170 /* Lookup a reference operation by it's parts, in the current hash table.
2171 Returns the resulting value number if it exists in the hash table,
2172 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2173 vn_reference_t stored in the hashtable if something is found. */
2175 tree
2176 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2177 vec<vn_reference_op_s> operands,
2178 vn_reference_t *vnresult, vn_lookup_kind kind)
2180 struct vn_reference_s vr1;
2181 vn_reference_t tmp;
2182 tree cst;
2184 if (!vnresult)
2185 vnresult = &tmp;
2186 *vnresult = NULL;
2188 vr1.vuse = vuse_ssa_val (vuse);
2189 shared_lookup_references.truncate (0);
2190 shared_lookup_references.safe_grow (operands.length ());
2191 memcpy (shared_lookup_references.address (),
2192 operands.address (),
2193 sizeof (vn_reference_op_s)
2194 * operands.length ());
2195 vr1.operands = operands = shared_lookup_references
2196 = valueize_refs (shared_lookup_references);
2197 vr1.type = type;
2198 vr1.set = set;
2199 vr1.hashcode = vn_reference_compute_hash (&vr1);
2200 if ((cst = fully_constant_vn_reference_p (&vr1)))
2201 return cst;
2203 vn_reference_lookup_1 (&vr1, vnresult);
2204 if (!*vnresult
2205 && kind != VN_NOWALK
2206 && vr1.vuse)
2208 ao_ref r;
2209 vn_walk_kind = kind;
2210 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2211 *vnresult =
2212 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2213 vn_reference_lookup_2,
2214 vn_reference_lookup_3,
2215 vuse_ssa_val, &vr1);
2216 gcc_checking_assert (vr1.operands == shared_lookup_references);
2219 if (*vnresult)
2220 return (*vnresult)->result;
2222 return NULL_TREE;
2225 /* Lookup OP in the current hash table, and return the resulting value
2226 number if it exists in the hash table. Return NULL_TREE if it does
2227 not exist in the hash table or if the result field of the structure
2228 was NULL.. VNRESULT will be filled in with the vn_reference_t
2229 stored in the hashtable if one exists. */
2231 tree
2232 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2233 vn_reference_t *vnresult)
2235 vec<vn_reference_op_s> operands;
2236 struct vn_reference_s vr1;
2237 tree cst;
2238 bool valuezied_anything;
2240 if (vnresult)
2241 *vnresult = NULL;
2243 vr1.vuse = vuse_ssa_val (vuse);
2244 vr1.operands = operands
2245 = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
2246 vr1.type = TREE_TYPE (op);
2247 vr1.set = get_alias_set (op);
2248 vr1.hashcode = vn_reference_compute_hash (&vr1);
2249 if ((cst = fully_constant_vn_reference_p (&vr1)))
2250 return cst;
2252 if (kind != VN_NOWALK
2253 && vr1.vuse)
2255 vn_reference_t wvnresult;
2256 ao_ref r;
2257 /* Make sure to use a valueized reference if we valueized anything.
2258 Otherwise preserve the full reference for advanced TBAA. */
2259 if (!valuezied_anything
2260 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2261 vr1.operands))
2262 ao_ref_init (&r, op);
2263 vn_walk_kind = kind;
2264 wvnresult =
2265 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2266 vn_reference_lookup_2,
2267 vn_reference_lookup_3,
2268 vuse_ssa_val, &vr1);
2269 gcc_checking_assert (vr1.operands == shared_lookup_references);
2270 if (wvnresult)
2272 if (vnresult)
2273 *vnresult = wvnresult;
2274 return wvnresult->result;
2277 return NULL_TREE;
2280 return vn_reference_lookup_1 (&vr1, vnresult);
2283 /* Lookup CALL in the current hash table and return the entry in
2284 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2286 void
2287 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2288 vn_reference_t vr)
2290 if (vnresult)
2291 *vnresult = NULL;
2293 tree vuse = gimple_vuse (call);
2295 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2296 vr->operands = valueize_shared_reference_ops_from_call (call);
2297 vr->type = gimple_expr_type (call);
2298 vr->set = 0;
2299 vr->hashcode = vn_reference_compute_hash (vr);
2300 vn_reference_lookup_1 (vr, vnresult);
2303 /* Insert OP into the current hash table with a value number of
2304 RESULT, and return the resulting reference structure we created. */
2306 static vn_reference_t
2307 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2309 vn_reference_s **slot;
2310 vn_reference_t vr1;
2311 bool tem;
2313 vr1 = current_info->references_pool->allocate ();
2314 if (TREE_CODE (result) == SSA_NAME)
2315 vr1->value_id = VN_INFO (result)->value_id;
2316 else
2317 vr1->value_id = get_or_alloc_constant_value_id (result);
2318 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2319 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2320 vr1->type = TREE_TYPE (op);
2321 vr1->set = get_alias_set (op);
2322 vr1->hashcode = vn_reference_compute_hash (vr1);
2323 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2324 vr1->result_vdef = vdef;
2326 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2327 INSERT);
2329 /* Because we lookup stores using vuses, and value number failures
2330 using the vdefs (see visit_reference_op_store for how and why),
2331 it's possible that on failure we may try to insert an already
2332 inserted store. This is not wrong, there is no ssa name for a
2333 store that we could use as a differentiator anyway. Thus, unlike
2334 the other lookup functions, you cannot gcc_assert (!*slot)
2335 here. */
2337 /* But free the old slot in case of a collision. */
2338 if (*slot)
2339 free_reference (*slot);
2341 *slot = vr1;
2342 return vr1;
2345 /* Insert a reference by it's pieces into the current hash table with
2346 a value number of RESULT. Return the resulting reference
2347 structure we created. */
2349 vn_reference_t
2350 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2351 vec<vn_reference_op_s> operands,
2352 tree result, unsigned int value_id)
2355 vn_reference_s **slot;
2356 vn_reference_t vr1;
2358 vr1 = current_info->references_pool->allocate ();
2359 vr1->value_id = value_id;
2360 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2361 vr1->operands = valueize_refs (operands);
2362 vr1->type = type;
2363 vr1->set = set;
2364 vr1->hashcode = vn_reference_compute_hash (vr1);
2365 if (result && TREE_CODE (result) == SSA_NAME)
2366 result = SSA_VAL (result);
2367 vr1->result = result;
2369 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2370 INSERT);
2372 /* At this point we should have all the things inserted that we have
2373 seen before, and we should never try inserting something that
2374 already exists. */
2375 gcc_assert (!*slot);
2376 if (*slot)
2377 free_reference (*slot);
2379 *slot = vr1;
2380 return vr1;
2383 /* Compute and return the hash value for nary operation VBO1. */
2385 static hashval_t
2386 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2388 inchash::hash hstate;
2389 unsigned i;
2391 for (i = 0; i < vno1->length; ++i)
2392 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2393 vno1->op[i] = SSA_VAL (vno1->op[i]);
2395 if (((vno1->length == 2
2396 && commutative_tree_code (vno1->opcode))
2397 || (vno1->length == 3
2398 && commutative_ternary_tree_code (vno1->opcode)))
2399 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2400 std::swap (vno1->op[0], vno1->op[1]);
2401 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2402 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2404 std::swap (vno1->op[0], vno1->op[1]);
2405 vno1->opcode = swap_tree_comparison (vno1->opcode);
2408 hstate.add_int (vno1->opcode);
2409 for (i = 0; i < vno1->length; ++i)
2410 inchash::add_expr (vno1->op[i], hstate);
2412 return hstate.end ();
2415 /* Compare nary operations VNO1 and VNO2 and return true if they are
2416 equivalent. */
2418 bool
2419 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2421 unsigned i;
2423 if (vno1->hashcode != vno2->hashcode)
2424 return false;
2426 if (vno1->length != vno2->length)
2427 return false;
2429 if (vno1->opcode != vno2->opcode
2430 || !types_compatible_p (vno1->type, vno2->type))
2431 return false;
2433 for (i = 0; i < vno1->length; ++i)
2434 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2435 return false;
2437 return true;
2440 /* Initialize VNO from the pieces provided. */
2442 static void
2443 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2444 enum tree_code code, tree type, tree *ops)
2446 vno->opcode = code;
2447 vno->length = length;
2448 vno->type = type;
2449 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2452 /* Initialize VNO from OP. */
2454 static void
2455 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2457 unsigned i;
2459 vno->opcode = TREE_CODE (op);
2460 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2461 vno->type = TREE_TYPE (op);
2462 for (i = 0; i < vno->length; ++i)
2463 vno->op[i] = TREE_OPERAND (op, i);
2466 /* Return the number of operands for a vn_nary ops structure from STMT. */
2468 static unsigned int
2469 vn_nary_length_from_stmt (gimple *stmt)
2471 switch (gimple_assign_rhs_code (stmt))
2473 case REALPART_EXPR:
2474 case IMAGPART_EXPR:
2475 case VIEW_CONVERT_EXPR:
2476 return 1;
2478 case BIT_FIELD_REF:
2479 return 3;
2481 case CONSTRUCTOR:
2482 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2484 default:
2485 return gimple_num_ops (stmt) - 1;
2489 /* Initialize VNO from STMT. */
2491 static void
2492 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2494 unsigned i;
2496 vno->opcode = gimple_assign_rhs_code (stmt);
2497 vno->type = gimple_expr_type (stmt);
2498 switch (vno->opcode)
2500 case REALPART_EXPR:
2501 case IMAGPART_EXPR:
2502 case VIEW_CONVERT_EXPR:
2503 vno->length = 1;
2504 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2505 break;
2507 case BIT_FIELD_REF:
2508 vno->length = 3;
2509 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2510 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2511 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2512 break;
2514 case CONSTRUCTOR:
2515 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2516 for (i = 0; i < vno->length; ++i)
2517 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2518 break;
2520 default:
2521 gcc_checking_assert (!gimple_assign_single_p (stmt));
2522 vno->length = gimple_num_ops (stmt) - 1;
2523 for (i = 0; i < vno->length; ++i)
2524 vno->op[i] = gimple_op (stmt, i + 1);
2528 /* Compute the hashcode for VNO and look for it in the hash table;
2529 return the resulting value number if it exists in the hash table.
2530 Return NULL_TREE if it does not exist in the hash table or if the
2531 result field of the operation is NULL. VNRESULT will contain the
2532 vn_nary_op_t from the hashtable if it exists. */
2534 static tree
2535 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2537 vn_nary_op_s **slot;
2539 if (vnresult)
2540 *vnresult = NULL;
2542 vno->hashcode = vn_nary_op_compute_hash (vno);
2543 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2544 NO_INSERT);
2545 if (!slot && current_info == optimistic_info)
2546 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2547 NO_INSERT);
2548 if (!slot)
2549 return NULL_TREE;
2550 if (vnresult)
2551 *vnresult = *slot;
2552 return (*slot)->result;
2555 /* Lookup a n-ary operation by its pieces and return the resulting value
2556 number if it exists in the hash table. Return NULL_TREE if it does
2557 not exist in the hash table or if the result field of the operation
2558 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2559 if it exists. */
2561 tree
2562 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2563 tree type, tree *ops, vn_nary_op_t *vnresult)
2565 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2566 sizeof_vn_nary_op (length));
2567 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2568 return vn_nary_op_lookup_1 (vno1, vnresult);
2571 /* Lookup OP in the current hash table, and return the resulting value
2572 number if it exists in the hash table. Return NULL_TREE if it does
2573 not exist in the hash table or if the result field of the operation
2574 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2575 if it exists. */
2577 tree
2578 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2580 vn_nary_op_t vno1
2581 = XALLOCAVAR (struct vn_nary_op_s,
2582 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2583 init_vn_nary_op_from_op (vno1, op);
2584 return vn_nary_op_lookup_1 (vno1, vnresult);
2587 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2588 value number if it exists in the hash table. Return NULL_TREE if
2589 it does not exist in the hash table. VNRESULT will contain the
2590 vn_nary_op_t from the hashtable if it exists. */
2592 tree
2593 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2595 vn_nary_op_t vno1
2596 = XALLOCAVAR (struct vn_nary_op_s,
2597 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2598 init_vn_nary_op_from_stmt (vno1, stmt);
2599 return vn_nary_op_lookup_1 (vno1, vnresult);
2602 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
2604 static tree
2605 vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops)
2607 if (!rcode.is_tree_code ())
2608 return NULL_TREE;
2609 vn_nary_op_t vnresult = NULL;
2610 return vn_nary_op_lookup_pieces (TREE_CODE_LENGTH ((tree_code) rcode),
2611 (tree_code) rcode, type, ops, &vnresult);
2614 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2616 static vn_nary_op_t
2617 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2619 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2622 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2623 obstack. */
2625 static vn_nary_op_t
2626 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2628 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2629 &current_info->nary_obstack);
2631 vno1->value_id = value_id;
2632 vno1->length = length;
2633 vno1->result = result;
2635 return vno1;
2638 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2639 VNO->HASHCODE first. */
2641 static vn_nary_op_t
2642 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2643 bool compute_hash)
2645 vn_nary_op_s **slot;
2647 if (compute_hash)
2648 vno->hashcode = vn_nary_op_compute_hash (vno);
2650 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2651 gcc_assert (!*slot);
2653 *slot = vno;
2654 return vno;
2657 /* Insert a n-ary operation into the current hash table using it's
2658 pieces. Return the vn_nary_op_t structure we created and put in
2659 the hashtable. */
2661 vn_nary_op_t
2662 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2663 tree type, tree *ops,
2664 tree result, unsigned int value_id)
2666 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2667 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2668 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2671 /* Insert OP into the current hash table with a value number of
2672 RESULT. Return the vn_nary_op_t structure we created and put in
2673 the hashtable. */
2675 vn_nary_op_t
2676 vn_nary_op_insert (tree op, tree result)
2678 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2679 vn_nary_op_t vno1;
2681 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2682 init_vn_nary_op_from_op (vno1, op);
2683 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2686 /* Insert the rhs of STMT into the current hash table with a value number of
2687 RESULT. */
2689 static vn_nary_op_t
2690 vn_nary_op_insert_stmt (gimple *stmt, tree result)
2692 vn_nary_op_t vno1
2693 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2694 result, VN_INFO (result)->value_id);
2695 init_vn_nary_op_from_stmt (vno1, stmt);
2696 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2699 /* Compute a hashcode for PHI operation VP1 and return it. */
2701 static inline hashval_t
2702 vn_phi_compute_hash (vn_phi_t vp1)
2704 inchash::hash hstate (vp1->phiargs.length () > 2
2705 ? vp1->block->index : vp1->phiargs.length ());
2706 tree phi1op;
2707 tree type;
2708 edge e;
2709 edge_iterator ei;
2711 /* If all PHI arguments are constants we need to distinguish
2712 the PHI node via its type. */
2713 type = vp1->type;
2714 hstate.merge_hash (vn_hash_type (type));
2716 FOR_EACH_EDGE (e, ei, vp1->block->preds)
2718 /* Don't hash backedge values they need to be handled as VN_TOP
2719 for optimistic value-numbering. */
2720 if (e->flags & EDGE_DFS_BACK)
2721 continue;
2723 phi1op = vp1->phiargs[e->dest_idx];
2724 if (phi1op == VN_TOP)
2725 continue;
2726 inchash::add_expr (phi1op, hstate);
2729 return hstate.end ();
2733 /* Return true if COND1 and COND2 represent the same condition, set
2734 *INVERTED_P if one needs to be inverted to make it the same as
2735 the other. */
2737 static bool
2738 cond_stmts_equal_p (gcond *cond1, gcond *cond2, bool *inverted_p)
2740 enum tree_code code1 = gimple_cond_code (cond1);
2741 enum tree_code code2 = gimple_cond_code (cond2);
2742 tree lhs1 = gimple_cond_lhs (cond1);
2743 tree lhs2 = gimple_cond_lhs (cond2);
2744 tree rhs1 = gimple_cond_rhs (cond1);
2745 tree rhs2 = gimple_cond_rhs (cond2);
2747 *inverted_p = false;
2748 if (code1 == code2)
2750 else if (code1 == swap_tree_comparison (code2))
2751 std::swap (lhs2, rhs2);
2752 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
2753 *inverted_p = true;
2754 else if (code1 == invert_tree_comparison
2755 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
2757 std::swap (lhs2, rhs2);
2758 *inverted_p = true;
2760 else
2761 return false;
2763 if (! expressions_equal_p (vn_valueize (lhs1), vn_valueize (lhs2))
2764 || ! expressions_equal_p (vn_valueize (rhs1), vn_valueize (rhs2)))
2765 return false;
2767 return true;
2770 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2772 static int
2773 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2775 if (vp1->hashcode != vp2->hashcode)
2776 return false;
2778 if (vp1->block != vp2->block)
2780 if (vp1->phiargs.length () != vp2->phiargs.length ())
2781 return false;
2783 switch (vp1->phiargs.length ())
2785 case 1:
2786 /* Single-arg PHIs are just copies. */
2787 break;
2789 case 2:
2791 /* Rule out backedges into the PHI. */
2792 if (vp1->block->loop_father->header == vp1->block
2793 || vp2->block->loop_father->header == vp2->block)
2794 return false;
2796 /* If the PHI nodes do not have compatible types
2797 they are not the same. */
2798 if (!types_compatible_p (vp1->type, vp2->type))
2799 return false;
2801 basic_block idom1
2802 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
2803 basic_block idom2
2804 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
2805 /* If the immediate dominator end in switch stmts multiple
2806 values may end up in the same PHI arg via intermediate
2807 CFG merges. */
2808 if (EDGE_COUNT (idom1->succs) != 2
2809 || EDGE_COUNT (idom2->succs) != 2)
2810 return false;
2812 /* Verify the controlling stmt is the same. */
2813 gimple *last1 = last_stmt (idom1);
2814 gimple *last2 = last_stmt (idom2);
2815 if (gimple_code (last1) != GIMPLE_COND
2816 || gimple_code (last2) != GIMPLE_COND)
2817 return false;
2818 bool inverted_p;
2819 if (! cond_stmts_equal_p (as_a <gcond *> (last1),
2820 as_a <gcond *> (last2), &inverted_p))
2821 return false;
2823 /* Get at true/false controlled edges into the PHI. */
2824 edge te1, te2, fe1, fe2;
2825 if (! extract_true_false_controlled_edges (idom1, vp1->block,
2826 &te1, &fe1)
2827 || ! extract_true_false_controlled_edges (idom2, vp2->block,
2828 &te2, &fe2))
2829 return false;
2831 /* Swap edges if the second condition is the inverted of the
2832 first. */
2833 if (inverted_p)
2834 std::swap (te2, fe2);
2836 /* ??? Handle VN_TOP specially. */
2837 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
2838 vp2->phiargs[te2->dest_idx])
2839 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
2840 vp2->phiargs[fe2->dest_idx]))
2841 return false;
2843 return true;
2846 default:
2847 return false;
2851 /* If the PHI nodes do not have compatible types
2852 they are not the same. */
2853 if (!types_compatible_p (vp1->type, vp2->type))
2854 return false;
2856 /* Any phi in the same block will have it's arguments in the
2857 same edge order, because of how we store phi nodes. */
2858 int i;
2859 tree phi1op;
2860 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2862 tree phi2op = vp2->phiargs[i];
2863 if (phi1op == VN_TOP || phi2op == VN_TOP)
2864 continue;
2865 if (!expressions_equal_p (phi1op, phi2op))
2866 return false;
2869 return true;
2872 static vec<tree> shared_lookup_phiargs;
2874 /* Lookup PHI in the current hash table, and return the resulting
2875 value number if it exists in the hash table. Return NULL_TREE if
2876 it does not exist in the hash table. */
2878 static tree
2879 vn_phi_lookup (gimple *phi)
2881 vn_phi_s **slot;
2882 struct vn_phi_s vp1;
2883 edge e;
2884 edge_iterator ei;
2886 shared_lookup_phiargs.truncate (0);
2887 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
2889 /* Canonicalize the SSA_NAME's to their value number. */
2890 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
2892 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
2893 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2894 shared_lookup_phiargs[e->dest_idx] = def;
2896 vp1.type = TREE_TYPE (gimple_phi_result (phi));
2897 vp1.phiargs = shared_lookup_phiargs;
2898 vp1.block = gimple_bb (phi);
2899 vp1.hashcode = vn_phi_compute_hash (&vp1);
2900 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2901 NO_INSERT);
2902 if (!slot && current_info == optimistic_info)
2903 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2904 NO_INSERT);
2905 if (!slot)
2906 return NULL_TREE;
2907 return (*slot)->result;
2910 /* Insert PHI into the current hash table with a value number of
2911 RESULT. */
2913 static vn_phi_t
2914 vn_phi_insert (gimple *phi, tree result)
2916 vn_phi_s **slot;
2917 vn_phi_t vp1 = current_info->phis_pool->allocate ();
2918 vec<tree> args = vNULL;
2919 edge e;
2920 edge_iterator ei;
2922 args.safe_grow (gimple_phi_num_args (phi));
2924 /* Canonicalize the SSA_NAME's to their value number. */
2925 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
2927 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
2928 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2929 args[e->dest_idx] = def;
2931 vp1->value_id = VN_INFO (result)->value_id;
2932 vp1->type = TREE_TYPE (gimple_phi_result (phi));
2933 vp1->phiargs = args;
2934 vp1->block = gimple_bb (phi);
2935 vp1->result = result;
2936 vp1->hashcode = vn_phi_compute_hash (vp1);
2938 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
2940 /* Because we iterate over phi operations more than once, it's
2941 possible the slot might already exist here, hence no assert.*/
2942 *slot = vp1;
2943 return vp1;
2947 /* Print set of components in strongly connected component SCC to OUT. */
2949 static void
2950 print_scc (FILE *out, vec<tree> scc)
2952 tree var;
2953 unsigned int i;
2955 fprintf (out, "SCC consists of:");
2956 FOR_EACH_VEC_ELT (scc, i, var)
2958 fprintf (out, " ");
2959 print_generic_expr (out, var, 0);
2961 fprintf (out, "\n");
2964 /* Set the value number of FROM to TO, return true if it has changed
2965 as a result. */
2967 static inline bool
2968 set_ssa_val_to (tree from, tree to)
2970 tree currval = SSA_VAL (from);
2971 HOST_WIDE_INT toff, coff;
2973 /* The only thing we allow as value numbers are ssa_names
2974 and invariants. So assert that here. We don't allow VN_TOP
2975 as visiting a stmt should produce a value-number other than
2976 that.
2977 ??? Still VN_TOP can happen for unreachable code, so force
2978 it to varying in that case. Not all code is prepared to
2979 get VN_TOP on valueization. */
2980 if (to == VN_TOP)
2982 if (dump_file && (dump_flags & TDF_DETAILS))
2983 fprintf (dump_file, "Forcing value number to varying on "
2984 "receiving VN_TOP\n");
2985 to = from;
2988 gcc_assert (to != NULL_TREE
2989 && ((TREE_CODE (to) == SSA_NAME
2990 && (to == from || SSA_VAL (to) == to))
2991 || is_gimple_min_invariant (to)));
2993 if (from != to)
2995 if (currval == from)
2997 if (dump_file && (dump_flags & TDF_DETAILS))
2999 fprintf (dump_file, "Not changing value number of ");
3000 print_generic_expr (dump_file, from, 0);
3001 fprintf (dump_file, " from VARYING to ");
3002 print_generic_expr (dump_file, to, 0);
3003 fprintf (dump_file, "\n");
3005 return false;
3007 else if (TREE_CODE (to) == SSA_NAME
3008 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3009 to = from;
3012 if (dump_file && (dump_flags & TDF_DETAILS))
3014 fprintf (dump_file, "Setting value number of ");
3015 print_generic_expr (dump_file, from, 0);
3016 fprintf (dump_file, " to ");
3017 print_generic_expr (dump_file, to, 0);
3020 if (currval != to
3021 && !operand_equal_p (currval, to, 0)
3022 /* ??? For addresses involving volatile objects or types operand_equal_p
3023 does not reliably detect ADDR_EXPRs as equal. We know we are only
3024 getting invariant gimple addresses here, so can use
3025 get_addr_base_and_unit_offset to do this comparison. */
3026 && !(TREE_CODE (currval) == ADDR_EXPR
3027 && TREE_CODE (to) == ADDR_EXPR
3028 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3029 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3030 && coff == toff))
3032 VN_INFO (from)->valnum = to;
3033 if (dump_file && (dump_flags & TDF_DETAILS))
3034 fprintf (dump_file, " (changed)\n");
3035 return true;
3037 if (dump_file && (dump_flags & TDF_DETAILS))
3038 fprintf (dump_file, "\n");
3039 return false;
3042 /* Mark as processed all the definitions in the defining stmt of USE, or
3043 the USE itself. */
3045 static void
3046 mark_use_processed (tree use)
3048 ssa_op_iter iter;
3049 def_operand_p defp;
3050 gimple *stmt = SSA_NAME_DEF_STMT (use);
3052 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3054 VN_INFO (use)->use_processed = true;
3055 return;
3058 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3060 tree def = DEF_FROM_PTR (defp);
3062 VN_INFO (def)->use_processed = true;
3066 /* Set all definitions in STMT to value number to themselves.
3067 Return true if a value number changed. */
3069 static bool
3070 defs_to_varying (gimple *stmt)
3072 bool changed = false;
3073 ssa_op_iter iter;
3074 def_operand_p defp;
3076 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3078 tree def = DEF_FROM_PTR (defp);
3079 changed |= set_ssa_val_to (def, def);
3081 return changed;
3084 /* Visit a copy between LHS and RHS, return true if the value number
3085 changed. */
3087 static bool
3088 visit_copy (tree lhs, tree rhs)
3090 /* Valueize. */
3091 rhs = SSA_VAL (rhs);
3093 return set_ssa_val_to (lhs, rhs);
3096 /* Visit a nary operator RHS, value number it, and return true if the
3097 value number of LHS has changed as a result. */
3099 static bool
3100 visit_nary_op (tree lhs, gimple *stmt)
3102 bool changed = false;
3103 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3105 if (result)
3106 changed = set_ssa_val_to (lhs, result);
3107 else
3109 changed = set_ssa_val_to (lhs, lhs);
3110 vn_nary_op_insert_stmt (stmt, lhs);
3113 return changed;
3116 /* Visit a call STMT storing into LHS. Return true if the value number
3117 of the LHS has changed as a result. */
3119 static bool
3120 visit_reference_op_call (tree lhs, gcall *stmt)
3122 bool changed = false;
3123 struct vn_reference_s vr1;
3124 vn_reference_t vnresult = NULL;
3125 tree vdef = gimple_vdef (stmt);
3127 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3128 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3129 lhs = NULL_TREE;
3131 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3132 if (vnresult)
3134 if (vnresult->result_vdef && vdef)
3135 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3137 if (!vnresult->result && lhs)
3138 vnresult->result = lhs;
3140 if (vnresult->result && lhs)
3141 changed |= set_ssa_val_to (lhs, vnresult->result);
3143 else
3145 vn_reference_t vr2;
3146 vn_reference_s **slot;
3147 if (vdef)
3148 changed |= set_ssa_val_to (vdef, vdef);
3149 if (lhs)
3150 changed |= set_ssa_val_to (lhs, lhs);
3151 vr2 = current_info->references_pool->allocate ();
3152 vr2->vuse = vr1.vuse;
3153 /* As we are not walking the virtual operand chain we know the
3154 shared_lookup_references are still original so we can re-use
3155 them here. */
3156 vr2->operands = vr1.operands.copy ();
3157 vr2->type = vr1.type;
3158 vr2->set = vr1.set;
3159 vr2->hashcode = vr1.hashcode;
3160 vr2->result = lhs;
3161 vr2->result_vdef = vdef;
3162 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3163 INSERT);
3164 gcc_assert (!*slot);
3165 *slot = vr2;
3168 return changed;
3171 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3172 and return true if the value number of the LHS has changed as a result. */
3174 static bool
3175 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3177 bool changed = false;
3178 tree last_vuse;
3179 tree result;
3181 last_vuse = gimple_vuse (stmt);
3182 last_vuse_ptr = &last_vuse;
3183 result = vn_reference_lookup (op, gimple_vuse (stmt),
3184 default_vn_walk_kind, NULL);
3185 last_vuse_ptr = NULL;
3187 /* We handle type-punning through unions by value-numbering based
3188 on offset and size of the access. Be prepared to handle a
3189 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
3190 if (result
3191 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3193 /* We will be setting the value number of lhs to the value number
3194 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3195 So first simplify and lookup this expression to see if it
3196 is already available. */
3197 mprts_hook = vn_lookup_simplify_result;
3198 code_helper rcode = VIEW_CONVERT_EXPR;
3199 tree ops[3] = { result };
3200 bool res = gimple_resimplify1 (NULL, &rcode, TREE_TYPE (op), ops,
3201 vn_valueize);
3202 mprts_hook = NULL;
3203 gimple *new_stmt = NULL;
3204 if (res
3205 && gimple_simplified_result_is_gimple_val (rcode, ops))
3206 /* The expression is already available. */
3207 result = ops[0];
3208 else
3210 tree val = vn_lookup_simplify_result (rcode, TREE_TYPE (op), ops);
3211 if (!val)
3213 gimple_seq stmts = NULL;
3214 result = maybe_push_res_to_seq (rcode, TREE_TYPE (op), ops,
3215 &stmts);
3216 gcc_assert (result && gimple_seq_singleton_p (stmts));
3217 new_stmt = gimple_seq_first_stmt (stmts);
3219 else
3220 /* The expression is already available. */
3221 result = val;
3223 if (new_stmt)
3225 /* The expression is not yet available, value-number lhs to
3226 the new SSA_NAME we created. */
3227 /* Initialize value-number information properly. */
3228 VN_INFO_GET (result)->valnum = result;
3229 VN_INFO (result)->value_id = get_next_value_id ();
3230 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
3231 new_stmt);
3232 VN_INFO (result)->needs_insertion = true;
3233 /* As all "inserted" statements are singleton SCCs, insert
3234 to the valid table. This is strictly needed to
3235 avoid re-generating new value SSA_NAMEs for the same
3236 expression during SCC iteration over and over (the
3237 optimistic table gets cleared after each iteration).
3238 We do not need to insert into the optimistic table, as
3239 lookups there will fall back to the valid table. */
3240 if (current_info == optimistic_info)
3242 current_info = valid_info;
3243 vn_nary_op_insert_stmt (new_stmt, result);
3244 current_info = optimistic_info;
3246 else
3247 vn_nary_op_insert_stmt (new_stmt, result);
3248 if (dump_file && (dump_flags & TDF_DETAILS))
3250 fprintf (dump_file, "Inserting name ");
3251 print_generic_expr (dump_file, result, 0);
3252 fprintf (dump_file, " for expression ");
3253 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
3254 fprintf (dump_file, "\n");
3259 if (result)
3260 changed = set_ssa_val_to (lhs, result);
3261 else
3263 changed = set_ssa_val_to (lhs, lhs);
3264 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3267 return changed;
3271 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3272 and return true if the value number of the LHS has changed as a result. */
3274 static bool
3275 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3277 bool changed = false;
3278 vn_reference_t vnresult = NULL;
3279 tree result, assign;
3280 bool resultsame = false;
3281 tree vuse = gimple_vuse (stmt);
3282 tree vdef = gimple_vdef (stmt);
3284 if (TREE_CODE (op) == SSA_NAME)
3285 op = SSA_VAL (op);
3287 /* First we want to lookup using the *vuses* from the store and see
3288 if there the last store to this location with the same address
3289 had the same value.
3291 The vuses represent the memory state before the store. If the
3292 memory state, address, and value of the store is the same as the
3293 last store to this location, then this store will produce the
3294 same memory state as that store.
3296 In this case the vdef versions for this store are value numbered to those
3297 vuse versions, since they represent the same memory state after
3298 this store.
3300 Otherwise, the vdefs for the store are used when inserting into
3301 the table, since the store generates a new memory state. */
3303 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);
3305 if (result)
3307 if (TREE_CODE (result) == SSA_NAME)
3308 result = SSA_VAL (result);
3309 resultsame = expressions_equal_p (result, op);
3312 if ((!result || !resultsame)
3313 /* Only perform the following when being called from PRE
3314 which embeds tail merging. */
3315 && default_vn_walk_kind == VN_WALK)
3317 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3318 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
3319 if (vnresult)
3321 VN_INFO (vdef)->use_processed = true;
3322 return set_ssa_val_to (vdef, vnresult->result_vdef);
3326 if (!result || !resultsame)
3328 if (dump_file && (dump_flags & TDF_DETAILS))
3330 fprintf (dump_file, "No store match\n");
3331 fprintf (dump_file, "Value numbering store ");
3332 print_generic_expr (dump_file, lhs, 0);
3333 fprintf (dump_file, " to ");
3334 print_generic_expr (dump_file, op, 0);
3335 fprintf (dump_file, "\n");
3337 /* Have to set value numbers before insert, since insert is
3338 going to valueize the references in-place. */
3339 if (vdef)
3341 changed |= set_ssa_val_to (vdef, vdef);
3344 /* Do not insert structure copies into the tables. */
3345 if (is_gimple_min_invariant (op)
3346 || is_gimple_reg (op))
3347 vn_reference_insert (lhs, op, vdef, NULL);
3349 /* Only perform the following when being called from PRE
3350 which embeds tail merging. */
3351 if (default_vn_walk_kind == VN_WALK)
3353 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3354 vn_reference_insert (assign, lhs, vuse, vdef);
3357 else
3359 /* We had a match, so value number the vdef to have the value
3360 number of the vuse it came from. */
3362 if (dump_file && (dump_flags & TDF_DETAILS))
3363 fprintf (dump_file, "Store matched earlier value,"
3364 "value numbering store vdefs to matching vuses.\n");
3366 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3369 return changed;
3372 /* Visit and value number PHI, return true if the value number
3373 changed. */
3375 static bool
3376 visit_phi (gimple *phi)
3378 bool changed = false;
3379 tree result;
3380 tree sameval = VN_TOP;
3381 bool allsame = true;
3383 /* TODO: We could check for this in init_sccvn, and replace this
3384 with a gcc_assert. */
3385 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3386 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3388 /* See if all non-TOP arguments have the same value. TOP is
3389 equivalent to everything, so we can ignore it. */
3390 edge_iterator ei;
3391 edge e;
3392 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3393 if (e->flags & EDGE_EXECUTABLE)
3395 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3397 if (TREE_CODE (def) == SSA_NAME)
3398 def = SSA_VAL (def);
3399 if (def == VN_TOP)
3400 continue;
3401 if (sameval == VN_TOP)
3402 sameval = def;
3403 else if (!expressions_equal_p (def, sameval))
3405 allsame = false;
3406 break;
3410 /* If none of the edges was executable or all incoming values are
3411 undefined keep the value-number at VN_TOP. */
3412 if (sameval == VN_TOP)
3413 return set_ssa_val_to (PHI_RESULT (phi), VN_TOP);
3415 /* First see if it is equivalent to a phi node in this block. We prefer
3416 this as it allows IV elimination - see PRs 66502 and 67167. */
3417 result = vn_phi_lookup (phi);
3418 if (result)
3419 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3420 /* Otherwise all value numbered to the same value, the phi node has that
3421 value. */
3422 else if (allsame)
3423 changed = set_ssa_val_to (PHI_RESULT (phi), sameval);
3424 else
3426 vn_phi_insert (phi, PHI_RESULT (phi));
3427 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3430 return changed;
3433 /* Try to simplify RHS using equivalences and constant folding. */
3435 static tree
3436 try_to_simplify (gassign *stmt)
3438 enum tree_code code = gimple_assign_rhs_code (stmt);
3439 tree tem;
3441 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
3442 in this case, there is no point in doing extra work. */
3443 if (code == SSA_NAME)
3444 return NULL_TREE;
3446 /* First try constant folding based on our current lattice. */
3447 mprts_hook = vn_lookup_simplify_result;
3448 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3449 mprts_hook = NULL;
3450 if (tem
3451 && (TREE_CODE (tem) == SSA_NAME
3452 || is_gimple_min_invariant (tem)))
3453 return tem;
3455 return NULL_TREE;
3458 /* Visit and value number USE, return true if the value number
3459 changed. */
3461 static bool
3462 visit_use (tree use)
3464 bool changed = false;
3465 gimple *stmt = SSA_NAME_DEF_STMT (use);
3467 mark_use_processed (use);
3469 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3470 if (dump_file && (dump_flags & TDF_DETAILS)
3471 && !SSA_NAME_IS_DEFAULT_DEF (use))
3473 fprintf (dump_file, "Value numbering ");
3474 print_generic_expr (dump_file, use, 0);
3475 fprintf (dump_file, " stmt = ");
3476 print_gimple_stmt (dump_file, stmt, 0, 0);
3479 /* Handle uninitialized uses. */
3480 if (SSA_NAME_IS_DEFAULT_DEF (use))
3481 changed = set_ssa_val_to (use, use);
3482 else if (gimple_code (stmt) == GIMPLE_PHI)
3483 changed = visit_phi (stmt);
3484 else if (gimple_has_volatile_ops (stmt))
3485 changed = defs_to_varying (stmt);
3486 else if (gassign *ass = dyn_cast <gassign *> (stmt))
3488 enum tree_code code = gimple_assign_rhs_code (ass);
3489 tree lhs = gimple_assign_lhs (ass);
3490 tree rhs1 = gimple_assign_rhs1 (ass);
3491 tree simplified;
3493 /* Shortcut for copies. Simplifying copies is pointless,
3494 since we copy the expression and value they represent. */
3495 if (code == SSA_NAME
3496 && TREE_CODE (lhs) == SSA_NAME)
3498 changed = visit_copy (lhs, rhs1);
3499 goto done;
3501 simplified = try_to_simplify (ass);
3502 if (simplified)
3504 if (dump_file && (dump_flags & TDF_DETAILS))
3506 fprintf (dump_file, "RHS ");
3507 print_gimple_expr (dump_file, ass, 0, 0);
3508 fprintf (dump_file, " simplified to ");
3509 print_generic_expr (dump_file, simplified, 0);
3510 fprintf (dump_file, "\n");
3513 /* Setting value numbers to constants will occasionally
3514 screw up phi congruence because constants are not
3515 uniquely associated with a single ssa name that can be
3516 looked up. */
3517 if (simplified
3518 && is_gimple_min_invariant (simplified)
3519 && TREE_CODE (lhs) == SSA_NAME)
3521 changed = set_ssa_val_to (lhs, simplified);
3522 goto done;
3524 else if (simplified
3525 && TREE_CODE (simplified) == SSA_NAME
3526 && TREE_CODE (lhs) == SSA_NAME)
3528 changed = visit_copy (lhs, simplified);
3529 goto done;
3532 if ((TREE_CODE (lhs) == SSA_NAME
3533 /* We can substitute SSA_NAMEs that are live over
3534 abnormal edges with their constant value. */
3535 && !(gimple_assign_copy_p (ass)
3536 && is_gimple_min_invariant (rhs1))
3537 && !(simplified
3538 && is_gimple_min_invariant (simplified))
3539 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3540 /* Stores or copies from SSA_NAMEs that are live over
3541 abnormal edges are a problem. */
3542 || (code == SSA_NAME
3543 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3544 changed = defs_to_varying (ass);
3545 else if (REFERENCE_CLASS_P (lhs)
3546 || DECL_P (lhs))
3547 changed = visit_reference_op_store (lhs, rhs1, ass);
3548 else if (TREE_CODE (lhs) == SSA_NAME)
3550 if ((gimple_assign_copy_p (ass)
3551 && is_gimple_min_invariant (rhs1))
3552 || (simplified
3553 && is_gimple_min_invariant (simplified)))
3555 if (simplified)
3556 changed = set_ssa_val_to (lhs, simplified);
3557 else
3558 changed = set_ssa_val_to (lhs, rhs1);
3560 else
3562 /* Visit the original statement. */
3563 switch (vn_get_stmt_kind (ass))
3565 case VN_NARY:
3566 changed = visit_nary_op (lhs, ass);
3567 break;
3568 case VN_REFERENCE:
3569 changed = visit_reference_op_load (lhs, rhs1, ass);
3570 break;
3571 default:
3572 changed = defs_to_varying (ass);
3573 break;
3577 else
3578 changed = defs_to_varying (ass);
3580 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
3582 tree lhs = gimple_call_lhs (call_stmt);
3583 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3585 /* Try constant folding based on our current lattice. */
3586 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
3587 vn_valueize);
3588 if (simplified)
3590 if (dump_file && (dump_flags & TDF_DETAILS))
3592 fprintf (dump_file, "call ");
3593 print_gimple_expr (dump_file, call_stmt, 0, 0);
3594 fprintf (dump_file, " simplified to ");
3595 print_generic_expr (dump_file, simplified, 0);
3596 fprintf (dump_file, "\n");
3599 /* Setting value numbers to constants will occasionally
3600 screw up phi congruence because constants are not
3601 uniquely associated with a single ssa name that can be
3602 looked up. */
3603 if (simplified
3604 && is_gimple_min_invariant (simplified))
3606 changed = set_ssa_val_to (lhs, simplified);
3607 if (gimple_vdef (call_stmt))
3608 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
3609 SSA_VAL (gimple_vuse (call_stmt)));
3610 goto done;
3612 else if (simplified
3613 && TREE_CODE (simplified) == SSA_NAME)
3615 changed = visit_copy (lhs, simplified);
3616 if (gimple_vdef (call_stmt))
3617 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
3618 SSA_VAL (gimple_vuse (call_stmt)));
3619 goto done;
3621 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3623 changed = defs_to_varying (call_stmt);
3624 goto done;
3628 if (!gimple_call_internal_p (call_stmt)
3629 && (/* Calls to the same function with the same vuse
3630 and the same operands do not necessarily return the same
3631 value, unless they're pure or const. */
3632 gimple_call_flags (call_stmt) & (ECF_PURE | ECF_CONST)
3633 /* If calls have a vdef, subsequent calls won't have
3634 the same incoming vuse. So, if 2 calls with vdef have the
3635 same vuse, we know they're not subsequent.
3636 We can value number 2 calls to the same function with the
3637 same vuse and the same operands which are not subsequent
3638 the same, because there is no code in the program that can
3639 compare the 2 values... */
3640 || (gimple_vdef (call_stmt)
3641 /* ... unless the call returns a pointer which does
3642 not alias with anything else. In which case the
3643 information that the values are distinct are encoded
3644 in the IL. */
3645 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
3646 /* Only perform the following when being called from PRE
3647 which embeds tail merging. */
3648 && default_vn_walk_kind == VN_WALK)))
3649 changed = visit_reference_op_call (lhs, call_stmt);
3650 else
3651 changed = defs_to_varying (call_stmt);
3653 else
3654 changed = defs_to_varying (stmt);
3655 done:
3656 return changed;
3659 /* Compare two operands by reverse postorder index */
3661 static int
3662 compare_ops (const void *pa, const void *pb)
3664 const tree opa = *((const tree *)pa);
3665 const tree opb = *((const tree *)pb);
3666 gimple *opstmta = SSA_NAME_DEF_STMT (opa);
3667 gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
3668 basic_block bba;
3669 basic_block bbb;
3671 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3672 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3673 else if (gimple_nop_p (opstmta))
3674 return -1;
3675 else if (gimple_nop_p (opstmtb))
3676 return 1;
3678 bba = gimple_bb (opstmta);
3679 bbb = gimple_bb (opstmtb);
3681 if (!bba && !bbb)
3682 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3683 else if (!bba)
3684 return -1;
3685 else if (!bbb)
3686 return 1;
3688 if (bba == bbb)
3690 if (gimple_code (opstmta) == GIMPLE_PHI
3691 && gimple_code (opstmtb) == GIMPLE_PHI)
3692 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3693 else if (gimple_code (opstmta) == GIMPLE_PHI)
3694 return -1;
3695 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3696 return 1;
3697 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3698 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3699 else
3700 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3702 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3705 /* Sort an array containing members of a strongly connected component
3706 SCC so that the members are ordered by RPO number.
3707 This means that when the sort is complete, iterating through the
3708 array will give you the members in RPO order. */
3710 static void
3711 sort_scc (vec<tree> scc)
3713 scc.qsort (compare_ops);
3716 /* Insert the no longer used nary ONARY to the hash INFO. */
3718 static void
3719 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3721 size_t size = sizeof_vn_nary_op (onary->length);
3722 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3723 &info->nary_obstack);
3724 memcpy (nary, onary, size);
3725 vn_nary_op_insert_into (nary, info->nary, false);
3728 /* Insert the no longer used phi OPHI to the hash INFO. */
3730 static void
3731 copy_phi (vn_phi_t ophi, vn_tables_t info)
3733 vn_phi_t phi = info->phis_pool->allocate ();
3734 vn_phi_s **slot;
3735 memcpy (phi, ophi, sizeof (*phi));
3736 ophi->phiargs.create (0);
3737 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
3738 gcc_assert (!*slot);
3739 *slot = phi;
3742 /* Insert the no longer used reference OREF to the hash INFO. */
3744 static void
3745 copy_reference (vn_reference_t oref, vn_tables_t info)
3747 vn_reference_t ref;
3748 vn_reference_s **slot;
3749 ref = info->references_pool->allocate ();
3750 memcpy (ref, oref, sizeof (*ref));
3751 oref->operands.create (0);
3752 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
3753 if (*slot)
3754 free_reference (*slot);
3755 *slot = ref;
3758 /* Process a strongly connected component in the SSA graph. */
3760 static void
3761 process_scc (vec<tree> scc)
3763 tree var;
3764 unsigned int i;
3765 unsigned int iterations = 0;
3766 bool changed = true;
3767 vn_nary_op_iterator_type hin;
3768 vn_phi_iterator_type hip;
3769 vn_reference_iterator_type hir;
3770 vn_nary_op_t nary;
3771 vn_phi_t phi;
3772 vn_reference_t ref;
3774 /* If the SCC has a single member, just visit it. */
3775 if (scc.length () == 1)
3777 tree use = scc[0];
3778 if (VN_INFO (use)->use_processed)
3779 return;
3780 /* We need to make sure it doesn't form a cycle itself, which can
3781 happen for self-referential PHI nodes. In that case we would
3782 end up inserting an expression with VN_TOP operands into the
3783 valid table which makes us derive bogus equivalences later.
3784 The cheapest way to check this is to assume it for all PHI nodes. */
3785 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3786 /* Fallthru to iteration. */ ;
3787 else
3789 visit_use (use);
3790 return;
3794 if (dump_file && (dump_flags & TDF_DETAILS))
3795 print_scc (dump_file, scc);
3797 /* Iterate over the SCC with the optimistic table until it stops
3798 changing. */
3799 current_info = optimistic_info;
3800 while (changed)
3802 changed = false;
3803 iterations++;
3804 if (dump_file && (dump_flags & TDF_DETAILS))
3805 fprintf (dump_file, "Starting iteration %d\n", iterations);
3806 /* As we are value-numbering optimistically we have to
3807 clear the expression tables and the simplified expressions
3808 in each iteration until we converge. */
3809 optimistic_info->nary->empty ();
3810 optimistic_info->phis->empty ();
3811 optimistic_info->references->empty ();
3812 obstack_free (&optimistic_info->nary_obstack, NULL);
3813 gcc_obstack_init (&optimistic_info->nary_obstack);
3814 optimistic_info->phis_pool->release ();
3815 optimistic_info->references_pool->release ();
3816 FOR_EACH_VEC_ELT (scc, i, var)
3817 gcc_assert (!VN_INFO (var)->needs_insertion
3818 && VN_INFO (var)->expr == NULL);
3819 FOR_EACH_VEC_ELT (scc, i, var)
3820 changed |= visit_use (var);
3823 if (dump_file && (dump_flags & TDF_DETAILS))
3824 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
3825 statistics_histogram_event (cfun, "SCC iterations", iterations);
3827 /* Finally, copy the contents of the no longer used optimistic
3828 table to the valid table. */
3829 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
3830 copy_nary (nary, valid_info);
3831 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
3832 copy_phi (phi, valid_info);
3833 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
3834 ref, vn_reference_t, hir)
3835 copy_reference (ref, valid_info);
3837 current_info = valid_info;
3841 /* Pop the components of the found SCC for NAME off the SCC stack
3842 and process them. Returns true if all went well, false if
3843 we run into resource limits. */
3845 static bool
3846 extract_and_process_scc_for_name (tree name)
3848 auto_vec<tree> scc;
3849 tree x;
3851 /* Found an SCC, pop the components off the SCC stack and
3852 process them. */
3855 x = sccstack.pop ();
3857 VN_INFO (x)->on_sccstack = false;
3858 scc.safe_push (x);
3859 } while (x != name);
3861 /* Bail out of SCCVN in case a SCC turns out to be incredibly large. */
3862 if (scc.length ()
3863 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3865 if (dump_file)
3866 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3867 "SCC size %u exceeding %u\n", scc.length (),
3868 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3870 return false;
3873 if (scc.length () > 1)
3874 sort_scc (scc);
3876 process_scc (scc);
3878 return true;
3881 /* Depth first search on NAME to discover and process SCC's in the SSA
3882 graph.
3883 Execution of this algorithm relies on the fact that the SCC's are
3884 popped off the stack in topological order.
3885 Returns true if successful, false if we stopped processing SCC's due
3886 to resource constraints. */
3888 static bool
3889 DFS (tree name)
3891 vec<ssa_op_iter> itervec = vNULL;
3892 vec<tree> namevec = vNULL;
3893 use_operand_p usep = NULL;
3894 gimple *defstmt;
3895 tree use;
3896 ssa_op_iter iter;
3898 start_over:
3899 /* SCC info */
3900 VN_INFO (name)->dfsnum = next_dfs_num++;
3901 VN_INFO (name)->visited = true;
3902 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3904 sccstack.safe_push (name);
3905 VN_INFO (name)->on_sccstack = true;
3906 defstmt = SSA_NAME_DEF_STMT (name);
3908 /* Recursively DFS on our operands, looking for SCC's. */
3909 if (!gimple_nop_p (defstmt))
3911 /* Push a new iterator. */
3912 if (gphi *phi = dyn_cast <gphi *> (defstmt))
3913 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
3914 else
3915 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3917 else
3918 clear_and_done_ssa_iter (&iter);
3920 while (1)
3922 /* If we are done processing uses of a name, go up the stack
3923 of iterators and process SCCs as we found them. */
3924 if (op_iter_done (&iter))
3926 /* See if we found an SCC. */
3927 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3928 if (!extract_and_process_scc_for_name (name))
3930 namevec.release ();
3931 itervec.release ();
3932 return false;
3935 /* Check if we are done. */
3936 if (namevec.is_empty ())
3938 namevec.release ();
3939 itervec.release ();
3940 return true;
3943 /* Restore the last use walker and continue walking there. */
3944 use = name;
3945 name = namevec.pop ();
3946 memcpy (&iter, &itervec.last (),
3947 sizeof (ssa_op_iter));
3948 itervec.pop ();
3949 goto continue_walking;
3952 use = USE_FROM_PTR (usep);
3954 /* Since we handle phi nodes, we will sometimes get
3955 invariants in the use expression. */
3956 if (TREE_CODE (use) == SSA_NAME)
3958 if (! (VN_INFO (use)->visited))
3960 /* Recurse by pushing the current use walking state on
3961 the stack and starting over. */
3962 itervec.safe_push (iter);
3963 namevec.safe_push (name);
3964 name = use;
3965 goto start_over;
3967 continue_walking:
3968 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3969 VN_INFO (use)->low);
3971 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3972 && VN_INFO (use)->on_sccstack)
3974 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3975 VN_INFO (name)->low);
3979 usep = op_iter_next_use (&iter);
3983 /* Allocate a value number table. */
3985 static void
3986 allocate_vn_table (vn_tables_t table)
3988 table->phis = new vn_phi_table_type (23);
3989 table->nary = new vn_nary_op_table_type (23);
3990 table->references = new vn_reference_table_type (23);
3992 gcc_obstack_init (&table->nary_obstack);
3993 table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
3994 table->references_pool = new object_allocator<vn_reference_s>
3995 ("VN references");
3998 /* Free a value number table. */
4000 static void
4001 free_vn_table (vn_tables_t table)
4003 delete table->phis;
4004 table->phis = NULL;
4005 delete table->nary;
4006 table->nary = NULL;
4007 delete table->references;
4008 table->references = NULL;
4009 obstack_free (&table->nary_obstack, NULL);
4010 delete table->phis_pool;
4011 delete table->references_pool;
4014 static void
4015 init_scc_vn (void)
4017 size_t i;
4018 int j;
4019 int *rpo_numbers_temp;
4021 calculate_dominance_info (CDI_DOMINATORS);
4022 mark_dfs_back_edges ();
4024 sccstack.create (0);
4025 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4027 constant_value_ids = BITMAP_ALLOC (NULL);
4029 next_dfs_num = 1;
4030 next_value_id = 1;
4032 vn_ssa_aux_table.create (num_ssa_names + 1);
4033 /* VEC_alloc doesn't actually grow it to the right size, it just
4034 preallocates the space to do so. */
4035 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4036 gcc_obstack_init (&vn_ssa_aux_obstack);
4038 shared_lookup_phiargs.create (0);
4039 shared_lookup_references.create (0);
4040 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4041 rpo_numbers_temp =
4042 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4043 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4045 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
4046 the i'th block in RPO order is bb. We want to map bb's to RPO
4047 numbers, so we need to rearrange this array. */
4048 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4049 rpo_numbers[rpo_numbers_temp[j]] = j;
4051 XDELETE (rpo_numbers_temp);
4053 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
4055 renumber_gimple_stmt_uids ();
4057 /* Create the valid and optimistic value numbering tables. */
4058 valid_info = XCNEW (struct vn_tables_s);
4059 allocate_vn_table (valid_info);
4060 optimistic_info = XCNEW (struct vn_tables_s);
4061 allocate_vn_table (optimistic_info);
4062 current_info = valid_info;
4064 /* Create the VN_INFO structures, and initialize value numbers to
4065 TOP or VARYING for parameters. */
4066 for (i = 1; i < num_ssa_names; i++)
4068 tree name = ssa_name (i);
4069 if (!name)
4070 continue;
4072 VN_INFO_GET (name)->valnum = VN_TOP;
4073 VN_INFO (name)->needs_insertion = false;
4074 VN_INFO (name)->expr = NULL;
4075 VN_INFO (name)->value_id = 0;
4077 if (!SSA_NAME_IS_DEFAULT_DEF (name))
4078 continue;
4080 switch (TREE_CODE (SSA_NAME_VAR (name)))
4082 case VAR_DECL:
4083 /* Undefined vars keep TOP. */
4084 break;
4086 case PARM_DECL:
4087 /* Parameters are VARYING but we can record a condition
4088 if we know it is a non-NULL pointer. */
4089 VN_INFO (name)->visited = true;
4090 VN_INFO (name)->valnum = name;
4091 if (POINTER_TYPE_P (TREE_TYPE (name))
4092 && nonnull_arg_p (SSA_NAME_VAR (name)))
4094 tree ops[2];
4095 ops[0] = name;
4096 ops[1] = build_int_cst (TREE_TYPE (name), 0);
4097 vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4098 boolean_true_node, 0);
4099 if (dump_file && (dump_flags & TDF_DETAILS))
4101 fprintf (dump_file, "Recording ");
4102 print_generic_expr (dump_file, name, TDF_SLIM);
4103 fprintf (dump_file, " != 0\n");
4106 break;
4108 case RESULT_DECL:
4109 /* If the result is passed by invisible reference the default
4110 def is initialized, otherwise it's uninitialized. */
4111 if (DECL_BY_REFERENCE (SSA_NAME_VAR (name)))
4113 VN_INFO (name)->visited = true;
4114 VN_INFO (name)->valnum = name;
4116 break;
4118 default:
4119 gcc_unreachable ();
4124 void
4125 free_scc_vn (void)
4127 size_t i;
4129 delete constant_to_value_id;
4130 constant_to_value_id = NULL;
4131 BITMAP_FREE (constant_value_ids);
4132 shared_lookup_phiargs.release ();
4133 shared_lookup_references.release ();
4134 XDELETEVEC (rpo_numbers);
4136 for (i = 0; i < num_ssa_names; i++)
4138 tree name = ssa_name (i);
4139 if (name
4140 && has_VN_INFO (name)
4141 && VN_INFO (name)->needs_insertion)
4142 release_ssa_name (name);
4144 obstack_free (&vn_ssa_aux_obstack, NULL);
4145 vn_ssa_aux_table.release ();
4147 sccstack.release ();
4148 free_vn_table (valid_info);
4149 XDELETE (valid_info);
4150 free_vn_table (optimistic_info);
4151 XDELETE (optimistic_info);
4153 BITMAP_FREE (const_parms);
4156 /* Set *ID according to RESULT. */
4158 static void
4159 set_value_id_for_result (tree result, unsigned int *id)
4161 if (result && TREE_CODE (result) == SSA_NAME)
4162 *id = VN_INFO (result)->value_id;
4163 else if (result && is_gimple_min_invariant (result))
4164 *id = get_or_alloc_constant_value_id (result);
4165 else
4166 *id = get_next_value_id ();
4169 /* Set the value ids in the valid hash tables. */
4171 static void
4172 set_hashtable_value_ids (void)
4174 vn_nary_op_iterator_type hin;
4175 vn_phi_iterator_type hip;
4176 vn_reference_iterator_type hir;
4177 vn_nary_op_t vno;
4178 vn_reference_t vr;
4179 vn_phi_t vp;
4181 /* Now set the value ids of the things we had put in the hash
4182 table. */
4184 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4185 set_value_id_for_result (vno->result, &vno->value_id);
4187 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4188 set_value_id_for_result (vp->result, &vp->value_id);
4190 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4191 hir)
4192 set_value_id_for_result (vr->result, &vr->value_id);
4195 class sccvn_dom_walker : public dom_walker
4197 public:
4198 sccvn_dom_walker ()
4199 : dom_walker (CDI_DOMINATORS), fail (false), cond_stack (vNULL) {}
4200 ~sccvn_dom_walker ();
4202 virtual void before_dom_children (basic_block);
4203 virtual void after_dom_children (basic_block);
4205 void record_cond (basic_block,
4206 enum tree_code code, tree lhs, tree rhs, bool value);
4207 void record_conds (basic_block,
4208 enum tree_code code, tree lhs, tree rhs, bool value);
4210 bool fail;
4211 vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4212 cond_stack;
4215 sccvn_dom_walker::~sccvn_dom_walker ()
4217 cond_stack.release ();
4220 /* Record a temporary condition for the BB and its dominated blocks. */
4222 void
4223 sccvn_dom_walker::record_cond (basic_block bb,
4224 enum tree_code code, tree lhs, tree rhs,
4225 bool value)
4227 tree ops[2] = { lhs, rhs };
4228 vn_nary_op_t old = NULL;
4229 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4230 current_info->nary->remove_elt_with_hash (old, old->hashcode);
4231 vn_nary_op_t cond
4232 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4233 value
4234 ? boolean_true_node
4235 : boolean_false_node, 0);
4236 if (dump_file && (dump_flags & TDF_DETAILS))
4238 fprintf (dump_file, "Recording temporarily ");
4239 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4240 fprintf (dump_file, " %s ", get_tree_code_name (code));
4241 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4242 fprintf (dump_file, " == %s%s\n",
4243 value ? "true" : "false",
4244 old ? " (old entry saved)" : "");
4246 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4249 /* Record temporary conditions for the BB and its dominated blocks
4250 according to LHS CODE RHS == VALUE and its dominated conditions. */
4252 void
4253 sccvn_dom_walker::record_conds (basic_block bb,
4254 enum tree_code code, tree lhs, tree rhs,
4255 bool value)
4257 /* Record the original condition. */
4258 record_cond (bb, code, lhs, rhs, value);
4260 if (!value)
4261 return;
4263 /* Record dominated conditions if the condition is true. Note that
4264 the inversion is already recorded. */
4265 switch (code)
4267 case LT_EXPR:
4268 case GT_EXPR:
4269 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4270 record_cond (bb, NE_EXPR, lhs, rhs, true);
4271 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4272 break;
4274 case EQ_EXPR:
4275 record_cond (bb, LE_EXPR, lhs, rhs, true);
4276 record_cond (bb, GE_EXPR, lhs, rhs, true);
4277 record_cond (bb, LT_EXPR, lhs, rhs, false);
4278 record_cond (bb, GT_EXPR, lhs, rhs, false);
4279 break;
4281 default:
4282 break;
4286 /* Restore expressions and values derived from conditionals. */
4288 void
4289 sccvn_dom_walker::after_dom_children (basic_block bb)
4291 while (!cond_stack.is_empty ()
4292 && cond_stack.last ().first == bb)
4294 vn_nary_op_t cond = cond_stack.last ().second.first;
4295 vn_nary_op_t old = cond_stack.last ().second.second;
4296 current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4297 if (old)
4298 vn_nary_op_insert_into (old, current_info->nary, false);
4299 cond_stack.pop ();
4303 /* Value number all statements in BB. */
4305 void
4306 sccvn_dom_walker::before_dom_children (basic_block bb)
4308 edge e;
4309 edge_iterator ei;
4311 if (fail)
4312 return;
4314 /* If any of the predecessor edges that do not come from blocks dominated
4315 by us are still marked as possibly executable consider this block
4316 reachable. */
4317 bool reachable = bb == ENTRY_BLOCK_PTR_FOR_FN (cfun);
4318 FOR_EACH_EDGE (e, ei, bb->preds)
4319 if (!dominated_by_p (CDI_DOMINATORS, e->src, bb))
4320 reachable |= (e->flags & EDGE_EXECUTABLE);
4322 /* If the block is not reachable all outgoing edges are not
4323 executable. Neither are incoming edges with src dominated by us. */
4324 if (!reachable)
4326 if (dump_file && (dump_flags & TDF_DETAILS))
4327 fprintf (dump_file, "Marking all outgoing edges of unreachable "
4328 "BB %d as not executable\n", bb->index);
4330 FOR_EACH_EDGE (e, ei, bb->succs)
4331 e->flags &= ~EDGE_EXECUTABLE;
4333 FOR_EACH_EDGE (e, ei, bb->preds)
4335 if (dominated_by_p (CDI_DOMINATORS, e->src, bb))
4337 if (dump_file && (dump_flags & TDF_DETAILS))
4338 fprintf (dump_file, "Marking backedge from BB %d into "
4339 "unreachable BB %d as not executable\n",
4340 e->src->index, bb->index);
4341 e->flags &= ~EDGE_EXECUTABLE;
4344 return;
4347 if (dump_file && (dump_flags & TDF_DETAILS))
4348 fprintf (dump_file, "Visiting BB %d\n", bb->index);
4350 /* If we have a single predecessor record the equivalence from a
4351 possible condition on the predecessor edge. */
4352 if (single_pred_p (bb))
4354 edge e = single_pred_edge (bb);
4355 /* Check if there are multiple executable successor edges in
4356 the source block. Otherwise there is no additional info
4357 to be recorded. */
4358 edge e2;
4359 FOR_EACH_EDGE (e2, ei, e->src->succs)
4360 if (e2 != e
4361 && e2->flags & EDGE_EXECUTABLE)
4362 break;
4363 if (e2 && (e2->flags & EDGE_EXECUTABLE))
4365 gimple *stmt = last_stmt (e->src);
4366 if (stmt
4367 && gimple_code (stmt) == GIMPLE_COND)
4369 enum tree_code code = gimple_cond_code (stmt);
4370 tree lhs = gimple_cond_lhs (stmt);
4371 tree rhs = gimple_cond_rhs (stmt);
4372 record_conds (bb, code, lhs, rhs,
4373 (e->flags & EDGE_TRUE_VALUE) != 0);
4374 code = invert_tree_comparison (code, HONOR_NANS (lhs));
4375 if (code != ERROR_MARK)
4376 record_conds (bb, code, lhs, rhs,
4377 (e->flags & EDGE_TRUE_VALUE) == 0);
4382 /* Value-number all defs in the basic-block. */
4383 for (gphi_iterator gsi = gsi_start_phis (bb);
4384 !gsi_end_p (gsi); gsi_next (&gsi))
4386 gphi *phi = gsi.phi ();
4387 tree res = PHI_RESULT (phi);
4388 if (!VN_INFO (res)->visited
4389 && !DFS (res))
4391 fail = true;
4392 return;
4395 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4396 !gsi_end_p (gsi); gsi_next (&gsi))
4398 ssa_op_iter i;
4399 tree op;
4400 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4401 if (!VN_INFO (op)->visited
4402 && !DFS (op))
4404 fail = true;
4405 return;
4409 /* Finally look at the last stmt. */
4410 gimple *stmt = last_stmt (bb);
4411 if (!stmt)
4412 return;
4414 enum gimple_code code = gimple_code (stmt);
4415 if (code != GIMPLE_COND
4416 && code != GIMPLE_SWITCH
4417 && code != GIMPLE_GOTO)
4418 return;
4420 if (dump_file && (dump_flags & TDF_DETAILS))
4422 fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
4423 print_gimple_stmt (dump_file, stmt, 0, 0);
4426 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4427 if value-numbering can prove they are not reachable. Handling
4428 computed gotos is also possible. */
4429 tree val;
4430 switch (code)
4432 case GIMPLE_COND:
4434 tree lhs = vn_valueize (gimple_cond_lhs (stmt));
4435 tree rhs = vn_valueize (gimple_cond_rhs (stmt));
4436 val = gimple_simplify (gimple_cond_code (stmt),
4437 boolean_type_node, lhs, rhs,
4438 NULL, vn_valueize);
4439 /* If that didn't simplify to a constant see if we have recorded
4440 temporary expressions from taken edges. */
4441 if (!val || TREE_CODE (val) != INTEGER_CST)
4443 tree ops[2];
4444 ops[0] = lhs;
4445 ops[1] = rhs;
4446 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
4447 boolean_type_node, ops, NULL);
4449 break;
4451 case GIMPLE_SWITCH:
4452 val = gimple_switch_index (as_a <gswitch *> (stmt));
4453 break;
4454 case GIMPLE_GOTO:
4455 val = gimple_goto_dest (stmt);
4456 break;
4457 default:
4458 gcc_unreachable ();
4460 if (!val)
4461 return;
4463 edge taken = find_taken_edge (bb, vn_valueize (val));
4464 if (!taken)
4465 return;
4467 if (dump_file && (dump_flags & TDF_DETAILS))
4468 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4469 "not executable\n", bb->index, bb->index, taken->dest->index);
4471 FOR_EACH_EDGE (e, ei, bb->succs)
4472 if (e != taken)
4473 e->flags &= ~EDGE_EXECUTABLE;
4476 /* Do SCCVN. Returns true if it finished, false if we bailed out
4477 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
4478 how we use the alias oracle walking during the VN process. */
4480 bool
4481 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4483 basic_block bb;
4484 size_t i;
4486 default_vn_walk_kind = default_vn_walk_kind_;
4488 init_scc_vn ();
4490 /* Collect pointers we know point to readonly memory. */
4491 const_parms = BITMAP_ALLOC (NULL);
4492 tree fnspec = lookup_attribute ("fn spec",
4493 TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
4494 if (fnspec)
4496 fnspec = TREE_VALUE (TREE_VALUE (fnspec));
4497 i = 1;
4498 for (tree arg = DECL_ARGUMENTS (cfun->decl);
4499 arg; arg = DECL_CHAIN (arg), ++i)
4501 if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
4502 break;
4503 if (TREE_STRING_POINTER (fnspec)[i] == 'R'
4504 || TREE_STRING_POINTER (fnspec)[i] == 'r')
4506 tree name = ssa_default_def (cfun, arg);
4507 if (name)
4508 bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
4513 /* Mark all edges as possibly executable. */
4514 FOR_ALL_BB_FN (bb, cfun)
4516 edge_iterator ei;
4517 edge e;
4518 FOR_EACH_EDGE (e, ei, bb->succs)
4519 e->flags |= EDGE_EXECUTABLE;
4522 /* Walk all blocks in dominator order, value-numbering stmts
4523 SSA defs and decide whether outgoing edges are not executable. */
4524 sccvn_dom_walker walker;
4525 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4526 if (walker.fail)
4528 free_scc_vn ();
4529 return false;
4532 /* Initialize the value ids and prune out remaining VN_TOPs
4533 from dead code. */
4534 for (i = 1; i < num_ssa_names; ++i)
4536 tree name = ssa_name (i);
4537 vn_ssa_aux_t info;
4538 if (!name)
4539 continue;
4540 info = VN_INFO (name);
4541 if (!info->visited)
4542 info->valnum = name;
4543 if (info->valnum == name
4544 || info->valnum == VN_TOP)
4545 info->value_id = get_next_value_id ();
4546 else if (is_gimple_min_invariant (info->valnum))
4547 info->value_id = get_or_alloc_constant_value_id (info->valnum);
4550 /* Propagate. */
4551 for (i = 1; i < num_ssa_names; ++i)
4553 tree name = ssa_name (i);
4554 vn_ssa_aux_t info;
4555 if (!name)
4556 continue;
4557 info = VN_INFO (name);
4558 if (TREE_CODE (info->valnum) == SSA_NAME
4559 && info->valnum != name
4560 && info->value_id != VN_INFO (info->valnum)->value_id)
4561 info->value_id = VN_INFO (info->valnum)->value_id;
4564 set_hashtable_value_ids ();
4566 if (dump_file && (dump_flags & TDF_DETAILS))
4568 fprintf (dump_file, "Value numbers:\n");
4569 for (i = 0; i < num_ssa_names; i++)
4571 tree name = ssa_name (i);
4572 if (name
4573 && VN_INFO (name)->visited
4574 && SSA_VAL (name) != name)
4576 print_generic_expr (dump_file, name, 0);
4577 fprintf (dump_file, " = ");
4578 print_generic_expr (dump_file, SSA_VAL (name), 0);
4579 fprintf (dump_file, "\n");
4584 return true;
4587 /* Return the maximum value id we have ever seen. */
4589 unsigned int
4590 get_max_value_id (void)
4592 return next_value_id;
4595 /* Return the next unique value id. */
4597 unsigned int
4598 get_next_value_id (void)
4600 return next_value_id++;
4604 /* Compare two expressions E1 and E2 and return true if they are equal. */
4606 bool
4607 expressions_equal_p (tree e1, tree e2)
4609 /* The obvious case. */
4610 if (e1 == e2)
4611 return true;
4613 /* If only one of them is null, they cannot be equal. */
4614 if (!e1 || !e2)
4615 return false;
4617 /* Now perform the actual comparison. */
4618 if (TREE_CODE (e1) == TREE_CODE (e2)
4619 && operand_equal_p (e1, e2, OEP_PURE_SAME))
4620 return true;
4622 return false;
4626 /* Return true if the nary operation NARY may trap. This is a copy
4627 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
4629 bool
4630 vn_nary_may_trap (vn_nary_op_t nary)
4632 tree type;
4633 tree rhs2 = NULL_TREE;
4634 bool honor_nans = false;
4635 bool honor_snans = false;
4636 bool fp_operation = false;
4637 bool honor_trapv = false;
4638 bool handled, ret;
4639 unsigned i;
4641 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4642 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4643 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4645 type = nary->type;
4646 fp_operation = FLOAT_TYPE_P (type);
4647 if (fp_operation)
4649 honor_nans = flag_trapping_math && !flag_finite_math_only;
4650 honor_snans = flag_signaling_nans != 0;
4652 else if (INTEGRAL_TYPE_P (type)
4653 && TYPE_OVERFLOW_TRAPS (type))
4654 honor_trapv = true;
4656 if (nary->length >= 2)
4657 rhs2 = nary->op[1];
4658 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4659 honor_trapv,
4660 honor_nans, honor_snans, rhs2,
4661 &handled);
4662 if (handled
4663 && ret)
4664 return true;
4666 for (i = 0; i < nary->length; ++i)
4667 if (tree_could_trap_p (nary->op[i]))
4668 return true;
4670 return false;