[official-gcc.git] / gcc / tree-ssa-sccvn.c
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2016 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "emit-rtl.h"
33 #include "cgraph.h"
34 #include "gimple-pretty-print.h"
35 #include "alias.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "cfganal.h"
39 #include "tree-inline.h"
40 #include "internal-fn.h"
41 #include "gimple-fold.h"
42 #include "tree-eh.h"
43 #include "gimplify.h"
44 #include "flags.h"
45 #include "dojump.h"
46 #include "explow.h"
47 #include "calls.h"
48 #include "varasm.h"
49 #include "stmt.h"
50 #include "expr.h"
51 #include "tree-dfa.h"
52 #include "tree-ssa.h"
53 #include "dumpfile.h"
54 #include "cfgloop.h"
55 #include "params.h"
56 #include "tree-ssa-propagate.h"
57 #include "tree-ssa-sccvn.h"
58 #include "tree-cfg.h"
59 #include "domwalk.h"
60 #include "gimple-iterator.h"
61 #include "gimple-match.h"
63 /* This algorithm is based on the SCC algorithm presented by Keith
64    Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
65    (http://citeseer.ist.psu.edu/41805.html).  In
66    straight-line code, it is equivalent to a regular hash-based value
67    numbering that is performed in reverse postorder.
69 For code with cycles, there are two alternatives, both of which
70 require keeping the hashtables separate from the actual list of
71 value numbers for SSA names.
73 1. Iterate value numbering in an RPO walk of the blocks, removing
74 all the entries from the hashtable after each iteration (but
75 keeping the SSA name->value number mapping between iterations).
76 Iterate until it does not change.
78 2. Perform value numbering as part of an SCC walk on the SSA graph,
79 iterating only the cycles in the SSA graph until they do not change
80 (using a separate, optimistic hashtable for value numbering the SCC
81 operands).
83 The second is not just faster in practice (because most SSA graph
84 cycles do not involve all the variables in the graph), it also has
85 some nice properties.
87 One of these nice properties is that when we pop an SCC off the
88 stack, we are guaranteed to have processed all the operands coming from
89 *outside of that SCC*, so we do not need to do anything special to
90 ensure they have value numbers.
92 Another nice property is that the SCC walk is done as part of a DFS
93 of the SSA graph, which makes it easy to perform combining and
94 simplifying operations at the same time.
96 The code below is deliberately written in a way that makes it easy
97 to separate the SCC walk from the other work it does.
99 In order to propagate constants through the code, we track which
100 expressions contain constants, and use those while folding. In
101 theory, we could also track expressions whose value numbers are
102 replaced, in case we end up folding based on expression
103 identities.
105 In order to value number memory, we assign value numbers to vuses.
106 This enables us to note that, for example, stores to the same
107 address of the same value from the same starting memory states are
108 equivalent.
109 TODO:
111    1. We can iterate only the changing portions of the SCCs, but
112 I have not seen an SCC big enough for this to be a win.
113 2. If you differentiate between phi nodes for loops and phi nodes
114 for if-then-else, you can properly consider phi nodes in different
115 blocks for equivalence.
116    3. We could value number vuses in more cases, particularly whole
117 structure copies.
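/* As an illustrative sketch of the SCC iteration described above
   (names below are made up):

     # i_1 = PHI <0(entry), i_3(latch)>
     # j_2 = PHI <0(entry), j_4(latch)>
     i_3 = i_1 + 1;
     j_4 = j_2 + 1;

   the names i_1, i_3, j_2 and j_4 form a single SCC in the SSA graph.
   The optimistic iteration first assumes the two PHIs are equal,
   re-values the additions under that assumption, and since nothing
   changes it proves i_1 == j_2 and i_3 == j_4.  Similarly, two loads
   from the same address that see the same VUSE obtain the same value
   number, which is what assigning value numbers to vuses buys us.  */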
121 static tree *last_vuse_ptr;
122 static vn_lookup_kind vn_walk_kind;
123 static vn_lookup_kind default_vn_walk_kind;
124 bitmap const_parms;
126 /* vn_nary_op hashtable helpers. */
128 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
130 typedef vn_nary_op_s *compare_type;
131 static inline hashval_t hash (const vn_nary_op_s *);
132 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
135 /* Return the computed hashcode for nary operation P1. */
137 inline hashval_t
138 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
140 return vno1->hashcode;
143 /* Compare nary operations P1 and P2 and return true if they are
144 equivalent. */
146 inline bool
147 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
149 return vn_nary_op_eq (vno1, vno2);
152 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
153 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
156 /* vn_phi hashtable helpers. */
158 static int
159 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
161 struct vn_phi_hasher : pointer_hash <vn_phi_s>
163 static inline hashval_t hash (const vn_phi_s *);
164 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
165 static inline void remove (vn_phi_s *);
168 /* Return the computed hashcode for phi operation P1. */
170 inline hashval_t
171 vn_phi_hasher::hash (const vn_phi_s *vp1)
173 return vp1->hashcode;
176 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
178 inline bool
179 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
181 return vn_phi_eq (vp1, vp2);
184 /* Free a phi operation structure PHI. */
186 inline void
187 vn_phi_hasher::remove (vn_phi_s *phi)
189 phi->phiargs.release ();
192 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
193 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
196 /* Compare two reference operands P1 and P2 for equality. Return true if
197 they are equal, and false otherwise. */
199 static int
200 vn_reference_op_eq (const void *p1, const void *p2)
202 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
203 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
205 return (vro1->opcode == vro2->opcode
206 /* We do not care for differences in type qualification. */
207 && (vro1->type == vro2->type
208 || (vro1->type && vro2->type
209 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
210 TYPE_MAIN_VARIANT (vro2->type))))
211 && expressions_equal_p (vro1->op0, vro2->op0)
212 && expressions_equal_p (vro1->op1, vro2->op1)
213 && expressions_equal_p (vro1->op2, vro2->op2));
216 /* Free a reference operation structure VR. */
218 static inline void
219 free_reference (vn_reference_s *vr)
221 vr->operands.release ();
225 /* vn_reference hashtable helpers. */
227 struct vn_reference_hasher : pointer_hash <vn_reference_s>
229 static inline hashval_t hash (const vn_reference_s *);
230 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
231 static inline void remove (vn_reference_s *);
234 /* Return the hashcode for a given reference operation P1. */
236 inline hashval_t
237 vn_reference_hasher::hash (const vn_reference_s *vr1)
239 return vr1->hashcode;
242 inline bool
243 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
245 return vn_reference_eq (v, c);
248 inline void
249 vn_reference_hasher::remove (vn_reference_s *v)
251 free_reference (v);
254 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
255 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
258 /* The set of hashtables and alloc_pool's for their items. */
260 typedef struct vn_tables_s
262 vn_nary_op_table_type *nary;
263 vn_phi_table_type *phis;
264 vn_reference_table_type *references;
265 struct obstack nary_obstack;
266 object_allocator<vn_phi_s> *phis_pool;
267 object_allocator<vn_reference_s> *references_pool;
268 } *vn_tables_t;
271 /* vn_constant hashtable helpers. */
273 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
275 static inline hashval_t hash (const vn_constant_s *);
276 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
279 /* Hash table hash function for vn_constant_t. */
281 inline hashval_t
282 vn_constant_hasher::hash (const vn_constant_s *vc1)
284 return vc1->hashcode;
287 /* Hash table equality function for vn_constant_t. */
289 inline bool
290 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
292 if (vc1->hashcode != vc2->hashcode)
293 return false;
295 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
298 static hash_table<vn_constant_hasher> *constant_to_value_id;
299 static bitmap constant_value_ids;
302 /* Valid hashtables storing information we have proven to be
303 correct. */
305 static vn_tables_t valid_info;
307 /* Optimistic hashtables storing information we are making assumptions about
308 during iterations. */
310 static vn_tables_t optimistic_info;
312 /* Pointer to the set of hashtables that is currently being used.
313 Should always point to either the optimistic_info, or the
314 valid_info. */
316 static vn_tables_t current_info;
319 /* Reverse post order index for each basic block. */
321 static int *rpo_numbers;
323 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
325 /* Return the SSA value of the VUSE x, supporting released VDEFs
326 during elimination which will value-number the VDEF to the
327 associated VUSE (but not substitute in the whole lattice). */
329 static inline tree
330 vuse_ssa_val (tree x)
332 if (!x)
333 return NULL_TREE;
337   do x = SSA_VAL (x);
339 while (SSA_NAME_IN_FREE_LIST (x));
341 return x;
344 /* This represents the top of the VN lattice, which is the universal
345 value. */
347 tree VN_TOP;
349 /* Unique counter for our value ids. */
351 static unsigned int next_value_id;
353 /* Next DFS number and the stack for strongly connected component
354 detection. */
356 static unsigned int next_dfs_num;
357 static vec<tree> sccstack;
361 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
362 are allocated on an obstack for locality reasons, and to free them
363 without looping over the vec. */
365 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
366 static struct obstack vn_ssa_aux_obstack;
368 /* Return whether there is value numbering information for a given SSA name. */
370 bool
371 has_VN_INFO (tree name)
373 if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
374 return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
375 return false;
378 /* Return the value numbering information for a given SSA name. */
380 vn_ssa_aux_t
381 VN_INFO (tree name)
383 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
384 gcc_checking_assert (res);
385 return res;
388 /* Set the value numbering info for a given SSA name to a given
389 value. */
391 static inline void
392 VN_INFO_SET (tree name, vn_ssa_aux_t value)
394 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
397 /* Initialize the value numbering info for a given SSA name.
398 This should be called just once for every SSA name. */
400 vn_ssa_aux_t
401 VN_INFO_GET (tree name)
403 vn_ssa_aux_t newinfo;
405 gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
406 || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
407 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
408 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
409 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
410 vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
411 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
412 return newinfo;
416 /* Return the vn_kind the expression computed by the stmt should be
417 associated with. */
419 enum vn_kind
420 vn_get_stmt_kind (gimple *stmt)
422 switch (gimple_code (stmt))
424 case GIMPLE_CALL:
425 return VN_REFERENCE;
426 case GIMPLE_PHI:
427 return VN_PHI;
428 case GIMPLE_ASSIGN:
430 enum tree_code code = gimple_assign_rhs_code (stmt);
431 tree rhs1 = gimple_assign_rhs1 (stmt);
432 switch (get_gimple_rhs_class (code))
434 case GIMPLE_UNARY_RHS:
435 case GIMPLE_BINARY_RHS:
436 case GIMPLE_TERNARY_RHS:
437 return VN_NARY;
438 case GIMPLE_SINGLE_RHS:
439 switch (TREE_CODE_CLASS (code))
441 case tcc_reference:
442 /* VOP-less references can go through unary case. */
443 if ((code == REALPART_EXPR
444 || code == IMAGPART_EXPR
445 || code == VIEW_CONVERT_EXPR
446 || code == BIT_FIELD_REF)
447 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
448 return VN_NARY;
450 /* Fallthrough. */
451 case tcc_declaration:
452 return VN_REFERENCE;
454 case tcc_constant:
455 return VN_CONSTANT;
457 default:
458 if (code == ADDR_EXPR)
459 return (is_gimple_min_invariant (rhs1)
460 ? VN_CONSTANT : VN_REFERENCE);
461 else if (code == CONSTRUCTOR)
462 return VN_NARY;
463 return VN_NONE;
465 default:
466 return VN_NONE;
469 default:
470 return VN_NONE;
474 /* Lookup a value id for CONSTANT and return it. If it does not
475 exist returns 0. */
477 unsigned int
478 get_constant_value_id (tree constant)
480 vn_constant_s **slot;
481 struct vn_constant_s vc;
483 vc.hashcode = vn_hash_constant_with_type (constant);
484 vc.constant = constant;
485 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
486 if (slot)
487 return (*slot)->value_id;
488 return 0;
491 /* Lookup a value id for CONSTANT, and if it does not exist, create a
492 new one and return it. If it does exist, return it. */
494 unsigned int
495 get_or_alloc_constant_value_id (tree constant)
497 vn_constant_s **slot;
498 struct vn_constant_s vc;
499 vn_constant_t vcp;
501 vc.hashcode = vn_hash_constant_with_type (constant);
502 vc.constant = constant;
503 slot = constant_to_value_id->find_slot (&vc, INSERT);
504 if (*slot)
505 return (*slot)->value_id;
507 vcp = XNEW (struct vn_constant_s);
508 vcp->hashcode = vc.hashcode;
509 vcp->constant = constant;
510 vcp->value_id = get_next_value_id ();
511 *slot = vcp;
512 bitmap_set_bit (constant_value_ids, vcp->value_id);
513 return vcp->value_id;
516 /* Return true if V is a value id for a constant. */
518 bool
519 value_id_constant_p (unsigned int v)
521 return bitmap_bit_p (constant_value_ids, v);
524 /* Compute the hash for a reference operand VRO1. */
526 static void
527 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
529 hstate.add_int (vro1->opcode);
530 if (vro1->op0)
531 inchash::add_expr (vro1->op0, hstate);
532 if (vro1->op1)
533 inchash::add_expr (vro1->op1, hstate);
534 if (vro1->op2)
535 inchash::add_expr (vro1->op2, hstate);
538 /* Compute a hash for the reference operation VR1 and return it. */
540 static hashval_t
541 vn_reference_compute_hash (const vn_reference_t vr1)
543 inchash::hash hstate;
544 hashval_t result;
545 int i;
546 vn_reference_op_t vro;
547 HOST_WIDE_INT off = -1;
548 bool deref = false;
550 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
552 if (vro->opcode == MEM_REF)
553 deref = true;
554 else if (vro->opcode != ADDR_EXPR)
555 deref = false;
556 if (vro->off != -1)
558 if (off == -1)
559 off = 0;
560 off += vro->off;
562 else
564 if (off != -1
565 && off != 0)
566 hstate.add_int (off);
567 off = -1;
568 if (deref
569 && vro->opcode == ADDR_EXPR)
571 if (vro->op0)
573 tree op = TREE_OPERAND (vro->op0, 0);
574 hstate.add_int (TREE_CODE (op));
575 inchash::add_expr (op, hstate);
578 else
579 vn_reference_op_compute_hash (vro, hstate);
582 result = hstate.end ();
583 /* ??? We would ICE later if we hash instead of adding that in. */
584 if (vr1->vuse)
585 result += SSA_NAME_VERSION (vr1->vuse);
587 return result;
590 /* Return true if reference operations VR1 and VR2 are equivalent. This
591 means they have the same set of operands and vuses. */
593 bool
594 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
596 unsigned i, j;
598 /* Early out if this is not a hash collision. */
599 if (vr1->hashcode != vr2->hashcode)
600 return false;
602 /* The VOP needs to be the same. */
603 if (vr1->vuse != vr2->vuse)
604 return false;
606 /* If the operands are the same we are done. */
607 if (vr1->operands == vr2->operands)
608 return true;
610 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
611 return false;
613 if (INTEGRAL_TYPE_P (vr1->type)
614 && INTEGRAL_TYPE_P (vr2->type))
616 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
617 return false;
619 else if (INTEGRAL_TYPE_P (vr1->type)
620 && (TYPE_PRECISION (vr1->type)
621 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
622 return false;
623 else if (INTEGRAL_TYPE_P (vr2->type)
624 && (TYPE_PRECISION (vr2->type)
625 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
626 return false;
628 i = 0;
629 j = 0;
632 HOST_WIDE_INT off1 = 0, off2 = 0;
633 vn_reference_op_t vro1, vro2;
634 vn_reference_op_s tem1, tem2;
635 bool deref1 = false, deref2 = false;
636 for (; vr1->operands.iterate (i, &vro1); i++)
638 if (vro1->opcode == MEM_REF)
639 deref1 = true;
640 /* Do not look through a storage order barrier. */
641 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
642 return false;
643 if (vro1->off == -1)
644 break;
645 off1 += vro1->off;
647 for (; vr2->operands.iterate (j, &vro2); j++)
649 if (vro2->opcode == MEM_REF)
650 deref2 = true;
651 /* Do not look through a storage order barrier. */
652 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
653 return false;
654 if (vro2->off == -1)
655 break;
656 off2 += vro2->off;
658 if (off1 != off2)
659 return false;
660 if (deref1 && vro1->opcode == ADDR_EXPR)
662 memset (&tem1, 0, sizeof (tem1));
663 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
664 tem1.type = TREE_TYPE (tem1.op0);
665 tem1.opcode = TREE_CODE (tem1.op0);
666 vro1 = &tem1;
667 deref1 = false;
669 if (deref2 && vro2->opcode == ADDR_EXPR)
671 memset (&tem2, 0, sizeof (tem2));
672 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
673 tem2.type = TREE_TYPE (tem2.op0);
674 tem2.opcode = TREE_CODE (tem2.op0);
675 vro2 = &tem2;
676 deref2 = false;
678 if (deref1 != deref2)
679 return false;
680 if (!vn_reference_op_eq (vro1, vro2))
681 return false;
682 ++j;
683 ++i;
685 while (vr1->operands.length () != i
686 || vr2->operands.length () != j);
688 return true;
691 /* Copy the operations present in load/store REF into RESULT, a vector of
692 vn_reference_op_s's. */
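/* As an illustrative sketch (names made up), a reference like a.b[i_1]
   is decomposed roughly into the operand vector
     { ARRAY_REF (op0 = i_1, op1 = lower bound, op2 = element size),
       COMPONENT_REF (op0 = FIELD_DECL for b),
       MEM_REF (op0 = 0),
       ADDR_EXPR (&a) }
   with the decl canonicalized to MEM[&a] as done below.  */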
694 static void
695 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
697 if (TREE_CODE (ref) == TARGET_MEM_REF)
699 vn_reference_op_s temp;
701 result->reserve (3);
703 memset (&temp, 0, sizeof (temp));
704 temp.type = TREE_TYPE (ref);
705 temp.opcode = TREE_CODE (ref);
706 temp.op0 = TMR_INDEX (ref);
707 temp.op1 = TMR_STEP (ref);
708 temp.op2 = TMR_OFFSET (ref);
709 temp.off = -1;
710 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
711 temp.base = MR_DEPENDENCE_BASE (ref);
712 result->quick_push (temp);
714 memset (&temp, 0, sizeof (temp));
715 temp.type = NULL_TREE;
716 temp.opcode = ERROR_MARK;
717 temp.op0 = TMR_INDEX2 (ref);
718 temp.off = -1;
719 result->quick_push (temp);
721 memset (&temp, 0, sizeof (temp));
722 temp.type = NULL_TREE;
723 temp.opcode = TREE_CODE (TMR_BASE (ref));
724 temp.op0 = TMR_BASE (ref);
725 temp.off = -1;
726 result->quick_push (temp);
727 return;
730 /* For non-calls, store the information that makes up the address. */
731 tree orig = ref;
732 while (ref)
734 vn_reference_op_s temp;
736 memset (&temp, 0, sizeof (temp));
737 temp.type = TREE_TYPE (ref);
738 temp.opcode = TREE_CODE (ref);
739 temp.off = -1;
741 switch (temp.opcode)
743 case MODIFY_EXPR:
744 temp.op0 = TREE_OPERAND (ref, 1);
745 break;
746 case WITH_SIZE_EXPR:
747 temp.op0 = TREE_OPERAND (ref, 1);
748 temp.off = 0;
749 break;
750 case MEM_REF:
751 /* The base address gets its own vn_reference_op_s structure. */
752 temp.op0 = TREE_OPERAND (ref, 1);
754 offset_int off = mem_ref_offset (ref);
755 if (wi::fits_shwi_p (off))
756 temp.off = off.to_shwi ();
758 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
759 temp.base = MR_DEPENDENCE_BASE (ref);
760 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
761 break;
762 case BIT_FIELD_REF:
763 /* Record bits, position and storage order. */
764 temp.op0 = TREE_OPERAND (ref, 1);
765 temp.op1 = TREE_OPERAND (ref, 2);
766 if (tree_fits_shwi_p (TREE_OPERAND (ref, 2)))
768 HOST_WIDE_INT off = tree_to_shwi (TREE_OPERAND (ref, 2));
769 if (off % BITS_PER_UNIT == 0)
770 temp.off = off / BITS_PER_UNIT;
772 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
773 break;
774 case COMPONENT_REF:
775 /* The field decl is enough to unambiguously specify the field;
776 a matching type is not necessary, and a mismatching type
777 is always a spurious difference. */
778 temp.type = NULL_TREE;
779 temp.op0 = TREE_OPERAND (ref, 1);
780 temp.op1 = TREE_OPERAND (ref, 2);
782 tree this_offset = component_ref_field_offset (ref);
783 if (this_offset
784 && TREE_CODE (this_offset) == INTEGER_CST)
786 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
787 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
789 offset_int off
790 = (wi::to_offset (this_offset)
791 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
792 if (wi::fits_shwi_p (off)
793 /* Prohibit value-numbering zero offset components
794 of addresses the same before the pass folding
795 __builtin_object_size had a chance to run
796 (checking cfun->after_inlining does the
797 trick here). */
798 && (TREE_CODE (orig) != ADDR_EXPR
799 || off != 0
800 || cfun->after_inlining))
801 temp.off = off.to_shwi ();
805 break;
806 case ARRAY_RANGE_REF:
807 case ARRAY_REF:
809 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
810 /* Record index as operand. */
811 temp.op0 = TREE_OPERAND (ref, 1);
812 /* Always record lower bounds and element size. */
813 temp.op1 = array_ref_low_bound (ref);
814 /* But record element size in units of the type alignment. */
815 temp.op2 = TREE_OPERAND (ref, 3);
816 temp.align = eltype->type_common.align;
817 if (! temp.op2)
818 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
819 size_int (TYPE_ALIGN_UNIT (eltype)));
820 if (TREE_CODE (temp.op0) == INTEGER_CST
821 && TREE_CODE (temp.op1) == INTEGER_CST
822 && TREE_CODE (temp.op2) == INTEGER_CST)
824 offset_int off = ((wi::to_offset (temp.op0)
825 - wi::to_offset (temp.op1))
826 * wi::to_offset (temp.op2)
827 * vn_ref_op_align_unit (&temp));
828 if (wi::fits_shwi_p (off))
829 temp.off = off.to_shwi();
832 break;
833 case VAR_DECL:
834 if (DECL_HARD_REGISTER (ref))
836 temp.op0 = ref;
837 break;
839 /* Fallthru. */
840 case PARM_DECL:
841 case CONST_DECL:
842 case RESULT_DECL:
843 /* Canonicalize decls to MEM[&decl] which is what we end up with
844 when valueizing MEM[ptr] with ptr = &decl. */
845 temp.opcode = MEM_REF;
846 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
847 temp.off = 0;
848 result->safe_push (temp);
849 temp.opcode = ADDR_EXPR;
850 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
851 temp.type = TREE_TYPE (temp.op0);
852 temp.off = -1;
853 break;
854 case STRING_CST:
855 case INTEGER_CST:
856 case COMPLEX_CST:
857 case VECTOR_CST:
858 case REAL_CST:
859 case FIXED_CST:
860 case CONSTRUCTOR:
861 case SSA_NAME:
862 temp.op0 = ref;
863 break;
864 case ADDR_EXPR:
865 if (is_gimple_min_invariant (ref))
867 temp.op0 = ref;
868 break;
870 break;
871 /* These are only interesting for their operands, their
872 existence, and their type. They will never be the last
873 ref in the chain of references (i.e. they require an
874 operand), so we don't have to put anything
875 for op* as it will be handled by the iteration. */
876 case REALPART_EXPR:
877 temp.off = 0;
878 break;
879 case VIEW_CONVERT_EXPR:
880 temp.off = 0;
881 temp.reverse = storage_order_barrier_p (ref);
882 break;
883 case IMAGPART_EXPR:
884 /* This is only interesting for its constant offset. */
885 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
886 break;
887 default:
888 gcc_unreachable ();
890 result->safe_push (temp);
892 if (REFERENCE_CLASS_P (ref)
893 || TREE_CODE (ref) == MODIFY_EXPR
894 || TREE_CODE (ref) == WITH_SIZE_EXPR
895 || (TREE_CODE (ref) == ADDR_EXPR
896 && !is_gimple_min_invariant (ref)))
897 ref = TREE_OPERAND (ref, 0);
898 else
899 ref = NULL_TREE;
903 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
904 operands in *OPS, the reference alias set SET and the reference type TYPE.
905 Return true if something useful was produced. */
907 bool
908 ao_ref_init_from_vn_reference (ao_ref *ref,
909 alias_set_type set, tree type,
910 vec<vn_reference_op_s> ops)
912 vn_reference_op_t op;
913 unsigned i;
914 tree base = NULL_TREE;
915 tree *op0_p = &base;
916 offset_int offset = 0;
917 offset_int max_size;
918 offset_int size = -1;
919 tree size_tree = NULL_TREE;
920 alias_set_type base_alias_set = -1;
922 /* First get the final access size from just the outermost expression. */
923 op = &ops[0];
924 if (op->opcode == COMPONENT_REF)
925 size_tree = DECL_SIZE (op->op0);
926 else if (op->opcode == BIT_FIELD_REF)
927 size_tree = op->op0;
928 else
930 machine_mode mode = TYPE_MODE (type);
931 if (mode == BLKmode)
932 size_tree = TYPE_SIZE (type);
933 else
934 size = int (GET_MODE_BITSIZE (mode));
936 if (size_tree != NULL_TREE
937 && TREE_CODE (size_tree) == INTEGER_CST)
938 size = wi::to_offset (size_tree);
940 /* Initially, maxsize is the same as the accessed element size.
941 In the following it will only grow (or become -1). */
942 max_size = size;
944 /* Compute cumulative bit-offset for nested component-refs and array-refs,
945 and find the ultimate containing object. */
946 FOR_EACH_VEC_ELT (ops, i, op)
948 switch (op->opcode)
950 /* These may be in the reference ops, but we cannot do anything
951 sensible with them here. */
952 case ADDR_EXPR:
953 /* Apart from ADDR_EXPR arguments to MEM_REF. */
954 if (base != NULL_TREE
955 && TREE_CODE (base) == MEM_REF
956 && op->op0
957 && DECL_P (TREE_OPERAND (op->op0, 0)))
959 vn_reference_op_t pop = &ops[i-1];
960 base = TREE_OPERAND (op->op0, 0);
961 if (pop->off == -1)
963 max_size = -1;
964 offset = 0;
966 else
967 offset += pop->off * BITS_PER_UNIT;
968 op0_p = NULL;
969 break;
971 /* Fallthru. */
972 case CALL_EXPR:
973 return false;
975 /* Record the base objects. */
976 case MEM_REF:
977 base_alias_set = get_deref_alias_set (op->op0);
978 *op0_p = build2 (MEM_REF, op->type,
979 NULL_TREE, op->op0);
980 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
981 MR_DEPENDENCE_BASE (*op0_p) = op->base;
982 op0_p = &TREE_OPERAND (*op0_p, 0);
983 break;
985 case VAR_DECL:
986 case PARM_DECL:
987 case RESULT_DECL:
988 case SSA_NAME:
989 *op0_p = op->op0;
990 op0_p = NULL;
991 break;
993 /* And now the usual component-reference style ops. */
994 case BIT_FIELD_REF:
995 offset += wi::to_offset (op->op1);
996 break;
998 case COMPONENT_REF:
1000 tree field = op->op0;
1001 /* We do not have a complete COMPONENT_REF tree here so we
1002 cannot use component_ref_field_offset. Do the interesting
1003 parts manually. */
1004 tree this_offset = DECL_FIELD_OFFSET (field);
1006 if (op->op1 || TREE_CODE (this_offset) != INTEGER_CST)
1007 max_size = -1;
1008 else
1010 offset_int woffset = (wi::to_offset (this_offset)
1011 << LOG2_BITS_PER_UNIT);
1012 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1013 offset += woffset;
1015 break;
1018 case ARRAY_RANGE_REF:
1019 case ARRAY_REF:
1020 /* We recorded the lower bound and the element size. */
1021 if (TREE_CODE (op->op0) != INTEGER_CST
1022 || TREE_CODE (op->op1) != INTEGER_CST
1023 || TREE_CODE (op->op2) != INTEGER_CST)
1024 max_size = -1;
1025 else
1027 offset_int woffset
1028 = wi::sext (wi::to_offset (op->op0) - wi::to_offset (op->op1),
1029 TYPE_PRECISION (TREE_TYPE (op->op0)));
1030 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1031 woffset <<= LOG2_BITS_PER_UNIT;
1032 offset += woffset;
1034 break;
1036 case REALPART_EXPR:
1037 break;
1039 case IMAGPART_EXPR:
1040 offset += size;
1041 break;
1043 case VIEW_CONVERT_EXPR:
1044 break;
1046 case STRING_CST:
1047 case INTEGER_CST:
1048 case COMPLEX_CST:
1049 case VECTOR_CST:
1050 case REAL_CST:
1051 case CONSTRUCTOR:
1052 case CONST_DECL:
1053 return false;
1055 default:
1056 return false;
1060 if (base == NULL_TREE)
1061 return false;
1063 ref->ref = NULL_TREE;
1064 ref->base = base;
1065 ref->ref_alias_set = set;
1066 if (base_alias_set != -1)
1067 ref->base_alias_set = base_alias_set;
1068 else
1069 ref->base_alias_set = get_alias_set (base);
1070 /* We discount volatiles from value-numbering elsewhere. */
1071 ref->volatile_p = false;
1073 if (!wi::fits_shwi_p (size) || wi::neg_p (size))
1075 ref->offset = 0;
1076 ref->size = -1;
1077 ref->max_size = -1;
1078 return true;
1081 ref->size = size.to_shwi ();
1083 if (!wi::fits_shwi_p (offset))
1085 ref->offset = 0;
1086 ref->max_size = -1;
1087 return true;
1090 ref->offset = offset.to_shwi ();
1092 if (!wi::fits_shwi_p (max_size) || wi::neg_p (max_size))
1093 ref->max_size = -1;
1094 else
1095 ref->max_size = max_size.to_shwi ();
1097 return true;
1100 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1101 vn_reference_op_s's. */
1103 static void
1104 copy_reference_ops_from_call (gcall *call,
1105 vec<vn_reference_op_s> *result)
1107 vn_reference_op_s temp;
1108 unsigned i;
1109 tree lhs = gimple_call_lhs (call);
1110 int lr;
1112 /* If two calls have a different non-SSA lhs, vdef value numbers should be
1113 different. By adding the lhs to the vector here, we ensure that the
1114 hashcode is different, guaranteeing a different value number. */
1115 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1117 memset (&temp, 0, sizeof (temp));
1118 temp.opcode = MODIFY_EXPR;
1119 temp.type = TREE_TYPE (lhs);
1120 temp.op0 = lhs;
1121 temp.off = -1;
1122 result->safe_push (temp);
1125 /* Copy the type, opcode, function, static chain and EH region, if any. */
1126 memset (&temp, 0, sizeof (temp));
1127 temp.type = gimple_call_return_type (call);
1128 temp.opcode = CALL_EXPR;
1129 temp.op0 = gimple_call_fn (call);
1130 temp.op1 = gimple_call_chain (call);
1131 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1132 temp.op2 = size_int (lr);
1133 temp.off = -1;
1134 if (gimple_call_with_bounds_p (call))
1135 temp.with_bounds = 1;
1136 result->safe_push (temp);
1138 /* Copy the call arguments. As they can be references as well,
1139 just chain them together. */
1140 for (i = 0; i < gimple_call_num_args (call); ++i)
1142 tree callarg = gimple_call_arg (call, i);
1143 copy_reference_ops_from_ref (callarg, result);
1147 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1148 *I_P to point to the last element of the replacement. */
1149 static bool
1150 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1151 unsigned int *i_p)
1153 unsigned int i = *i_p;
1154 vn_reference_op_t op = &(*ops)[i];
1155 vn_reference_op_t mem_op = &(*ops)[i - 1];
1156 tree addr_base;
1157 HOST_WIDE_INT addr_offset = 0;
1159 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1160 from .foo.bar to the preceding MEM_REF offset and replace the
1161 address with &OBJ. */
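  /* For instance (an illustrative sketch): for MEM[&s.a.b, 4] where
     .a.b lives at byte offset 8 within s, the result is MEM[&s, 12],
     i.e. the component offset is folded into the constant MEM_REF
     offset and the address operand becomes &s.  */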
1162 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1163 &addr_offset);
1164 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1165 if (addr_base != TREE_OPERAND (op->op0, 0))
1167 offset_int off = offset_int::from (mem_op->op0, SIGNED);
1168 off += addr_offset;
1169 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1170 op->op0 = build_fold_addr_expr (addr_base);
1171 if (tree_fits_shwi_p (mem_op->op0))
1172 mem_op->off = tree_to_shwi (mem_op->op0);
1173 else
1174 mem_op->off = -1;
1175 return true;
1177 return false;
1180 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1181 *I_P to point to the last element of the replacement. */
1182 static bool
1183 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1184 unsigned int *i_p)
1186 unsigned int i = *i_p;
1187 vn_reference_op_t op = &(*ops)[i];
1188 vn_reference_op_t mem_op = &(*ops)[i - 1];
1189 gimple *def_stmt;
1190 enum tree_code code;
1191 offset_int off;
1193 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1194 if (!is_gimple_assign (def_stmt))
1195 return false;
1197 code = gimple_assign_rhs_code (def_stmt);
1198 if (code != ADDR_EXPR
1199 && code != POINTER_PLUS_EXPR)
1200 return false;
1202 off = offset_int::from (mem_op->op0, SIGNED);
1204 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1205 from .foo.bar to the preceding MEM_REF offset and replace the
1206 address with &OBJ. */
1207 if (code == ADDR_EXPR)
1209 tree addr, addr_base;
1210 HOST_WIDE_INT addr_offset;
1212 addr = gimple_assign_rhs1 (def_stmt);
1213 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1214 &addr_offset);
1215 /* If that didn't work because the address isn't invariant, propagate
1216 the reference tree from the address operation in case the current
1217 dereference has no offset. */
1218 if (!addr_base
1219 && *i_p == ops->length () - 1
1220 && off == 0
1221 /* This makes us disable this transform for PRE, where the
1222 reference ops might also be used for code insertion, which
1223 is invalid. */
1224 && default_vn_walk_kind == VN_WALKREWRITE)
1226 auto_vec<vn_reference_op_s, 32> tem;
1227 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1228 /* Make sure to preserve TBAA info. The only objects not
1229 wrapped in MEM_REFs that can have their address taken are
1230 STRING_CSTs. */
1231 if (tem.length () >= 2
1232 && tem[tem.length () - 2].opcode == MEM_REF)
1234 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1235 new_mem_op->op0 = fold_convert (TREE_TYPE (mem_op->op0),
1236 new_mem_op->op0);
1238 else
1239 gcc_assert (tem.last ().opcode == STRING_CST);
1240 ops->pop ();
1241 ops->pop ();
1242 ops->safe_splice (tem);
1243 --*i_p;
1244 return true;
1246 if (!addr_base
1247 || TREE_CODE (addr_base) != MEM_REF)
1248 return false;
1250 off += addr_offset;
1251 off += mem_ref_offset (addr_base);
1252 op->op0 = TREE_OPERAND (addr_base, 0);
1254 else
1256 tree ptr, ptroff;
1257 ptr = gimple_assign_rhs1 (def_stmt);
1258 ptroff = gimple_assign_rhs2 (def_stmt);
1259 if (TREE_CODE (ptr) != SSA_NAME
1260 || TREE_CODE (ptroff) != INTEGER_CST)
1261 return false;
1263 off += wi::to_offset (ptroff);
1264 op->op0 = ptr;
1267 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1268 if (tree_fits_shwi_p (mem_op->op0))
1269 mem_op->off = tree_to_shwi (mem_op->op0);
1270 else
1271 mem_op->off = -1;
1272 if (TREE_CODE (op->op0) == SSA_NAME)
1273 op->op0 = SSA_VAL (op->op0);
1274 if (TREE_CODE (op->op0) != SSA_NAME)
1275 op->opcode = TREE_CODE (op->op0);
1277 /* And recurse. */
1278 if (TREE_CODE (op->op0) == SSA_NAME)
1279 vn_reference_maybe_forwprop_address (ops, i_p);
1280 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1281 vn_reference_fold_indirect (ops, i_p);
1282 return true;
1285 /* Optimize the reference REF to a constant if possible or return
1286 NULL_TREE if not. */
1288 tree
1289 fully_constant_vn_reference_p (vn_reference_t ref)
1291 vec<vn_reference_op_s> operands = ref->operands;
1292 vn_reference_op_t op;
1294 /* Try to simplify the translated expression if it is
1295 a call to a builtin function with at most two arguments. */
1296 op = &operands[0];
1297 if (op->opcode == CALL_EXPR
1298 && TREE_CODE (op->op0) == ADDR_EXPR
1299 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1300 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1301 && operands.length () >= 2
1302 && operands.length () <= 3)
1304 vn_reference_op_t arg0, arg1 = NULL;
1305 bool anyconst = false;
1306 arg0 = &operands[1];
1307 if (operands.length () > 2)
1308 arg1 = &operands[2];
1309 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1310 || (arg0->opcode == ADDR_EXPR
1311 && is_gimple_min_invariant (arg0->op0)))
1312 anyconst = true;
1313 if (arg1
1314 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1315 || (arg1->opcode == ADDR_EXPR
1316 && is_gimple_min_invariant (arg1->op0))))
1317 anyconst = true;
1318 if (anyconst)
1320 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1321 arg1 ? 2 : 1,
1322 arg0->op0,
1323 arg1 ? arg1->op0 : NULL);
1324 if (folded
1325 && TREE_CODE (folded) == NOP_EXPR)
1326 folded = TREE_OPERAND (folded, 0);
1327 if (folded
1328 && is_gimple_min_invariant (folded))
1329 return folded;
1333 /* Simplify reads from constants or constant initializers. */
1334 else if (BITS_PER_UNIT == 8
1335 && is_gimple_reg_type (ref->type)
1336 && (!INTEGRAL_TYPE_P (ref->type)
1337 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1339 HOST_WIDE_INT off = 0;
1340 HOST_WIDE_INT size;
1341 if (INTEGRAL_TYPE_P (ref->type))
1342 size = TYPE_PRECISION (ref->type);
1343 else
1344 size = tree_to_shwi (TYPE_SIZE (ref->type));
1345 if (size % BITS_PER_UNIT != 0
1346 || size > MAX_BITSIZE_MODE_ANY_MODE)
1347 return NULL_TREE;
1348 size /= BITS_PER_UNIT;
1349 unsigned i;
1350 for (i = 0; i < operands.length (); ++i)
1352 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1354 ++i;
1355 break;
1357 if (operands[i].off == -1)
1358 return NULL_TREE;
1359 off += operands[i].off;
1360 if (operands[i].opcode == MEM_REF)
1362 ++i;
1363 break;
1366 vn_reference_op_t base = &operands[--i];
1367 tree ctor = error_mark_node;
1368 tree decl = NULL_TREE;
1369 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1370 ctor = base->op0;
1371 else if (base->opcode == MEM_REF
1372 && base[1].opcode == ADDR_EXPR
1373 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1374 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
1376 decl = TREE_OPERAND (base[1].op0, 0);
1377 ctor = ctor_for_folding (decl);
1379 if (ctor == NULL_TREE)
1380 return build_zero_cst (ref->type);
1381 else if (ctor != error_mark_node)
1383 if (decl)
1385 tree res = fold_ctor_reference (ref->type, ctor,
1386 off * BITS_PER_UNIT,
1387 size * BITS_PER_UNIT, decl);
1388 if (res)
1390 STRIP_USELESS_TYPE_CONVERSION (res);
1391 if (is_gimple_min_invariant (res))
1392 return res;
1395 else
1397 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1398 int len = native_encode_expr (ctor, buf, size, off);
1399 if (len > 0)
1400 return native_interpret_expr (ref->type, buf, len);
1405 return NULL_TREE;
1408 /* Return true if OPS contain a storage order barrier. */
1410 static bool
1411 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1413 vn_reference_op_t op;
1414 unsigned i;
1416 FOR_EACH_VEC_ELT (ops, i, op)
1417 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1418 return true;
1420 return false;
1423 /* Transform any SSA_NAMEs in a vector of vn_reference_op_s
1424 structures into their value numbers. This is done in-place, and
1425 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1426 whether any operands were valueized. */
1428 static vec<vn_reference_op_s>
1429 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1431 vn_reference_op_t vro;
1432 unsigned int i;
1434 *valueized_anything = false;
1436 FOR_EACH_VEC_ELT (orig, i, vro)
1438 if (vro->opcode == SSA_NAME
1439 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1441 tree tem = SSA_VAL (vro->op0);
1442 if (tem != vro->op0)
1444 *valueized_anything = true;
1445 vro->op0 = tem;
1447 /* If it transforms from an SSA_NAME to a constant, update
1448 the opcode. */
1449 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1450 vro->opcode = TREE_CODE (vro->op0);
1452 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1454 tree tem = SSA_VAL (vro->op1);
1455 if (tem != vro->op1)
1457 *valueized_anything = true;
1458 vro->op1 = tem;
1461 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1463 tree tem = SSA_VAL (vro->op2);
1464 if (tem != vro->op2)
1466 *valueized_anything = true;
1467 vro->op2 = tem;
1470 /* If it transforms from an SSA_NAME to an address, fold with
1471 a preceding indirect reference. */
1472 if (i > 0
1473 && vro->op0
1474 && TREE_CODE (vro->op0) == ADDR_EXPR
1475 && orig[i - 1].opcode == MEM_REF)
1477 if (vn_reference_fold_indirect (&orig, &i))
1478 *valueized_anything = true;
1480 else if (i > 0
1481 && vro->opcode == SSA_NAME
1482 && orig[i - 1].opcode == MEM_REF)
1484 if (vn_reference_maybe_forwprop_address (&orig, &i))
1485 *valueized_anything = true;
1487 /* If it transforms a non-constant ARRAY_REF into a constant
1488 one, adjust the constant offset. */
1489 else if (vro->opcode == ARRAY_REF
1490 && vro->off == -1
1491 && TREE_CODE (vro->op0) == INTEGER_CST
1492 && TREE_CODE (vro->op1) == INTEGER_CST
1493 && TREE_CODE (vro->op2) == INTEGER_CST)
1495 offset_int off = ((wi::to_offset (vro->op0)
1496 - wi::to_offset (vro->op1))
1497 * wi::to_offset (vro->op2)
1498 * vn_ref_op_align_unit (vro));
1499 if (wi::fits_shwi_p (off))
1500 vro->off = off.to_shwi ();
1504 return orig;
1507 static vec<vn_reference_op_s>
1508 valueize_refs (vec<vn_reference_op_s> orig)
1510 bool tem;
1511 return valueize_refs_1 (orig, &tem);
1514 static vec<vn_reference_op_s> shared_lookup_references;
1516 /* Create a vector of vn_reference_op_s structures from REF, a
1517 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1518 this function. *VALUEIZED_ANYTHING will specify whether any
1519 operands were valueized. */
1521 static vec<vn_reference_op_s>
1522 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1524 if (!ref)
1525 return vNULL;
1526 shared_lookup_references.truncate (0);
1527 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1528 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1529 valueized_anything);
1530 return shared_lookup_references;
1533 /* Create a vector of vn_reference_op_s structures from CALL, a
1534 call statement. The vector is shared among all callers of
1535 this function. */
1537 static vec<vn_reference_op_s>
1538 valueize_shared_reference_ops_from_call (gcall *call)
1540 if (!call)
1541 return vNULL;
1542 shared_lookup_references.truncate (0);
1543 copy_reference_ops_from_call (call, &shared_lookup_references);
1544 shared_lookup_references = valueize_refs (shared_lookup_references);
1545 return shared_lookup_references;
1548 /* Lookup a SCCVN reference operation VR in the current hash table.
1549 Returns the resulting value number if it exists in the hash table,
1550 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1551 vn_reference_t stored in the hashtable if something is found. */
1553 static tree
1554 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1556 vn_reference_s **slot;
1557 hashval_t hash;
1559 hash = vr->hashcode;
1560 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1561 if (!slot && current_info == optimistic_info)
1562 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1563 if (slot)
1565 if (vnresult)
1566 *vnresult = (vn_reference_t)*slot;
1567 return ((vn_reference_t)*slot)->result;
1570 return NULL_TREE;
1573 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1574 with the current VUSE and performs the expression lookup. */
1576 static void *
1577 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1578 unsigned int cnt, void *vr_)
1580 vn_reference_t vr = (vn_reference_t)vr_;
1581 vn_reference_s **slot;
1582 hashval_t hash;
1584 /* This bounds the stmt walks we perform on reference lookups
1585 to O(1) instead of O(N) where N is the number of dominating
1586 stores. */
1587 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1588 return (void *)-1;
1590 if (last_vuse_ptr)
1591 *last_vuse_ptr = vuse;
1593 /* Fixup vuse and hash. */
1594 if (vr->vuse)
1595 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1596 vr->vuse = vuse_ssa_val (vuse);
1597 if (vr->vuse)
1598 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1600 hash = vr->hashcode;
1601 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1602 if (!slot && current_info == optimistic_info)
1603 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1604 if (slot)
1605 return *slot;
1607 return NULL;
1610 /* Lookup an existing or insert a new vn_reference entry into the
1611 value table for the VUSE, SET, TYPE, OPERANDS reference with the
1612 value VALUE, which is either a constant or an SSA name. */
1614 static vn_reference_t
1615 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1616 alias_set_type set,
1617 tree type,
1618 vec<vn_reference_op_s,
1619 va_heap> operands,
1620 tree value)
1622 vn_reference_s vr1;
1623 vn_reference_t result;
1624 unsigned value_id;
1625 vr1.vuse = vuse;
1626 vr1.operands = operands;
1627 vr1.type = type;
1628 vr1.set = set;
1629 vr1.hashcode = vn_reference_compute_hash (&vr1);
1630 if (vn_reference_lookup_1 (&vr1, &result))
1631 return result;
1632 if (TREE_CODE (value) == SSA_NAME)
1633 value_id = VN_INFO (value)->value_id;
1634 else
1635 value_id = get_or_alloc_constant_value_id (value);
1636 return vn_reference_insert_pieces (vuse, set, type,
1637 operands.copy (), value, value_id);
1640 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *stmt, tree result);
1642 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
1644 static tree
1645 vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops)
1647 if (!rcode.is_tree_code ())
1648 return NULL_TREE;
1649 vn_nary_op_t vnresult = NULL;
1650 return vn_nary_op_lookup_pieces (TREE_CODE_LENGTH ((tree_code) rcode),
1651 (tree_code) rcode, type, ops, &vnresult);
1654 /* Return a value-number for RCODE OPS... either by looking up an existing
1655 value-number for the simplified result or by inserting the operation if
1656 INSERT is true. */
1658 static tree
1659 vn_nary_build_or_lookup_1 (code_helper rcode, tree type, tree *ops,
1660 bool insert)
1662 tree result = NULL_TREE;
1663 /* We will be creating a value number for
1664 RCODE (OPS...).
1665 So first simplify and lookup this expression to see if it
1666 is already available. */
1667 mprts_hook = vn_lookup_simplify_result;
1668 bool res = false;
1669 switch (TREE_CODE_LENGTH ((tree_code) rcode))
1671 case 1:
1672 res = gimple_resimplify1 (NULL, &rcode, type, ops, vn_valueize);
1673 break;
1674 case 2:
1675 res = gimple_resimplify2 (NULL, &rcode, type, ops, vn_valueize);
1676 break;
1677 case 3:
1678 res = gimple_resimplify3 (NULL, &rcode, type, ops, vn_valueize);
1679 break;
1681 mprts_hook = NULL;
1682 gimple *new_stmt = NULL;
1683 if (res
1684 && gimple_simplified_result_is_gimple_val (rcode, ops))
1685 /* The expression is already available. */
1686 result = ops[0];
1687 else
1689 tree val = vn_lookup_simplify_result (rcode, type, ops);
1690 if (!val && insert)
1692 gimple_seq stmts = NULL;
1693 result = maybe_push_res_to_seq (rcode, type, ops, &stmts);
1694 if (result)
1696 gcc_assert (gimple_seq_singleton_p (stmts));
1697 new_stmt = gimple_seq_first_stmt (stmts);
1700 else
1701 /* The expression is already available. */
1702 result = val;
1704 if (new_stmt)
1706 /* The expression is not yet available, value-number lhs to
1707 the new SSA_NAME we created. */
1708 /* Initialize value-number information properly. */
1709 VN_INFO_GET (result)->valnum = result;
1710 VN_INFO (result)->value_id = get_next_value_id ();
1711 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1712 new_stmt);
1713 VN_INFO (result)->needs_insertion = true;
1714 /* ??? PRE phi-translation inserts NARYs without corresponding
1715 SSA name result. Re-use those but set their result according
1716 to the stmt we just built. */
1717 vn_nary_op_t nary = NULL;
1718 vn_nary_op_lookup_stmt (new_stmt, &nary);
1719 if (nary)
1721 gcc_assert (nary->result == NULL_TREE);
1722 nary->result = gimple_assign_lhs (new_stmt);
1724 /* As all "inserted" statements are singleton SCCs, insert
1725 to the valid table. This is strictly needed to
1726 avoid re-generating new value SSA_NAMEs for the same
1727 expression during SCC iteration over and over (the
1728 optimistic table gets cleared after each iteration).
1729 We do not need to insert into the optimistic table, as
1730 lookups there will fall back to the valid table. */
1731 else if (current_info == optimistic_info)
1733 current_info = valid_info;
1734 vn_nary_op_insert_stmt (new_stmt, result);
1735 current_info = optimistic_info;
1737 else
1738 vn_nary_op_insert_stmt (new_stmt, result);
1739 if (dump_file && (dump_flags & TDF_DETAILS))
1741 fprintf (dump_file, "Inserting name ");
1742 print_generic_expr (dump_file, result, 0);
1743 fprintf (dump_file, " for expression ");
1744 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1745 fprintf (dump_file, "\n");
1748 return result;
1751 /* Return a value-number for RCODE OPS... either by looking up an existing
1752 value-number for the simplified result or by inserting the operation. */
1754 static tree
1755 vn_nary_build_or_lookup (code_helper rcode, tree type, tree *ops)
1757 return vn_nary_build_or_lookup_1 (rcode, type, ops, true);
1760 /* Try to simplify the nary operation NARY and return the simplified
1761 value if one is available, NULL_TREE otherwise. */
1763 tree
1764 vn_nary_simplify (vn_nary_op_t nary)
1766 if (nary->length > 3)
1767 return NULL_TREE;
1768 tree ops[3];
1769 memcpy (ops, nary->op, sizeof (tree) * nary->length);
1770 return vn_nary_build_or_lookup_1 (nary->opcode, nary->type, ops, false);
1774 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1775 from the statement defining VUSE and if not successful tries to
1776 translate *REFP and VR_ through an aggregate copy at the definition
1777 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1778 of *REF and *VR. If only disambiguation was performed then
1779 *DISAMBIGUATE_ONLY is set to true. */
1781 static void *
1782 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1783 bool *disambiguate_only)
1785 vn_reference_t vr = (vn_reference_t)vr_;
1786 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1787 tree base = ao_ref_base (ref);
1788 HOST_WIDE_INT offset, maxsize;
1789 static vec<vn_reference_op_s>
1790 lhs_ops = vNULL;
1791 ao_ref lhs_ref;
1792 bool lhs_ref_ok = false;
1794 /* If the reference is based on a parameter that was determined to be
1795 pointing to readonly memory, it doesn't change. */
1796 if (TREE_CODE (base) == MEM_REF
1797 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1798 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1799 && bitmap_bit_p (const_parms,
1800 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1802 *disambiguate_only = true;
1803 return NULL;
1806 /* First try to disambiguate after value-replacing in the definitions LHS. */
1807 if (is_gimple_assign (def_stmt))
1809 tree lhs = gimple_assign_lhs (def_stmt);
1810 bool valueized_anything = false;
1811 /* Avoid re-allocation overhead. */
1812 lhs_ops.truncate (0);
1813 copy_reference_ops_from_ref (lhs, &lhs_ops);
1814 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1815 if (valueized_anything)
1817 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1818 get_alias_set (lhs),
1819 TREE_TYPE (lhs), lhs_ops);
1820 if (lhs_ref_ok
1821 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1823 *disambiguate_only = true;
1824 return NULL;
1827 else
1829 ao_ref_init (&lhs_ref, lhs);
1830 lhs_ref_ok = true;
1833 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1834 && gimple_call_num_args (def_stmt) <= 4)
1836 /* For builtin calls, valueize their arguments and call the
1837 alias oracle again. Valueization may improve points-to
1838 info of pointers and constify size and position arguments.
1839 Originally this was motivated by PR61034 which has
1840 conditional calls to free falsely clobbering ref because
1841 of imprecise points-to info of the argument. */
1842 tree oldargs[4];
1843 bool valueized_anything = false;
1844 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1846 oldargs[i] = gimple_call_arg (def_stmt, i);
1847 if (TREE_CODE (oldargs[i]) == SSA_NAME
1848 && VN_INFO (oldargs[i])->valnum != oldargs[i])
1850 gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
1851 valueized_anything = true;
1854 if (valueized_anything)
1856 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1857 ref);
1858 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1859 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1860 if (!res)
1862 *disambiguate_only = true;
1863 return NULL;
1868 if (*disambiguate_only)
1869 return (void *)-1;
1871 offset = ref->offset;
1872 maxsize = ref->max_size;
1874 /* If we cannot constrain the size of the reference we cannot
1875 test if anything kills it. */
1876 if (maxsize == -1)
1877 return (void *)-1;
1879 /* We can't deduce anything useful from clobbers. */
1880 if (gimple_clobber_p (def_stmt))
1881 return (void *)-1;
1883 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1884 from that definition.
1885 1) Memset. */
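  /* E.g. (an illustrative sketch): after memset (&a, 0, sizeof (a)),
     a scalar read from a that is fully covered by the memset can be
     value-numbered to zero, which is what the code below derives.  */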
1886 if (is_gimple_reg_type (vr->type)
1887 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1888 && integer_zerop (gimple_call_arg (def_stmt, 1))
1889 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1890 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1892 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1893 tree base2;
1894 HOST_WIDE_INT offset2, size2, maxsize2;
1895 bool reverse;
1896 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1897 &reverse);
1898 size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
1899 if ((unsigned HOST_WIDE_INT)size2 / 8
1900 == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
1901 && maxsize2 != -1
1902 && operand_equal_p (base, base2, 0)
1903 && offset2 <= offset
1904 && offset2 + size2 >= offset + maxsize)
1906 tree val = build_zero_cst (vr->type);
1907 return vn_reference_lookup_or_insert_for_pieces
1908 (vuse, vr->set, vr->type, vr->operands, val);
1912 /* 2) Assignment from an empty CONSTRUCTOR. */
1913 else if (is_gimple_reg_type (vr->type)
1914 && gimple_assign_single_p (def_stmt)
1915 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1916 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1918 tree base2;
1919 HOST_WIDE_INT offset2, size2, maxsize2;
1920 bool reverse;
1921 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1922 &offset2, &size2, &maxsize2, &reverse);
1923 if (maxsize2 != -1
1924 && operand_equal_p (base, base2, 0)
1925 && offset2 <= offset
1926 && offset2 + size2 >= offset + maxsize)
1928 tree val = build_zero_cst (vr->type);
1929 return vn_reference_lookup_or_insert_for_pieces
1930 (vuse, vr->set, vr->type, vr->operands, val);
1934 /* 3) Assignment from a constant. We can use fold's native encode/interpret
1935 routines to extract the assigned bits. */
1936 else if (ref->size == maxsize
1937 && is_gimple_reg_type (vr->type)
1938 && !contains_storage_order_barrier_p (vr->operands)
1939 && gimple_assign_single_p (def_stmt)
1940 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1941 && maxsize % BITS_PER_UNIT == 0
1942 && offset % BITS_PER_UNIT == 0
1943 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
1944 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
1945 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
1947 tree base2;
1948 HOST_WIDE_INT offset2, size2, maxsize2;
1949 bool reverse;
1950 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1951 &offset2, &size2, &maxsize2, &reverse);
1952 if (!reverse
1953 && maxsize2 != -1
1954 && maxsize2 == size2
1955 && size2 % BITS_PER_UNIT == 0
1956 && offset2 % BITS_PER_UNIT == 0
1957 && operand_equal_p (base, base2, 0)
1958 && offset2 <= offset
1959 && offset2 + size2 >= offset + maxsize)
1961 /* We support up to 512-bit values (for V8DFmode). */
1962 unsigned char buffer[64];
1963 int len;
1965 tree rhs = gimple_assign_rhs1 (def_stmt);
1966 if (TREE_CODE (rhs) == SSA_NAME)
1967 rhs = SSA_VAL (rhs);
1968 len = native_encode_expr (rhs,
1969 buffer, sizeof (buffer));
1970 if (len > 0)
1972 tree type = vr->type;
1973 /* Make sure to interpret in a type that has a range
1974 covering the whole access size. */
1975 if (INTEGRAL_TYPE_P (vr->type)
1976 && ref->size != TYPE_PRECISION (vr->type))
1977 type = build_nonstandard_integer_type (ref->size,
1978 TYPE_UNSIGNED (type));
1979 tree val = native_interpret_expr (type,
1980 buffer
1981 + ((offset - offset2)
1982 / BITS_PER_UNIT),
1983 ref->size / BITS_PER_UNIT);
1984 /* If we chop off bits because the type's precision doesn't
1985 match the memory access size, this is OK when optimizing
1986 reads but not when called from the DSE code during
1987 elimination. */
1988 if (val
1989 && type != vr->type)
1991 if (! int_fits_type_p (val, vr->type))
1992 val = NULL_TREE;
1993 else
1994 val = fold_convert (vr->type, val);
1997 if (val)
1998 return vn_reference_lookup_or_insert_for_pieces
1999 (vuse, vr->set, vr->type, vr->operands, val);
2004 /* 4) Assignment from an SSA name which definition we may be able
2005 to access pieces from. */
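  /* Illustrative sketch (annotation): after a register-type copy such as
       a = b_1;
       tem_2 = a.f;
     the partial read can be re-expressed as BIT_FIELD_REF <b_1, size, pos>
     and value-numbered through vn_nary_build_or_lookup below.  */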
2006 else if (ref->size == maxsize
2007 && is_gimple_reg_type (vr->type)
2008 && !contains_storage_order_barrier_p (vr->operands)
2009 && gimple_assign_single_p (def_stmt)
2010 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2012 tree base2;
2013 HOST_WIDE_INT offset2, size2, maxsize2;
2014 bool reverse;
2015 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2016 &offset2, &size2, &maxsize2,
2017 &reverse);
2018 if (!reverse
2019 && maxsize2 != -1
2020 && maxsize2 == size2
2021 && operand_equal_p (base, base2, 0)
2022 && offset2 <= offset
2023 && offset2 + size2 >= offset + maxsize
2024 /* ??? We can't handle bitfield precision extracts without
2025 either using an alternate type for the BIT_FIELD_REF and
2026 then doing a conversion or possibly adjusting the offset
2027 according to endianness. */
2028 && (! INTEGRAL_TYPE_P (vr->type)
2029 || ref->size == TYPE_PRECISION (vr->type))
2030 && ref->size % BITS_PER_UNIT == 0)
2032 code_helper rcode = BIT_FIELD_REF;
2033 tree ops[3];
2034 ops[0] = SSA_VAL (gimple_assign_rhs1 (def_stmt));
2035 ops[1] = bitsize_int (ref->size);
2036 ops[2] = bitsize_int (offset - offset2);
2037 tree val = vn_nary_build_or_lookup (rcode, vr->type, ops);
2038 if (val)
2040 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2041 (vuse, vr->set, vr->type, vr->operands, val);
2042 return res;
2047 /* 5) For aggregate copies translate the reference through them if
2048 the copy kills ref. */
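  /* Illustrative sketch (annotation): given an aggregate copy that kills
     the looked-up reference, e.g.
       *p_1 = s;
       tem_2 = p_1->f;
     the lookup of p_1->f is rewritten into a lookup of s.f and the
     walk over the virtual operands continues with that reference.  */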
2049 else if (vn_walk_kind == VN_WALKREWRITE
2050 && gimple_assign_single_p (def_stmt)
2051 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2052 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2053 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2055 tree base2;
2056 HOST_WIDE_INT maxsize2;
2057 int i, j, k;
2058 auto_vec<vn_reference_op_s> rhs;
2059 vn_reference_op_t vro;
2060 ao_ref r;
2062 if (!lhs_ref_ok)
2063 return (void *)-1;
2065 /* See if the assignment kills REF. */
2066 base2 = ao_ref_base (&lhs_ref);
2067 maxsize2 = lhs_ref.max_size;
2068 if (maxsize2 == -1
2069 || (base != base2
2070 && (TREE_CODE (base) != MEM_REF
2071 || TREE_CODE (base2) != MEM_REF
2072 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2073 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2074 TREE_OPERAND (base2, 1))))
2075 || !stmt_kills_ref_p (def_stmt, ref))
2076 return (void *)-1;
2078 /* Find the common base of ref and the lhs. lhs_ops already
2079 contains valueized operands for the lhs. */
2080 i = vr->operands.length () - 1;
2081 j = lhs_ops.length () - 1;
2082 while (j >= 0 && i >= 0
2083 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2085 i--;
2086 j--;
2089 /* ??? The innermost op should always be a MEM_REF and we already
2090 checked that the assignment to the lhs kills vr. Thus for
2091 aggregate copies using char[] types the vn_reference_op_eq
2092 may fail when comparing types for compatibility. But we really
2093 don't care here - further lookups with the rewritten operands
2094 will simply fail if we messed up types too badly. */
2095 HOST_WIDE_INT extra_off = 0;
2096 if (j == 0 && i >= 0
2097 && lhs_ops[0].opcode == MEM_REF
2098 && lhs_ops[0].off != -1)
2100 if (lhs_ops[0].off == vr->operands[i].off)
2101 i--, j--;
2102 else if (vr->operands[i].opcode == MEM_REF
2103 && vr->operands[i].off != -1)
2105 extra_off = vr->operands[i].off - lhs_ops[0].off;
2106 i--, j--;
2110 /* i now points to the first additional op.
2111 ??? LHS may not be completely contained in VR, one or more
2112 VIEW_CONVERT_EXPRs could be in its way. We could at least
2113 try handling outermost VIEW_CONVERT_EXPRs. */
2114 if (j != -1)
2115 return (void *)-1;
2117 /* Punt if the additional ops contain a storage order barrier. */
2118 for (k = i; k >= 0; k--)
2120 vro = &vr->operands[k];
2121 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2122 return (void *)-1;
2125 /* Now re-write REF to be based on the rhs of the assignment. */
2126 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2128 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2129 if (extra_off != 0)
2131 if (rhs.length () < 2
2132 || rhs[0].opcode != MEM_REF
2133 || rhs[0].off == -1)
2134 return (void *)-1;
2135 rhs[0].off += extra_off;
2136 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
2137 build_int_cst (TREE_TYPE (rhs[0].op0),
2138 extra_off));
2141 /* We need to prepend vr->operands[0..i] to rhs. */
2142 vec<vn_reference_op_s> old = vr->operands;
2143 if (i + 1 + rhs.length () > vr->operands.length ())
2144 vr->operands.safe_grow (i + 1 + rhs.length ());
2145 else
2146 vr->operands.truncate (i + 1 + rhs.length ());
2147 FOR_EACH_VEC_ELT (rhs, j, vro)
2148 vr->operands[i + 1 + j] = *vro;
2149 vr->operands = valueize_refs (vr->operands);
2150 if (old == shared_lookup_references)
2151 shared_lookup_references = vr->operands;
2152 vr->hashcode = vn_reference_compute_hash (vr);
2154 /* Try folding the new reference to a constant. */
2155 tree val = fully_constant_vn_reference_p (vr);
2156 if (val)
2157 return vn_reference_lookup_or_insert_for_pieces
2158 (vuse, vr->set, vr->type, vr->operands, val);
2160 /* Adjust *ref from the new operands. */
2161 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2162 return (void *)-1;
2163 /* This can happen with bitfields. */
2164 if (ref->size != r.size)
2165 return (void *)-1;
2166 *ref = r;
2168 /* Do not update last seen VUSE after translating. */
2169 last_vuse_ptr = NULL;
2171 /* Keep looking for the adjusted *REF / VR pair. */
2172 return NULL;
2175 /* 6) For memcpy copies translate the reference through them if
2176 the copy kills ref. */
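  /* Illustrative sketch (annotation): given
       memcpy (&d, &s, sizeof (d));
       tem_1 = d.f;
     with the read fully contained in the copied destination, the lookup
     is rewritten into a MEM_REF based on &s and the walk continues
     from the translated reference.  */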
2177 else if (vn_walk_kind == VN_WALKREWRITE
2178 && is_gimple_reg_type (vr->type)
2179 /* ??? Handle BCOPY as well. */
2180 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2181 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2182 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2183 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2184 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2185 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2186 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2187 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
2189 tree lhs, rhs;
2190 ao_ref r;
2191 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
2192 vn_reference_op_s op;
2193 HOST_WIDE_INT at;
2195 /* Only handle non-variable, addressable refs. */
2196 if (ref->size != maxsize
2197 || offset % BITS_PER_UNIT != 0
2198 || ref->size % BITS_PER_UNIT != 0)
2199 return (void *)-1;
2201 /* Extract a pointer base and an offset for the destination. */
2202 lhs = gimple_call_arg (def_stmt, 0);
2203 lhs_offset = 0;
2204 if (TREE_CODE (lhs) == SSA_NAME)
2206 lhs = SSA_VAL (lhs);
2207 if (TREE_CODE (lhs) == SSA_NAME)
2209 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2210 if (gimple_assign_single_p (def_stmt)
2211 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2212 lhs = gimple_assign_rhs1 (def_stmt);
2215 if (TREE_CODE (lhs) == ADDR_EXPR)
2217 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2218 &lhs_offset);
2219 if (!tem)
2220 return (void *)-1;
2221 if (TREE_CODE (tem) == MEM_REF
2222 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2224 lhs = TREE_OPERAND (tem, 0);
2225 if (TREE_CODE (lhs) == SSA_NAME)
2226 lhs = SSA_VAL (lhs);
2227 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2229 else if (DECL_P (tem))
2230 lhs = build_fold_addr_expr (tem);
2231 else
2232 return (void *)-1;
2234 if (TREE_CODE (lhs) != SSA_NAME
2235 && TREE_CODE (lhs) != ADDR_EXPR)
2236 return (void *)-1;
2238 /* Extract a pointer base and an offset for the source. */
2239 rhs = gimple_call_arg (def_stmt, 1);
2240 rhs_offset = 0;
2241 if (TREE_CODE (rhs) == SSA_NAME)
2242 rhs = SSA_VAL (rhs);
2243 if (TREE_CODE (rhs) == ADDR_EXPR)
2245 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2246 &rhs_offset);
2247 if (!tem)
2248 return (void *)-1;
2249 if (TREE_CODE (tem) == MEM_REF
2250 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2252 rhs = TREE_OPERAND (tem, 0);
2253 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2255 else if (DECL_P (tem))
2256 rhs = build_fold_addr_expr (tem);
2257 else
2258 return (void *)-1;
2260 if (TREE_CODE (rhs) != SSA_NAME
2261 && TREE_CODE (rhs) != ADDR_EXPR)
2262 return (void *)-1;
2264 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2266 /* The bases of the destination and the references have to agree. */
2267 if ((TREE_CODE (base) != MEM_REF
2268 && !DECL_P (base))
2269 || (TREE_CODE (base) == MEM_REF
2270 && (TREE_OPERAND (base, 0) != lhs
2271 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
2272 || (DECL_P (base)
2273 && (TREE_CODE (lhs) != ADDR_EXPR
2274 || TREE_OPERAND (lhs, 0) != base)))
2275 return (void *)-1;
2277 at = offset / BITS_PER_UNIT;
2278 if (TREE_CODE (base) == MEM_REF)
2279 at += tree_to_uhwi (TREE_OPERAND (base, 1));
2280 /* If the access is completely outside of the memcpy destination
2281 area there is no aliasing. */
2282 if (lhs_offset >= at + maxsize / BITS_PER_UNIT
2283 || lhs_offset + copy_size <= at)
2284 return NULL;
2285 /* And the access has to be contained within the memcpy destination. */
2286 if (lhs_offset > at
2287 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2288 return (void *)-1;
2290 /* Make room for 2 operands in the new reference. */
2291 if (vr->operands.length () < 2)
2293 vec<vn_reference_op_s> old = vr->operands;
2294 vr->operands.safe_grow_cleared (2);
2295 if (old == shared_lookup_references)
2296 shared_lookup_references = vr->operands;
2298 else
2299 vr->operands.truncate (2);
2301 /* The looked-through reference is a simple MEM_REF. */
2302 memset (&op, 0, sizeof (op));
2303 op.type = vr->type;
2304 op.opcode = MEM_REF;
2305 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2306 op.off = at - lhs_offset + rhs_offset;
2307 vr->operands[0] = op;
2308 op.type = TREE_TYPE (rhs);
2309 op.opcode = TREE_CODE (rhs);
2310 op.op0 = rhs;
2311 op.off = -1;
2312 vr->operands[1] = op;
2313 vr->hashcode = vn_reference_compute_hash (vr);
2315 /* Try folding the new reference to a constant. */
2316 tree val = fully_constant_vn_reference_p (vr);
2317 if (val)
2318 return vn_reference_lookup_or_insert_for_pieces
2319 (vuse, vr->set, vr->type, vr->operands, val);
2321 /* Adjust *ref from the new operands. */
2322 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2323 return (void *)-1;
2324 /* This can happen with bitfields. */
2325 if (ref->size != r.size)
2326 return (void *)-1;
2327 *ref = r;
2329 /* Do not update last seen VUSE after translating. */
2330 last_vuse_ptr = NULL;
2332 /* Keep looking for the adjusted *REF / VR pair. */
2333 return NULL;
2336 /* Bail out and stop walking. */
2337 return (void *)-1;
2340 /* Return a reference op vector from OP that can be used for
2341 vn_reference_lookup_pieces. The caller is responsible for releasing
2342 the vector. */
2344 vec<vn_reference_op_s>
2345 vn_reference_operands_for_lookup (tree op)
2347 bool valueized;
2348 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2351 /* Lookup a reference operation by its parts, in the current hash table.
2352 Returns the resulting value number if it exists in the hash table,
2353 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2354 vn_reference_t stored in the hashtable if something is found. */
2356 tree
2357 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2358 vec<vn_reference_op_s> operands,
2359 vn_reference_t *vnresult, vn_lookup_kind kind)
2361 struct vn_reference_s vr1;
2362 vn_reference_t tmp;
2363 tree cst;
2365 if (!vnresult)
2366 vnresult = &tmp;
2367 *vnresult = NULL;
2369 vr1.vuse = vuse_ssa_val (vuse);
2370 shared_lookup_references.truncate (0);
2371 shared_lookup_references.safe_grow (operands.length ());
2372 memcpy (shared_lookup_references.address (),
2373 operands.address (),
2374 sizeof (vn_reference_op_s)
2375 * operands.length ());
2376 vr1.operands = operands = shared_lookup_references
2377 = valueize_refs (shared_lookup_references);
2378 vr1.type = type;
2379 vr1.set = set;
2380 vr1.hashcode = vn_reference_compute_hash (&vr1);
2381 if ((cst = fully_constant_vn_reference_p (&vr1)))
2382 return cst;
2384 vn_reference_lookup_1 (&vr1, vnresult);
2385 if (!*vnresult
2386 && kind != VN_NOWALK
2387 && vr1.vuse)
2389 ao_ref r;
2390 vn_walk_kind = kind;
2391 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2392 *vnresult =
2393 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2394 vn_reference_lookup_2,
2395 vn_reference_lookup_3,
2396 vuse_ssa_val, &vr1);
2397 gcc_checking_assert (vr1.operands == shared_lookup_references);
2400 if (*vnresult)
2401 return (*vnresult)->result;
2403 return NULL_TREE;
2406 /* Lookup OP in the current hash table, and return the resulting value
2407 number if it exists in the hash table. Return NULL_TREE if it does
2408 not exist in the hash table or if the result field of the structure
2409 was NULL. VNRESULT will be filled in with the vn_reference_t
2410 stored in the hashtable if one exists. When TBAA_P is false assume
2411 we are looking up a store and treat it as having alias-set zero. */
2413 tree
2414 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2415 vn_reference_t *vnresult, bool tbaa_p)
2417 vec<vn_reference_op_s> operands;
2418 struct vn_reference_s vr1;
2419 tree cst;
2420 bool valueized_anything;
2422 if (vnresult)
2423 *vnresult = NULL;
2425 vr1.vuse = vuse_ssa_val (vuse);
2426 vr1.operands = operands
2427 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2428 vr1.type = TREE_TYPE (op);
2429 vr1.set = tbaa_p ? get_alias_set (op) : 0;
2430 vr1.hashcode = vn_reference_compute_hash (&vr1);
2431 if ((cst = fully_constant_vn_reference_p (&vr1)))
2432 return cst;
2434 if (kind != VN_NOWALK
2435 && vr1.vuse)
2437 vn_reference_t wvnresult;
2438 ao_ref r;
2439 /* Make sure to use a valueized reference if we valueized anything.
2440 Otherwise preserve the full reference for advanced TBAA. */
2441 if (!valueized_anything
2442 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2443 vr1.operands))
2444 ao_ref_init (&r, op);
2445 if (! tbaa_p)
2446 r.ref_alias_set = r.base_alias_set = 0;
2447 vn_walk_kind = kind;
2448 wvnresult =
2449 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2450 vn_reference_lookup_2,
2451 vn_reference_lookup_3,
2452 vuse_ssa_val, &vr1);
2453 gcc_checking_assert (vr1.operands == shared_lookup_references);
2454 if (wvnresult)
2456 if (vnresult)
2457 *vnresult = wvnresult;
2458 return wvnresult->result;
2461 return NULL_TREE;
2464 return vn_reference_lookup_1 (&vr1, vnresult);
2467 /* Lookup CALL in the current hash table and return the entry in
2468 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2470 void
2471 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2472 vn_reference_t vr)
2474 if (vnresult)
2475 *vnresult = NULL;
2477 tree vuse = gimple_vuse (call);
2479 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2480 vr->operands = valueize_shared_reference_ops_from_call (call);
2481 vr->type = gimple_expr_type (call);
2482 vr->set = 0;
2483 vr->hashcode = vn_reference_compute_hash (vr);
2484 vn_reference_lookup_1 (vr, vnresult);
2487 /* Insert OP into the current hash table with a value number of
2488 RESULT, and return the resulting reference structure we created. */
2490 static vn_reference_t
2491 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2493 vn_reference_s **slot;
2494 vn_reference_t vr1;
2495 bool tem;
2497 vr1 = current_info->references_pool->allocate ();
2498 if (TREE_CODE (result) == SSA_NAME)
2499 vr1->value_id = VN_INFO (result)->value_id;
2500 else
2501 vr1->value_id = get_or_alloc_constant_value_id (result);
2502 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2503 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2504 vr1->type = TREE_TYPE (op);
2505 vr1->set = get_alias_set (op);
2506 vr1->hashcode = vn_reference_compute_hash (vr1);
2507 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2508 vr1->result_vdef = vdef;
2510 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2511 INSERT);
2513 /* Because we lookup stores using vuses, and value number failures
2514 using the vdefs (see visit_reference_op_store for how and why),
2515 it's possible that on failure we may try to insert an already
2516 inserted store. This is not wrong, there is no ssa name for a
2517 store that we could use as a differentiator anyway. Thus, unlike
2518 the other lookup functions, you cannot gcc_assert (!*slot)
2519 here. */
2521 /* But free the old slot in case of a collision. */
2522 if (*slot)
2523 free_reference (*slot);
2525 *slot = vr1;
2526 return vr1;
2529 /* Insert a reference by its pieces into the current hash table with
2530 a value number of RESULT. Return the resulting reference
2531 structure we created. */
2533 vn_reference_t
2534 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2535 vec<vn_reference_op_s> operands,
2536 tree result, unsigned int value_id)
2539 vn_reference_s **slot;
2540 vn_reference_t vr1;
2542 vr1 = current_info->references_pool->allocate ();
2543 vr1->value_id = value_id;
2544 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2545 vr1->operands = valueize_refs (operands);
2546 vr1->type = type;
2547 vr1->set = set;
2548 vr1->hashcode = vn_reference_compute_hash (vr1);
2549 if (result && TREE_CODE (result) == SSA_NAME)
2550 result = SSA_VAL (result);
2551 vr1->result = result;
2553 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2554 INSERT);
2556 /* At this point we should have all the things inserted that we have
2557 seen before, and we should never try inserting something that
2558 already exists. */
2559 gcc_assert (!*slot);
2560 if (*slot)
2561 free_reference (*slot);
2563 *slot = vr1;
2564 return vr1;
2567 /* Compute and return the hash value for nary operation VNO1. */
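/* Note (annotation): the hashing below canonicalizes commutative (and
   commutative ternary) operands and may swap comparison operands, so
   e.g. a_1 + b_2 and b_2 + a_1 receive the same hash and thus can get
   the same value number.  */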
2569 static hashval_t
2570 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2572 inchash::hash hstate;
2573 unsigned i;
2575 for (i = 0; i < vno1->length; ++i)
2576 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2577 vno1->op[i] = SSA_VAL (vno1->op[i]);
2579 if (((vno1->length == 2
2580 && commutative_tree_code (vno1->opcode))
2581 || (vno1->length == 3
2582 && commutative_ternary_tree_code (vno1->opcode)))
2583 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2584 std::swap (vno1->op[0], vno1->op[1]);
2585 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2586 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2588 std::swap (vno1->op[0], vno1->op[1]);
2589 vno1->opcode = swap_tree_comparison (vno1->opcode);
2592 hstate.add_int (vno1->opcode);
2593 for (i = 0; i < vno1->length; ++i)
2594 inchash::add_expr (vno1->op[i], hstate);
2596 return hstate.end ();
2599 /* Compare nary operations VNO1 and VNO2 and return true if they are
2600 equivalent. */
2602 bool
2603 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2605 unsigned i;
2607 if (vno1->hashcode != vno2->hashcode)
2608 return false;
2610 if (vno1->length != vno2->length)
2611 return false;
2613 if (vno1->opcode != vno2->opcode
2614 || !types_compatible_p (vno1->type, vno2->type))
2615 return false;
2617 for (i = 0; i < vno1->length; ++i)
2618 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2619 return false;
2621 return true;
2624 /* Initialize VNO from the pieces provided. */
2626 static void
2627 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2628 enum tree_code code, tree type, tree *ops)
2630 vno->opcode = code;
2631 vno->length = length;
2632 vno->type = type;
2633 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2636 /* Initialize VNO from OP. */
2638 static void
2639 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2641 unsigned i;
2643 vno->opcode = TREE_CODE (op);
2644 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2645 vno->type = TREE_TYPE (op);
2646 for (i = 0; i < vno->length; ++i)
2647 vno->op[i] = TREE_OPERAND (op, i);
2650 /* Return the number of operands for a vn_nary ops structure from STMT. */
2652 static unsigned int
2653 vn_nary_length_from_stmt (gimple *stmt)
2655 switch (gimple_assign_rhs_code (stmt))
2657 case REALPART_EXPR:
2658 case IMAGPART_EXPR:
2659 case VIEW_CONVERT_EXPR:
2660 return 1;
2662 case BIT_FIELD_REF:
2663 return 3;
2665 case CONSTRUCTOR:
2666 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2668 default:
2669 return gimple_num_ops (stmt) - 1;
2673 /* Initialize VNO from STMT. */
2675 static void
2676 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2678 unsigned i;
2680 vno->opcode = gimple_assign_rhs_code (stmt);
2681 vno->type = gimple_expr_type (stmt);
2682 switch (vno->opcode)
2684 case REALPART_EXPR:
2685 case IMAGPART_EXPR:
2686 case VIEW_CONVERT_EXPR:
2687 vno->length = 1;
2688 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2689 break;
2691 case BIT_FIELD_REF:
2692 vno->length = 3;
2693 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2694 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2695 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2696 break;
2698 case CONSTRUCTOR:
2699 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2700 for (i = 0; i < vno->length; ++i)
2701 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2702 break;
2704 default:
2705 gcc_checking_assert (!gimple_assign_single_p (stmt));
2706 vno->length = gimple_num_ops (stmt) - 1;
2707 for (i = 0; i < vno->length; ++i)
2708 vno->op[i] = gimple_op (stmt, i + 1);
2712 /* Compute the hashcode for VNO and look for it in the hash table;
2713 return the resulting value number if it exists in the hash table.
2714 Return NULL_TREE if it does not exist in the hash table or if the
2715 result field of the operation is NULL. VNRESULT will contain the
2716 vn_nary_op_t from the hashtable if it exists. */
2718 static tree
2719 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2721 vn_nary_op_s **slot;
2723 if (vnresult)
2724 *vnresult = NULL;
2726 vno->hashcode = vn_nary_op_compute_hash (vno);
2727 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2728 NO_INSERT);
2729 if (!slot && current_info == optimistic_info)
2730 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2731 NO_INSERT);
2732 if (!slot)
2733 return NULL_TREE;
2734 if (vnresult)
2735 *vnresult = *slot;
2736 return (*slot)->result;
2739 /* Lookup an n-ary operation by its pieces and return the resulting value
2740 number if it exists in the hash table. Return NULL_TREE if it does
2741 not exist in the hash table or if the result field of the operation
2742 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2743 if it exists. */
2745 tree
2746 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2747 tree type, tree *ops, vn_nary_op_t *vnresult)
2749 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2750 sizeof_vn_nary_op (length));
2751 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2752 return vn_nary_op_lookup_1 (vno1, vnresult);
2755 /* Lookup OP in the current hash table, and return the resulting value
2756 number if it exists in the hash table. Return NULL_TREE if it does
2757 not exist in the hash table or if the result field of the operation
2758 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2759 if it exists. */
2761 tree
2762 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2764 vn_nary_op_t vno1
2765 = XALLOCAVAR (struct vn_nary_op_s,
2766 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2767 init_vn_nary_op_from_op (vno1, op);
2768 return vn_nary_op_lookup_1 (vno1, vnresult);
2771 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2772 value number if it exists in the hash table. Return NULL_TREE if
2773 it does not exist in the hash table. VNRESULT will contain the
2774 vn_nary_op_t from the hashtable if it exists. */
2776 tree
2777 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2779 vn_nary_op_t vno1
2780 = XALLOCAVAR (struct vn_nary_op_s,
2781 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2782 init_vn_nary_op_from_stmt (vno1, stmt);
2783 return vn_nary_op_lookup_1 (vno1, vnresult);
2786 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2788 static vn_nary_op_t
2789 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2791 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2794 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2795 obstack. */
2797 static vn_nary_op_t
2798 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2800 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2801 &current_info->nary_obstack);
2803 vno1->value_id = value_id;
2804 vno1->length = length;
2805 vno1->result = result;
2807 return vno1;
2810 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2811 VNO->HASHCODE first. */
2813 static vn_nary_op_t
2814 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2815 bool compute_hash)
2817 vn_nary_op_s **slot;
2819 if (compute_hash)
2820 vno->hashcode = vn_nary_op_compute_hash (vno);
2822 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2823 gcc_assert (!*slot);
2825 *slot = vno;
2826 return vno;
2829 /* Insert an n-ary operation into the current hash table using its
2830 pieces. Return the vn_nary_op_t structure we created and put in
2831 the hashtable. */
2833 vn_nary_op_t
2834 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2835 tree type, tree *ops,
2836 tree result, unsigned int value_id)
2838 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2839 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2840 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2843 /* Insert OP into the current hash table with a value number of
2844 RESULT. Return the vn_nary_op_t structure we created and put in
2845 the hashtable. */
2847 vn_nary_op_t
2848 vn_nary_op_insert (tree op, tree result)
2850 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2851 vn_nary_op_t vno1;
2853 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2854 init_vn_nary_op_from_op (vno1, op);
2855 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2858 /* Insert the rhs of STMT into the current hash table with a value number of
2859 RESULT. */
2861 static vn_nary_op_t
2862 vn_nary_op_insert_stmt (gimple *stmt, tree result)
2864 vn_nary_op_t vno1
2865 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2866 result, VN_INFO (result)->value_id);
2867 init_vn_nary_op_from_stmt (vno1, stmt);
2868 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2871 /* Compute a hashcode for PHI operation VP1 and return it. */
2873 static inline hashval_t
2874 vn_phi_compute_hash (vn_phi_t vp1)
2876 inchash::hash hstate (vp1->phiargs.length () > 2
2877 ? vp1->block->index : vp1->phiargs.length ());
2878 tree phi1op;
2879 tree type;
2880 edge e;
2881 edge_iterator ei;
2883 /* If all PHI arguments are constants we need to distinguish
2884 the PHI node via its type. */
2885 type = vp1->type;
2886 hstate.merge_hash (vn_hash_type (type));
2888 FOR_EACH_EDGE (e, ei, vp1->block->preds)
2890 /* Don't hash backedge values; they need to be handled as VN_TOP
2891 for optimistic value-numbering. */
2892 if (e->flags & EDGE_DFS_BACK)
2893 continue;
2895 phi1op = vp1->phiargs[e->dest_idx];
2896 if (phi1op == VN_TOP)
2897 continue;
2898 inchash::add_expr (phi1op, hstate);
2901 return hstate.end ();
2905 /* Return true if COND1 and COND2 represent the same condition, set
2906 *INVERTED_P if one needs to be inverted to make it the same as
2907 the other. */
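/* For example (annotation): "a_1 < b_2" and "b_2 > a_1" are the same
   condition; "a_1 >= b_2" is its inverted form and results in
   *INVERTED_P being set to true.  */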
2909 static bool
2910 cond_stmts_equal_p (gcond *cond1, gcond *cond2, bool *inverted_p)
2912 enum tree_code code1 = gimple_cond_code (cond1);
2913 enum tree_code code2 = gimple_cond_code (cond2);
2914 tree lhs1 = gimple_cond_lhs (cond1);
2915 tree lhs2 = gimple_cond_lhs (cond2);
2916 tree rhs1 = gimple_cond_rhs (cond1);
2917 tree rhs2 = gimple_cond_rhs (cond2);
2919 *inverted_p = false;
2920 if (code1 == code2)
2922 else if (code1 == swap_tree_comparison (code2))
2923 std::swap (lhs2, rhs2);
2924 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
2925 *inverted_p = true;
2926 else if (code1 == invert_tree_comparison
2927 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
2929 std::swap (lhs2, rhs2);
2930 *inverted_p = true;
2932 else
2933 return false;
2935 lhs1 = vn_valueize (lhs1);
2936 rhs1 = vn_valueize (rhs1);
2937 lhs2 = vn_valueize (lhs2);
2938 rhs2 = vn_valueize (rhs2);
2939 return ((expressions_equal_p (lhs1, lhs2)
2940 && expressions_equal_p (rhs1, rhs2))
2941 || (commutative_tree_code (code1)
2942 && expressions_equal_p (lhs1, rhs2)
2943 && expressions_equal_p (rhs1, lhs2)));
2946 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
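/* Note (annotation): besides identical blocks, two-argument PHIs in
   different blocks are also considered equal below when both blocks are
   controlled by equivalent (possibly inverted) GIMPLE_COND statements
   and the arguments on the corresponding true/false edges match.  */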
2948 static int
2949 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2951 if (vp1->hashcode != vp2->hashcode)
2952 return false;
2954 if (vp1->block != vp2->block)
2956 if (vp1->phiargs.length () != vp2->phiargs.length ())
2957 return false;
2959 switch (vp1->phiargs.length ())
2961 case 1:
2962 /* Single-arg PHIs are just copies. */
2963 break;
2965 case 2:
2967 /* Rule out backedges into the PHI. */
2968 if (vp1->block->loop_father->header == vp1->block
2969 || vp2->block->loop_father->header == vp2->block)
2970 return false;
2972 /* If the PHI nodes do not have compatible types
2973 they are not the same. */
2974 if (!types_compatible_p (vp1->type, vp2->type))
2975 return false;
2977 basic_block idom1
2978 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
2979 basic_block idom2
2980 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
2981 /* If the immediate dominators end in switch stmts, multiple
2982 values may end up in the same PHI arg via intermediate
2983 CFG merges. */
2984 if (EDGE_COUNT (idom1->succs) != 2
2985 || EDGE_COUNT (idom2->succs) != 2)
2986 return false;
2988 /* Verify the controlling stmt is the same. */
2989 gimple *last1 = last_stmt (idom1);
2990 gimple *last2 = last_stmt (idom2);
2991 if (gimple_code (last1) != GIMPLE_COND
2992 || gimple_code (last2) != GIMPLE_COND)
2993 return false;
2994 bool inverted_p;
2995 if (! cond_stmts_equal_p (as_a <gcond *> (last1),
2996 as_a <gcond *> (last2), &inverted_p))
2997 return false;
2999 /* Get at true/false controlled edges into the PHI. */
3000 edge te1, te2, fe1, fe2;
3001 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3002 &te1, &fe1)
3003 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3004 &te2, &fe2))
3005 return false;
3007 /* Swap edges if the second condition is the inverse of the
3008 first. */
3009 if (inverted_p)
3010 std::swap (te2, fe2);
3012 /* ??? Handle VN_TOP specially. */
3013 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3014 vp2->phiargs[te2->dest_idx])
3015 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3016 vp2->phiargs[fe2->dest_idx]))
3017 return false;
3019 return true;
3022 default:
3023 return false;
3027 /* If the PHI nodes do not have compatible types
3028 they are not the same. */
3029 if (!types_compatible_p (vp1->type, vp2->type))
3030 return false;
3032 /* Any phi in the same block will have its arguments in the
3033 same edge order, because of how we store phi nodes. */
3034 int i;
3035 tree phi1op;
3036 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
3038 tree phi2op = vp2->phiargs[i];
3039 if (phi1op == VN_TOP || phi2op == VN_TOP)
3040 continue;
3041 if (!expressions_equal_p (phi1op, phi2op))
3042 return false;
3045 return true;
3048 static vec<tree> shared_lookup_phiargs;
3050 /* Lookup PHI in the current hash table, and return the resulting
3051 value number if it exists in the hash table. Return NULL_TREE if
3052 it does not exist in the hash table. */
3054 static tree
3055 vn_phi_lookup (gimple *phi)
3057 vn_phi_s **slot;
3058 struct vn_phi_s vp1;
3059 edge e;
3060 edge_iterator ei;
3062 shared_lookup_phiargs.truncate (0);
3063 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
3065 /* Canonicalize the SSA_NAME's to their value number. */
3066 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3068 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3069 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3070 shared_lookup_phiargs[e->dest_idx] = def;
3072 vp1.type = TREE_TYPE (gimple_phi_result (phi));
3073 vp1.phiargs = shared_lookup_phiargs;
3074 vp1.block = gimple_bb (phi);
3075 vp1.hashcode = vn_phi_compute_hash (&vp1);
3076 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3077 NO_INSERT);
3078 if (!slot && current_info == optimistic_info)
3079 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3080 NO_INSERT);
3081 if (!slot)
3082 return NULL_TREE;
3083 return (*slot)->result;
3086 /* Insert PHI into the current hash table with a value number of
3087 RESULT. */
3089 static vn_phi_t
3090 vn_phi_insert (gimple *phi, tree result)
3092 vn_phi_s **slot;
3093 vn_phi_t vp1 = current_info->phis_pool->allocate ();
3094 vec<tree> args = vNULL;
3095 edge e;
3096 edge_iterator ei;
3098 args.safe_grow (gimple_phi_num_args (phi));
3100 /* Canonicalize the SSA_NAME's to their value number. */
3101 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3103 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3104 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3105 args[e->dest_idx] = def;
3107 vp1->value_id = VN_INFO (result)->value_id;
3108 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3109 vp1->phiargs = args;
3110 vp1->block = gimple_bb (phi);
3111 vp1->result = result;
3112 vp1->hashcode = vn_phi_compute_hash (vp1);
3114 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3116 /* Because we iterate over phi operations more than once, it's
3117 possible the slot might already exist here, hence no assert. */
3118 *slot = vp1;
3119 return vp1;
3123 /* Print set of components in strongly connected component SCC to OUT. */
3125 static void
3126 print_scc (FILE *out, vec<tree> scc)
3128 tree var;
3129 unsigned int i;
3131 fprintf (out, "SCC consists of:");
3132 FOR_EACH_VEC_ELT (scc, i, var)
3134 fprintf (out, " ");
3135 print_generic_expr (out, var, 0);
3137 fprintf (out, "\n");
3140 /* Return true if BB1 is dominated by BB2 taking into account edges
3141 that are not executable. */
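/* For example (annotation): if BB1 has a single executable predecessor
   edge the check is redone from that predecessor, and if BB2 has a
   single executable successor edge whose destination is reached only
   through it the check is redone from that successor.  */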
3143 static bool
3144 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3146 edge_iterator ei;
3147 edge e;
3149 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3150 return true;
3152 /* Before iterating we'd like to know if there exists an
3153 (executable) path from bb2 to bb1 at all; if not we can
3154 directly return false. For now simply iterate once. */
3156 /* Iterate to the single executable bb1 predecessor. */
3157 if (EDGE_COUNT (bb1->preds) > 1)
3159 edge prede = NULL;
3160 FOR_EACH_EDGE (e, ei, bb1->preds)
3161 if (e->flags & EDGE_EXECUTABLE)
3163 if (prede)
3165 prede = NULL;
3166 break;
3168 prede = e;
3170 if (prede)
3172 bb1 = prede->src;
3174 /* Re-do the dominance check with changed bb1. */
3175 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3176 return true;
3180 /* Iterate to the single executable bb2 successor. */
3181 edge succe = NULL;
3182 FOR_EACH_EDGE (e, ei, bb2->succs)
3183 if (e->flags & EDGE_EXECUTABLE)
3185 if (succe)
3187 succe = NULL;
3188 break;
3190 succe = e;
3192 if (succe)
3194 /* Verify the reached block is only reached through succe.
3195 If there is only one edge we can spare us the dominator
3196 check and iterate directly. */
3197 if (EDGE_COUNT (succe->dest->preds) > 1)
3199 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3200 if (e != succe
3201 && (e->flags & EDGE_EXECUTABLE))
3203 succe = NULL;
3204 break;
3207 if (succe)
3209 bb2 = succe->dest;
3211 /* Re-do the dominance check with changed bb2. */
3212 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3213 return true;
3217 /* We could now iterate updating bb1 / bb2. */
3218 return false;
3221 /* Set the value number of FROM to TO, return true if it has changed
3222 as a result. */
3224 static inline bool
3225 set_ssa_val_to (tree from, tree to)
3227 tree currval = SSA_VAL (from);
3228 HOST_WIDE_INT toff, coff;
3230 /* The only thing we allow as value numbers are ssa_names
3231 and invariants. So assert that here. We don't allow VN_TOP
3232 as visiting a stmt should produce a value-number other than
3233 that.
3234 ??? Still VN_TOP can happen for unreachable code, so force
3235 it to varying in that case. Not all code is prepared to
3236 get VN_TOP on valueization. */
3237 if (to == VN_TOP)
3239 if (dump_file && (dump_flags & TDF_DETAILS))
3240 fprintf (dump_file, "Forcing value number to varying on "
3241 "receiving VN_TOP\n");
3242 to = from;
3245 gcc_assert (to != NULL_TREE
3246 && ((TREE_CODE (to) == SSA_NAME
3247 && (to == from || SSA_VAL (to) == to))
3248 || is_gimple_min_invariant (to)));
3250 if (from != to)
3252 if (currval == from)
3254 if (dump_file && (dump_flags & TDF_DETAILS))
3256 fprintf (dump_file, "Not changing value number of ");
3257 print_generic_expr (dump_file, from, 0);
3258 fprintf (dump_file, " from VARYING to ");
3259 print_generic_expr (dump_file, to, 0);
3260 fprintf (dump_file, "\n");
3262 return false;
3264 else if (TREE_CODE (to) == SSA_NAME
3265 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3266 to = from;
3269 if (dump_file && (dump_flags & TDF_DETAILS))
3271 fprintf (dump_file, "Setting value number of ");
3272 print_generic_expr (dump_file, from, 0);
3273 fprintf (dump_file, " to ");
3274 print_generic_expr (dump_file, to, 0);
3277 if (currval != to
3278 && !operand_equal_p (currval, to, 0)
3279 /* ??? For addresses involving volatile objects or types operand_equal_p
3280 does not reliably detect ADDR_EXPRs as equal. We know we are only
3281 getting invariant gimple addresses here, so can use
3282 get_addr_base_and_unit_offset to do this comparison. */
3283 && !(TREE_CODE (currval) == ADDR_EXPR
3284 && TREE_CODE (to) == ADDR_EXPR
3285 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3286 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3287 && coff == toff))
3289 /* If we equate two SSA names we have to make the side-band info
3290 of the leader conservative (and remember whatever original value
3291 was present). */
3292 if (TREE_CODE (to) == SSA_NAME)
3294 if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3295 && SSA_NAME_RANGE_INFO (to))
3297 if (SSA_NAME_IS_DEFAULT_DEF (to)
3298 || dominated_by_p_w_unex
3299 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3300 gimple_bb (SSA_NAME_DEF_STMT (to))))
3301 /* Keep the info from the dominator. */
3303 else if (SSA_NAME_IS_DEFAULT_DEF (from)
3304 || dominated_by_p_w_unex
3305 (gimple_bb (SSA_NAME_DEF_STMT (to)),
3306 gimple_bb (SSA_NAME_DEF_STMT (from))))
3308 /* Save old info. */
3309 if (! VN_INFO (to)->info.range_info)
3311 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3312 VN_INFO (to)->range_info_anti_range_p
3313 = SSA_NAME_ANTI_RANGE_P (to);
3315 /* Use that from the dominator. */
3316 SSA_NAME_RANGE_INFO (to) = SSA_NAME_RANGE_INFO (from);
3317 SSA_NAME_ANTI_RANGE_P (to) = SSA_NAME_ANTI_RANGE_P (from);
3319 else
3321 /* Save old info. */
3322 if (! VN_INFO (to)->info.range_info)
3324 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3325 VN_INFO (to)->range_info_anti_range_p
3326 = SSA_NAME_ANTI_RANGE_P (to);
3328 /* Rather than allocating memory and unioning the info
3329 just clear it. */
3330 SSA_NAME_RANGE_INFO (to) = NULL;
3333 else if (POINTER_TYPE_P (TREE_TYPE (to))
3334 && SSA_NAME_PTR_INFO (to))
3336 if (SSA_NAME_IS_DEFAULT_DEF (to)
3337 || dominated_by_p_w_unex
3338 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3339 gimple_bb (SSA_NAME_DEF_STMT (to))))
3340 /* Keep the info from the dominator. */
3342 else if (SSA_NAME_IS_DEFAULT_DEF (from)
3343 || dominated_by_p_w_unex
3344 (gimple_bb (SSA_NAME_DEF_STMT (to)),
3345 gimple_bb (SSA_NAME_DEF_STMT (from))))
3347 /* Save old info. */
3348 if (! VN_INFO (to)->info.ptr_info)
3349 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3350 /* Use that from the dominator. */
3351 SSA_NAME_PTR_INFO (to) = SSA_NAME_PTR_INFO (from);
3353 else if (! SSA_NAME_PTR_INFO (from)
3354 /* Handle the case of trivially equivalent info. */
3355 || memcmp (SSA_NAME_PTR_INFO (to),
3356 SSA_NAME_PTR_INFO (from),
3357 sizeof (ptr_info_def)) != 0)
3359 /* Save old info. */
3360 if (! VN_INFO (to)->info.ptr_info)
3361 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3362 /* Rather than allocating memory and unioning the info
3363 just clear it. */
3364 SSA_NAME_PTR_INFO (to) = NULL;
3369 VN_INFO (from)->valnum = to;
3370 if (dump_file && (dump_flags & TDF_DETAILS))
3371 fprintf (dump_file, " (changed)\n");
3372 return true;
3374 if (dump_file && (dump_flags & TDF_DETAILS))
3375 fprintf (dump_file, "\n");
3376 return false;
3379 /* Mark as processed all the definitions in the defining stmt of USE, or
3380 the USE itself. */
3382 static void
3383 mark_use_processed (tree use)
3385 ssa_op_iter iter;
3386 def_operand_p defp;
3387 gimple *stmt = SSA_NAME_DEF_STMT (use);
3389 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3391 VN_INFO (use)->use_processed = true;
3392 return;
3395 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3397 tree def = DEF_FROM_PTR (defp);
3399 VN_INFO (def)->use_processed = true;
3403 /* Set the value number of all definitions in STMT to themselves.
3404 Return true if a value number changed. */
3406 static bool
3407 defs_to_varying (gimple *stmt)
3409 bool changed = false;
3410 ssa_op_iter iter;
3411 def_operand_p defp;
3413 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3415 tree def = DEF_FROM_PTR (defp);
3416 changed |= set_ssa_val_to (def, def);
3418 return changed;
3421 /* Visit a copy between LHS and RHS, return true if the value number
3422 changed. */
3424 static bool
3425 visit_copy (tree lhs, tree rhs)
3427 /* Valueize. */
3428 rhs = SSA_VAL (rhs);
3430 return set_ssa_val_to (lhs, rhs);
3433 /* Visit a nary operator RHS, value number it, and return true if the
3434 value number of LHS has changed as a result. */
3436 static bool
3437 visit_nary_op (tree lhs, gimple *stmt)
3439 bool changed = false;
3440 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3442 if (result)
3443 changed = set_ssa_val_to (lhs, result);
3444 else
3446 changed = set_ssa_val_to (lhs, lhs);
3447 vn_nary_op_insert_stmt (stmt, lhs);
3450 return changed;
3453 /* Visit a call STMT storing into LHS. Return true if the value number
3454 of the LHS has changed as a result. */
3456 static bool
3457 visit_reference_op_call (tree lhs, gcall *stmt)
3459 bool changed = false;
3460 struct vn_reference_s vr1;
3461 vn_reference_t vnresult = NULL;
3462 tree vdef = gimple_vdef (stmt);
3464 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3465 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3466 lhs = NULL_TREE;
3468 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3469 if (vnresult)
3471 if (vnresult->result_vdef && vdef)
3472 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3474 if (!vnresult->result && lhs)
3475 vnresult->result = lhs;
3477 if (vnresult->result && lhs)
3478 changed |= set_ssa_val_to (lhs, vnresult->result);
3480 else
3482 vn_reference_t vr2;
3483 vn_reference_s **slot;
3484 if (vdef)
3485 changed |= set_ssa_val_to (vdef, vdef);
3486 if (lhs)
3487 changed |= set_ssa_val_to (lhs, lhs);
3488 vr2 = current_info->references_pool->allocate ();
3489 vr2->vuse = vr1.vuse;
3490 /* As we are not walking the virtual operand chain we know the
3491 shared_lookup_references are still original so we can re-use
3492 them here. */
3493 vr2->operands = vr1.operands.copy ();
3494 vr2->type = vr1.type;
3495 vr2->set = vr1.set;
3496 vr2->hashcode = vr1.hashcode;
3497 vr2->result = lhs;
3498 vr2->result_vdef = vdef;
3499 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3500 INSERT);
3501 gcc_assert (!*slot);
3502 *slot = vr2;
3505 return changed;
3508 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3509 and return true if the value number of the LHS has changed as a result. */
3511 static bool
3512 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3514 bool changed = false;
3515 tree last_vuse;
3516 tree result;
3518 last_vuse = gimple_vuse (stmt);
3519 last_vuse_ptr = &last_vuse;
3520 result = vn_reference_lookup (op, gimple_vuse (stmt),
3521 default_vn_walk_kind, NULL, true);
3522 last_vuse_ptr = NULL;
3524 /* We handle type-punning through unions by value-numbering based
3525 on offset and size of the access. Be prepared to handle a
3526 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
3527 if (result
3528 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3530 /* We will be setting the value number of lhs to the value number
3531 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3532 So first simplify and lookup this expression to see if it
3533 is already available. */
3534 code_helper rcode = VIEW_CONVERT_EXPR;
3535 tree ops[3] = { result };
3536 result = vn_nary_build_or_lookup (rcode, TREE_TYPE (op), ops);
3539 if (result)
3540 changed = set_ssa_val_to (lhs, result);
3541 else
3543 changed = set_ssa_val_to (lhs, lhs);
3544 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3547 return changed;
3551 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3552 and return true if the value number of the LHS has changed as a result. */
3554 static bool
3555 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3557 bool changed = false;
3558 vn_reference_t vnresult = NULL;
3559 tree result, assign;
3560 bool resultsame = false;
3561 tree vuse = gimple_vuse (stmt);
3562 tree vdef = gimple_vdef (stmt);
3564 if (TREE_CODE (op) == SSA_NAME)
3565 op = SSA_VAL (op);
3567 /* First we want to lookup using the *vuses* from the store and see
3568 if the last store to this location with the same address
3569 had the same value.
3571 The vuses represent the memory state before the store. If the
3572 memory state, address, and value of the store are the same as for the
3573 last store to this location, then this store will produce the
3574 same memory state as that store.
3576 In this case the vdef versions for this store are value numbered to those
3577 vuse versions, since they represent the same memory state after
3578 this store.
3580 Otherwise, the vdefs for the store are used when inserting into
3581 the table, since the store generates a new memory state. */
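  /* Illustrative sketch (annotation): for
       a.x = tem_1;
       ...
       a.x = tem_1;
     where the second store sees the first one's memory state, the second
     store is redundant and its vdef is value-numbered to its vuse.  */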
3583 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL, false);
3585 if (result)
3587 if (TREE_CODE (result) == SSA_NAME)
3588 result = SSA_VAL (result);
3589 resultsame = expressions_equal_p (result, op);
3592 if ((!result || !resultsame)
3593 /* Only perform the following when being called from PRE
3594 which embeds tail merging. */
3595 && default_vn_walk_kind == VN_WALK)
3597 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3598 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3599 if (vnresult)
3601 VN_INFO (vdef)->use_processed = true;
3602 return set_ssa_val_to (vdef, vnresult->result_vdef);
3606 if (!result || !resultsame)
3608 if (dump_file && (dump_flags & TDF_DETAILS))
3610 fprintf (dump_file, "No store match\n");
3611 fprintf (dump_file, "Value numbering store ");
3612 print_generic_expr (dump_file, lhs, 0);
3613 fprintf (dump_file, " to ");
3614 print_generic_expr (dump_file, op, 0);
3615 fprintf (dump_file, "\n");
3617 /* Have to set value numbers before insert, since insert is
3618 going to valueize the references in-place. */
3619 if (vdef)
3621 changed |= set_ssa_val_to (vdef, vdef);
3624 /* Do not insert structure copies into the tables. */
3625 if (is_gimple_min_invariant (op)
3626 || is_gimple_reg (op))
3627 vn_reference_insert (lhs, op, vdef, NULL);
3629 /* Only perform the following when being called from PRE
3630 which embeds tail merging. */
3631 if (default_vn_walk_kind == VN_WALK)
3633 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3634 vn_reference_insert (assign, lhs, vuse, vdef);
3637 else
3639 /* We had a match, so value number the vdef to have the value
3640 number of the vuse it came from. */
3642 if (dump_file && (dump_flags & TDF_DETAILS))
3643 fprintf (dump_file, "Store matched earlier value, "
3644 "value numbering store vdefs to matching vuses.\n");
3646 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3649 return changed;
3652 /* Visit and value number PHI, return true if the value number
3653 changed. */
3655 static bool
3656 visit_phi (gimple *phi)
3658 bool changed = false;
3659 tree result;
3660 tree sameval = VN_TOP;
3661 bool allsame = true;
3662 unsigned n_executable = 0;
3664 /* TODO: We could check for this in init_sccvn, and replace this
3665 with a gcc_assert. */
3666 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3667 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3669 /* See if all non-TOP arguments have the same value. TOP is
3670 equivalent to everything, so we can ignore it. */
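  /* Illustrative sketch (annotation): for
       x_3 = PHI <x_1(2), x_1(3), x_2(4)>
     where the edge from block 4 is not executable or x_2 is still VN_TOP,
     all remaining arguments agree and x_3 is value-numbered to x_1.  */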
3671 edge_iterator ei;
3672 edge e;
3673 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3674 if (e->flags & EDGE_EXECUTABLE)
3676 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3678 ++n_executable;
3679 if (TREE_CODE (def) == SSA_NAME)
3680 def = SSA_VAL (def);
3681 if (def == VN_TOP)
3682 continue;
3683 if (sameval == VN_TOP)
3684 sameval = def;
3685 else if (!expressions_equal_p (def, sameval))
3687 allsame = false;
3688 break;
3692 /* If none of the edges was executable or all incoming values are
3693 undefined, keep the value-number at VN_TOP. If only a single edge
3694 is executable, use its value. */
3695 if (sameval == VN_TOP
3696 || n_executable == 1)
3697 return set_ssa_val_to (PHI_RESULT (phi), sameval);
3699 /* First see if it is equivalent to a phi node in this block. We prefer
3700 this as it allows IV elimination - see PRs 66502 and 67167. */
3701 result = vn_phi_lookup (phi);
3702 if (result)
3703 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3704 /* Otherwise, if all arguments were value numbered to the same value, the
3705 phi node has that value. */
3706 else if (allsame)
3707 changed = set_ssa_val_to (PHI_RESULT (phi), sameval);
3708 else
3710 vn_phi_insert (phi, PHI_RESULT (phi));
3711 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3714 return changed;
3717 /* Try to simplify RHS using equivalences and constant folding. */
3719 static tree
3720 try_to_simplify (gassign *stmt)
3722 enum tree_code code = gimple_assign_rhs_code (stmt);
3723 tree tem;
3725 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
3726 in this case; there is no point in doing extra work. */
3727 if (code == SSA_NAME)
3728 return NULL_TREE;
3730 /* First try constant folding based on our current lattice. */
3731 mprts_hook = vn_lookup_simplify_result;
3732 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3733 mprts_hook = NULL;
3734 if (tem
3735 && (TREE_CODE (tem) == SSA_NAME
3736 || is_gimple_min_invariant (tem)))
3737 return tem;
3739 return NULL_TREE;
3742 /* Visit and value number USE, return true if the value number
3743 changed. */
3745 static bool
3746 visit_use (tree use)
3748 bool changed = false;
3749 gimple *stmt = SSA_NAME_DEF_STMT (use);
3751 mark_use_processed (use);
3753 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3754 if (dump_file && (dump_flags & TDF_DETAILS)
3755 && !SSA_NAME_IS_DEFAULT_DEF (use))
3757 fprintf (dump_file, "Value numbering ");
3758 print_generic_expr (dump_file, use, 0);
3759 fprintf (dump_file, " stmt = ");
3760 print_gimple_stmt (dump_file, stmt, 0, 0);
3763 /* Handle uninitialized uses. */
3764 if (SSA_NAME_IS_DEFAULT_DEF (use))
3765 changed = set_ssa_val_to (use, use);
3766 else if (gimple_code (stmt) == GIMPLE_PHI)
3767 changed = visit_phi (stmt);
3768 else if (gimple_has_volatile_ops (stmt))
3769 changed = defs_to_varying (stmt);
3770 else if (gassign *ass = dyn_cast <gassign *> (stmt))
3772 enum tree_code code = gimple_assign_rhs_code (ass);
3773 tree lhs = gimple_assign_lhs (ass);
3774 tree rhs1 = gimple_assign_rhs1 (ass);
3775 tree simplified;
3777 /* Shortcut for copies. Simplifying copies is pointless,
3778 since we copy the expression and value they represent. */
3779 if (code == SSA_NAME
3780 && TREE_CODE (lhs) == SSA_NAME)
3782 changed = visit_copy (lhs, rhs1);
3783 goto done;
3785 simplified = try_to_simplify (ass);
3786 if (simplified)
3788 if (dump_file && (dump_flags & TDF_DETAILS))
3790 fprintf (dump_file, "RHS ");
3791 print_gimple_expr (dump_file, ass, 0, 0);
3792 fprintf (dump_file, " simplified to ");
3793 print_generic_expr (dump_file, simplified, 0);
3794 fprintf (dump_file, "\n");
3797 /* Setting value numbers to constants will occasionally
3798 screw up phi congruence because constants are not
3799 uniquely associated with a single ssa name that can be
3800 looked up. */
3801 if (simplified
3802 && is_gimple_min_invariant (simplified)
3803 && TREE_CODE (lhs) == SSA_NAME)
3805 changed = set_ssa_val_to (lhs, simplified);
3806 goto done;
3808 else if (simplified
3809 && TREE_CODE (simplified) == SSA_NAME
3810 && TREE_CODE (lhs) == SSA_NAME)
3812 changed = visit_copy (lhs, simplified);
3813 goto done;
3816 if ((TREE_CODE (lhs) == SSA_NAME
3817 /* We can substitute SSA_NAMEs that are live over
3818 abnormal edges with their constant value. */
3819 && !(gimple_assign_copy_p (ass)
3820 && is_gimple_min_invariant (rhs1))
3821 && !(simplified
3822 && is_gimple_min_invariant (simplified))
3823 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3824 /* Stores or copies from SSA_NAMEs that are live over
3825 abnormal edges are a problem. */
3826 || (code == SSA_NAME
3827 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3828 changed = defs_to_varying (ass);
3829 else if (REFERENCE_CLASS_P (lhs)
3830 || DECL_P (lhs))
3831 changed = visit_reference_op_store (lhs, rhs1, ass);
3832 else if (TREE_CODE (lhs) == SSA_NAME)
3834 if ((gimple_assign_copy_p (ass)
3835 && is_gimple_min_invariant (rhs1))
3836 || (simplified
3837 && is_gimple_min_invariant (simplified)))
3839 if (simplified)
3840 changed = set_ssa_val_to (lhs, simplified);
3841 else
3842 changed = set_ssa_val_to (lhs, rhs1);
3844 else
3846 /* Visit the original statement. */
3847 switch (vn_get_stmt_kind (ass))
3849 case VN_NARY:
3850 changed = visit_nary_op (lhs, ass);
3851 break;
3852 case VN_REFERENCE:
3853 changed = visit_reference_op_load (lhs, rhs1, ass);
3854 break;
3855 default:
3856 changed = defs_to_varying (ass);
3857 break;
3861 else
3862 changed = defs_to_varying (ass);
3864 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
3866 tree lhs = gimple_call_lhs (call_stmt);
3867 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3869 /* Try constant folding based on our current lattice. */
3870 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
3871 vn_valueize);
3872 if (simplified)
3874 if (dump_file && (dump_flags & TDF_DETAILS))
3876 fprintf (dump_file, "call ");
3877 print_gimple_expr (dump_file, call_stmt, 0, 0);
3878 fprintf (dump_file, " simplified to ");
3879 print_generic_expr (dump_file, simplified, 0);
3880 fprintf (dump_file, "\n");
3883 /* Setting value numbers to constants will occasionally
3884 screw up phi congruence because constants are not
3885 uniquely associated with a single ssa name that can be
3886 looked up. */
3887 if (simplified
3888 && is_gimple_min_invariant (simplified))
3890 changed = set_ssa_val_to (lhs, simplified);
3891 if (gimple_vdef (call_stmt))
3892 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
3893 SSA_VAL (gimple_vuse (call_stmt)));
3894 goto done;
3896 else if (simplified
3897 && TREE_CODE (simplified) == SSA_NAME)
3899 changed = visit_copy (lhs, simplified);
3900 if (gimple_vdef (call_stmt))
3901 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
3902 SSA_VAL (gimple_vuse (call_stmt)));
3903 goto done;
3905 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3907 changed = defs_to_varying (call_stmt);
3908 goto done;
3912 if (!gimple_call_internal_p (call_stmt)
3913 && (/* Calls to the same function with the same vuse
3914 and the same operands do not necessarily return the same
3915 value, unless they're pure or const. */
3916 gimple_call_flags (call_stmt) & (ECF_PURE | ECF_CONST)
3917 /* If calls have a vdef, subsequent calls won't have
3918 the same incoming vuse. So, if 2 calls with vdef have the
3919 same vuse, we know they're not subsequent.
3920 We can therefore give 2 calls to the same function with the
3921 same vuse and the same operands, which are not subsequent,
3922 the same value number, because there is no code in the
3923 program that can compare the 2 values... */
3924 || (gimple_vdef (call_stmt)
3925 /* ... unless the call returns a pointer which does
3926 not alias with anything else, in which case the
3927 information that the values are distinct is encoded
3928 in the IL. */
3929 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
3930 /* Only perform the following when being called from PRE
3931 which embeds tail merging. */
3932 && default_vn_walk_kind == VN_WALK)))
3933 changed = visit_reference_op_call (lhs, call_stmt);
3934 else
3935 changed = defs_to_varying (call_stmt);
3937 else
3938 changed = defs_to_varying (stmt);
3939 done:
3940 return changed;
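/* Illustrative note (not part of the original sources, SSA names
   hypothetical): the call handling above is what enables tail merging
   during PRE.  With VN_WALK, the two calls in

     if (flag_1)
       res_2 = foo (x_3);   vuse .MEM_4, vdef .MEM_5
     else
       res_6 = foo (x_3);   vuse .MEM_4, vdef .MEM_7

   have the same vuse and the same operands but can never both execute
   in one run, so no code can compare their results and
   visit_reference_op_call may value number them the same even though
   foo is neither pure nor const.  */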
3943 /* Compare two operands by reverse postorder index. */
3945 static int
3946 compare_ops (const void *pa, const void *pb)
3948 const tree opa = *((const tree *)pa);
3949 const tree opb = *((const tree *)pb);
3950 gimple *opstmta = SSA_NAME_DEF_STMT (opa);
3951 gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
3952 basic_block bba;
3953 basic_block bbb;
3955 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3956 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3957 else if (gimple_nop_p (opstmta))
3958 return -1;
3959 else if (gimple_nop_p (opstmtb))
3960 return 1;
3962 bba = gimple_bb (opstmta);
3963 bbb = gimple_bb (opstmtb);
3965 if (!bba && !bbb)
3966 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3967 else if (!bba)
3968 return -1;
3969 else if (!bbb)
3970 return 1;
3972 if (bba == bbb)
3974 if (gimple_code (opstmta) == GIMPLE_PHI
3975 && gimple_code (opstmtb) == GIMPLE_PHI)
3976 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3977 else if (gimple_code (opstmta) == GIMPLE_PHI)
3978 return -1;
3979 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3980 return 1;
3981 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3982 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3983 else
3984 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3986 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3989 /* Sort an array containing members of a strongly connected component
3990 SCC so that the members are ordered by RPO number.
3991 This means that when the sort is complete, iterating through the
3992 array will give you the members in RPO order. */
3994 static void
3995 sort_scc (vec<tree> scc)
3997 scc.qsort (compare_ops);
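/* Illustrative example (hypothetical names): for an SCC containing
   i_3 defined by a PHI in bb2, j_7 defined by a statement in bb2 and
   t_5 defined in bb4, with rpo_numbers[bb2] < rpo_numbers[bb4], the
   sort yields i_3, j_7, t_5: blocks in RPO order, PHI definitions
   before other definitions within the same block.  */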
4000 /* Insert the no longer used nary ONARY into the hash tables of INFO. */
4002 static void
4003 copy_nary (vn_nary_op_t onary, vn_tables_t info)
4005 size_t size = sizeof_vn_nary_op (onary->length);
4006 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
4007 &info->nary_obstack);
4008 memcpy (nary, onary, size);
4009 vn_nary_op_insert_into (nary, info->nary, false);
4012 /* Insert the no longer used phi OPHI into the hash tables of INFO. */
4014 static void
4015 copy_phi (vn_phi_t ophi, vn_tables_t info)
4017 vn_phi_t phi = info->phis_pool->allocate ();
4018 vn_phi_s **slot;
4019 memcpy (phi, ophi, sizeof (*phi));
4020 ophi->phiargs.create (0);
4021 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
4022 gcc_assert (!*slot);
4023 *slot = phi;
4026 /* Insert the no longer used reference OREF into the hash tables of INFO. */
4028 static void
4029 copy_reference (vn_reference_t oref, vn_tables_t info)
4031 vn_reference_t ref;
4032 vn_reference_s **slot;
4033 ref = info->references_pool->allocate ();
4034 memcpy (ref, oref, sizeof (*ref));
4035 oref->operands.create (0);
4036 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
4037 if (*slot)
4038 free_reference (*slot);
4039 *slot = ref;
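/* Note: the three copy_* helpers above transfer the entries of the
   converged optimistic tables into the valid tables (see process_scc
   below); for references an existing valid entry is replaced, which is
   why the old one is freed first.  */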
4042 /* Process a strongly connected component in the SSA graph. */
4044 static void
4045 process_scc (vec<tree> scc)
4047 tree var;
4048 unsigned int i;
4049 unsigned int iterations = 0;
4050 bool changed = true;
4051 vn_nary_op_iterator_type hin;
4052 vn_phi_iterator_type hip;
4053 vn_reference_iterator_type hir;
4054 vn_nary_op_t nary;
4055 vn_phi_t phi;
4056 vn_reference_t ref;
4058 /* If the SCC has a single member, just visit it. */
4059 if (scc.length () == 1)
4061 tree use = scc[0];
4062 if (VN_INFO (use)->use_processed)
4063 return;
4064 /* We need to make sure it doesn't form a cycle itself, which can
4065 happen for self-referential PHI nodes. In that case we would
4066 end up inserting an expression with VN_TOP operands into the
4067 valid table which makes us derive bogus equivalences later.
4068 The cheapest way to check this is to assume it for all PHI nodes. */
4069 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
4070 /* Fallthru to iteration. */ ;
4071 else
4073 visit_use (use);
4074 return;
4078 if (dump_file && (dump_flags & TDF_DETAILS))
4079 print_scc (dump_file, scc);
4081 /* Iterate over the SCC with the optimistic table until it stops
4082 changing. */
4083 current_info = optimistic_info;
4084 while (changed)
4086 changed = false;
4087 iterations++;
4088 if (dump_file && (dump_flags & TDF_DETAILS))
4089 fprintf (dump_file, "Starting iteration %d\n", iterations);
4090 /* As we are value-numbering optimistically we have to
4091 clear the expression tables and the simplified expressions
4092 in each iteration until we converge. */
4093 optimistic_info->nary->empty ();
4094 optimistic_info->phis->empty ();
4095 optimistic_info->references->empty ();
4096 obstack_free (&optimistic_info->nary_obstack, NULL);
4097 gcc_obstack_init (&optimistic_info->nary_obstack);
4098 optimistic_info->phis_pool->release ();
4099 optimistic_info->references_pool->release ();
4100 FOR_EACH_VEC_ELT (scc, i, var)
4101 gcc_assert (!VN_INFO (var)->needs_insertion
4102 && VN_INFO (var)->expr == NULL);
4103 FOR_EACH_VEC_ELT (scc, i, var)
4104 changed |= visit_use (var);
4107 if (dump_file && (dump_flags & TDF_DETAILS))
4108 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4109 statistics_histogram_event (cfun, "SCC iterations", iterations);
4111 /* Finally, copy the contents of the no longer used optimistic
4112 table to the valid table. */
4113 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
4114 copy_nary (nary, valid_info);
4115 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
4116 copy_phi (phi, valid_info);
4117 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
4118 ref, vn_reference_t, hir)
4119 copy_reference (ref, valid_info);
4121 current_info = valid_info;
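/* Worked example (hypothetical SSA names, assuming an integral type)
   of the optimistic iteration above: for the cycle

     i_1 = PHI <x_0(entry), i_2(latch)>
     i_2 = i_1 + 0;

   the first iteration optimistically values i_1 as x_0 (the latch
   argument is still VN_TOP and thus ignored) and then simplifies i_2
   to x_0 as well; the second iteration re-visits the PHI with the
   latch argument now valued x_0, nothing changes, and the SCC
   converges with both names equal to x_0, proving i loop-invariant.  */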
4125 /* Pop the components of the found SCC for NAME off the SCC stack
4126 and process them. Returns true if all went well, false if
4127 we ran into resource limits. */
4129 static bool
4130 extract_and_process_scc_for_name (tree name)
4132 auto_vec<tree> scc;
4133 tree x;
4135 /* Found an SCC, pop the components off the SCC stack and
4136 process them. */
4139 x = sccstack.pop ();
4141 VN_INFO (x)->on_sccstack = false;
4142 scc.safe_push (x);
4143 } while (x != name);
4145 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
4146 if (scc.length ()
4147 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4149 if (dump_file)
4150 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
4151 "SCC size %u exceeding %u\n", scc.length (),
4152 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4154 return false;
4157 if (scc.length () > 1)
4158 sort_scc (scc);
4160 process_scc (scc);
4162 return true;
4165 /* Depth-first search on NAME to discover and process SCCs in the SSA
4166 graph.
4167 Execution of this algorithm relies on the fact that the SCCs are
4168 popped off the stack in topological order.
4169 Returns true if successful, false if we stopped processing SCCs due
4170 to resource constraints. */
4172 static bool
4173 DFS (tree name)
4175 auto_vec<ssa_op_iter> itervec;
4176 auto_vec<tree> namevec;
4177 use_operand_p usep = NULL;
4178 gimple *defstmt;
4179 tree use;
4180 ssa_op_iter iter;
4182 start_over:
4183 /* SCC info */
4184 VN_INFO (name)->dfsnum = next_dfs_num++;
4185 VN_INFO (name)->visited = true;
4186 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4188 sccstack.safe_push (name);
4189 VN_INFO (name)->on_sccstack = true;
4190 defstmt = SSA_NAME_DEF_STMT (name);
4192 /* Recursively DFS on our operands, looking for SCC's. */
4193 if (!gimple_nop_p (defstmt))
4195 /* Push a new iterator. */
4196 if (gphi *phi = dyn_cast <gphi *> (defstmt))
4197 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4198 else
4199 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4201 else
4202 clear_and_done_ssa_iter (&iter);
4204 while (1)
4206 /* If we are done processing uses of a name, go up the stack
4207 of iterators and process SCCs as we found them. */
4208 if (op_iter_done (&iter))
4210 /* See if we found an SCC. */
4211 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4212 if (!extract_and_process_scc_for_name (name))
4213 return false;
4215 /* Check if we are done. */
4216 if (namevec.is_empty ())
4217 return true;
4219 /* Restore the last use walker and continue walking there. */
4220 use = name;
4221 name = namevec.pop ();
4222 memcpy (&iter, &itervec.last (),
4223 sizeof (ssa_op_iter));
4224 itervec.pop ();
4225 goto continue_walking;
4228 use = USE_FROM_PTR (usep);
4230 /* Since we handle phi nodes, we will sometimes get
4231 invariants in the use expression. */
4232 if (TREE_CODE (use) == SSA_NAME)
4234 if (! (VN_INFO (use)->visited))
4236 /* Recurse by pushing the current use walking state on
4237 the stack and starting over. */
4238 itervec.safe_push (iter);
4239 namevec.safe_push (name);
4240 name = use;
4241 goto start_over;
4243 continue_walking:
4244 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4245 VN_INFO (use)->low);
4247 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4248 && VN_INFO (use)->on_sccstack)
4250 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4251 VN_INFO (name)->low);
4255 usep = op_iter_next_use (&iter);
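/* Note: DFS above is Tarjan's SCC algorithm written iteratively; the
   recursion is replaced by the explicit namevec/itervec stacks so that
   very deep SSA def-use chains cannot overflow the call stack.  An SCC
   is detected when a name's low value equals its dfsnum, at which point
   extract_and_process_scc_for_name pops the component off sccstack.  */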
4259 /* Allocate a value number table. */
4261 static void
4262 allocate_vn_table (vn_tables_t table)
4264 table->phis = new vn_phi_table_type (23);
4265 table->nary = new vn_nary_op_table_type (23);
4266 table->references = new vn_reference_table_type (23);
4268 gcc_obstack_init (&table->nary_obstack);
4269 table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4270 table->references_pool = new object_allocator<vn_reference_s>
4271 ("VN references");
4274 /* Free a value number table. */
4276 static void
4277 free_vn_table (vn_tables_t table)
4279 delete table->phis;
4280 table->phis = NULL;
4281 delete table->nary;
4282 table->nary = NULL;
4283 delete table->references;
4284 table->references = NULL;
4285 obstack_free (&table->nary_obstack, NULL);
4286 delete table->phis_pool;
4287 delete table->references_pool;
4290 static void
4291 init_scc_vn (void)
4293 size_t i;
4294 int j;
4295 int *rpo_numbers_temp;
4297 calculate_dominance_info (CDI_DOMINATORS);
4298 mark_dfs_back_edges ();
4300 sccstack.create (0);
4301 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4303 constant_value_ids = BITMAP_ALLOC (NULL);
4305 next_dfs_num = 1;
4306 next_value_id = 1;
4308 vn_ssa_aux_table.create (num_ssa_names + 1);
4309 /* VEC_alloc doesn't actually grow it to the right size; it just
4310 preallocates the space to do so. */
4311 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4312 gcc_obstack_init (&vn_ssa_aux_obstack);
4314 shared_lookup_phiargs.create (0);
4315 shared_lookup_references.create (0);
4316 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4317 rpo_numbers_temp =
4318 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4319 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4321 /* The temporary array holds the RPO ordering: rpo[i] = bb means that
4322 bb is the i'th block in RPO order. We want to map bb indices to RPO
4323 numbers instead, so we need to invert this array. */
4324 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4325 rpo_numbers[rpo_numbers_temp[j]] = j;
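/* For instance (hypothetical numbers), rpo_numbers_temp = { 2, 4, 3 }
   yields rpo_numbers[2] = 0, rpo_numbers[4] = 1 and rpo_numbers[3] = 2.  */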
4327 XDELETE (rpo_numbers_temp);
4329 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
4331 renumber_gimple_stmt_uids ();
4333 /* Create the valid and optimistic value numbering tables. */
4334 valid_info = XCNEW (struct vn_tables_s);
4335 allocate_vn_table (valid_info);
4336 optimistic_info = XCNEW (struct vn_tables_s);
4337 allocate_vn_table (optimistic_info);
4338 current_info = valid_info;
4340 /* Create the VN_INFO structures, and initialize value numbers to
4341 TOP or VARYING for parameters. */
4342 for (i = 1; i < num_ssa_names; i++)
4344 tree name = ssa_name (i);
4345 if (!name)
4346 continue;
4348 VN_INFO_GET (name)->valnum = VN_TOP;
4349 VN_INFO (name)->needs_insertion = false;
4350 VN_INFO (name)->expr = NULL;
4351 VN_INFO (name)->value_id = 0;
4353 if (!SSA_NAME_IS_DEFAULT_DEF (name))
4354 continue;
4356 switch (TREE_CODE (SSA_NAME_VAR (name)))
4358 case VAR_DECL:
4359 /* Undefined vars keep TOP. */
4360 break;
4362 case PARM_DECL:
4363 /* Parameters are VARYING but we can record a condition
4364 if we know it is a non-NULL pointer. */
4365 VN_INFO (name)->visited = true;
4366 VN_INFO (name)->valnum = name;
4367 if (POINTER_TYPE_P (TREE_TYPE (name))
4368 && nonnull_arg_p (SSA_NAME_VAR (name)))
4370 tree ops[2];
4371 ops[0] = name;
4372 ops[1] = build_int_cst (TREE_TYPE (name), 0);
4373 vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4374 boolean_true_node, 0);
4375 if (dump_file && (dump_flags & TDF_DETAILS))
4377 fprintf (dump_file, "Recording ");
4378 print_generic_expr (dump_file, name, TDF_SLIM);
4379 fprintf (dump_file, " != 0\n");
4382 break;
4384 case RESULT_DECL:
4385 /* If the result is passed by invisible reference, the default
4386 def is initialized; otherwise it's uninitialized. */
4387 if (DECL_BY_REFERENCE (SSA_NAME_VAR (name)))
4389 VN_INFO (name)->visited = true;
4390 VN_INFO (name)->valnum = name;
4392 break;
4394 default:
4395 gcc_unreachable ();
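/* Illustrative note (hypothetical name): for a pointer parameter p that
   nonnull_arg_p proves non-null, the PARM_DECL case above records the
   nary "p_1(D) != 0 == true", so dominated comparisons of p_1(D)
   against zero can later be folded via lookups in the valid table.  */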
4400 /* Restore SSA info that has been reset on value leaders. */
4402 void
4403 scc_vn_restore_ssa_info (void)
4405 for (unsigned i = 0; i < num_ssa_names; i++)
4407 tree name = ssa_name (i);
4408 if (name
4409 && has_VN_INFO (name))
4411 if (VN_INFO (name)->needs_insertion)
4413 else if (POINTER_TYPE_P (TREE_TYPE (name))
4414 && VN_INFO (name)->info.ptr_info)
4415 SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4416 else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4417 && VN_INFO (name)->info.range_info)
4419 SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4420 SSA_NAME_ANTI_RANGE_P (name)
4421 = VN_INFO (name)->range_info_anti_range_p;
4427 void
4428 free_scc_vn (void)
4430 size_t i;
4432 delete constant_to_value_id;
4433 constant_to_value_id = NULL;
4434 BITMAP_FREE (constant_value_ids);
4435 shared_lookup_phiargs.release ();
4436 shared_lookup_references.release ();
4437 XDELETEVEC (rpo_numbers);
4439 for (i = 0; i < num_ssa_names; i++)
4441 tree name = ssa_name (i);
4442 if (name
4443 && has_VN_INFO (name)
4444 && VN_INFO (name)->needs_insertion)
4445 release_ssa_name (name);
4447 obstack_free (&vn_ssa_aux_obstack, NULL);
4448 vn_ssa_aux_table.release ();
4450 sccstack.release ();
4451 free_vn_table (valid_info);
4452 XDELETE (valid_info);
4453 free_vn_table (optimistic_info);
4454 XDELETE (optimistic_info);
4456 BITMAP_FREE (const_parms);
4459 /* Set *ID according to RESULT. */
4461 static void
4462 set_value_id_for_result (tree result, unsigned int *id)
4464 if (result && TREE_CODE (result) == SSA_NAME)
4465 *id = VN_INFO (result)->value_id;
4466 else if (result && is_gimple_min_invariant (result))
4467 *id = get_or_alloc_constant_value_id (result);
4468 else
4469 *id = get_next_value_id ();
4472 /* Set the value ids in the valid hash tables. */
4474 static void
4475 set_hashtable_value_ids (void)
4477 vn_nary_op_iterator_type hin;
4478 vn_phi_iterator_type hip;
4479 vn_reference_iterator_type hir;
4480 vn_nary_op_t vno;
4481 vn_reference_t vr;
4482 vn_phi_t vp;
4484 /* Now set the value ids of the things we had put in the hash
4485 table. */
4487 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4488 set_value_id_for_result (vno->result, &vno->value_id);
4490 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4491 set_value_id_for_result (vp->result, &vp->value_id);
4493 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4494 hir)
4495 set_value_id_for_result (vr->result, &vr->value_id);
4498 class sccvn_dom_walker : public dom_walker
4500 public:
4501 sccvn_dom_walker ()
4502 : dom_walker (CDI_DOMINATORS, true), fail (false), cond_stack (0) {}
4504 virtual edge before_dom_children (basic_block);
4505 virtual void after_dom_children (basic_block);
4507 void record_cond (basic_block,
4508 enum tree_code code, tree lhs, tree rhs, bool value);
4509 void record_conds (basic_block,
4510 enum tree_code code, tree lhs, tree rhs, bool value);
4512 bool fail;
4513 auto_vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4514 cond_stack;
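/* Each cond_stack entry pairs a basic block with the temporarily
   inserted nary and the entry it displaced in the current nary table
   (or NULL), so after_dom_children can undo the insertion and restore
   the old entry when leaving the block's dominated region.  */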
4517 /* Record a temporary condition for the BB and its dominated blocks. */
4519 void
4520 sccvn_dom_walker::record_cond (basic_block bb,
4521 enum tree_code code, tree lhs, tree rhs,
4522 bool value)
4524 tree ops[2] = { lhs, rhs };
4525 vn_nary_op_t old = NULL;
4526 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4527 current_info->nary->remove_elt_with_hash (old, old->hashcode);
4528 vn_nary_op_t cond
4529 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4530 value
4531 ? boolean_true_node
4532 : boolean_false_node, 0);
4533 if (dump_file && (dump_flags & TDF_DETAILS))
4535 fprintf (dump_file, "Recording temporarily ");
4536 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4537 fprintf (dump_file, " %s ", get_tree_code_name (code));
4538 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4539 fprintf (dump_file, " == %s%s\n",
4540 value ? "true" : "false",
4541 old ? " (old entry saved)" : "");
4543 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4546 /* Record temporary conditions for the BB and its dominated blocks
4547 according to LHS CODE RHS == VALUE, together with the conditions it implies. */
4549 void
4550 sccvn_dom_walker::record_conds (basic_block bb,
4551 enum tree_code code, tree lhs, tree rhs,
4552 bool value)
4554 /* Record the original condition. */
4555 record_cond (bb, code, lhs, rhs, value);
4557 if (!value)
4558 return;
4560 /* Record dominated conditions if the condition is true. Note that
4561 the inversion is already recorded. */
4562 switch (code)
4564 case LT_EXPR:
4565 case GT_EXPR:
4566 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4567 record_cond (bb, NE_EXPR, lhs, rhs, true);
4568 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4569 break;
4571 case EQ_EXPR:
4572 record_cond (bb, LE_EXPR, lhs, rhs, true);
4573 record_cond (bb, GE_EXPR, lhs, rhs, true);
4574 record_cond (bb, LT_EXPR, lhs, rhs, false);
4575 record_cond (bb, GT_EXPR, lhs, rhs, false);
4576 break;
4578 default:
4579 break;
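/* For example, recording a_1 < b_2 == true above also records
   a_1 <= b_2 == true, a_1 != b_2 == true and a_1 == b_2 == false, so
   dominated uses of any of these forms can be simplified to constants
   (names hypothetical).  */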
4583 /* Restore expressions and values derived from conditionals. */
4585 void
4586 sccvn_dom_walker::after_dom_children (basic_block bb)
4588 while (!cond_stack.is_empty ()
4589 && cond_stack.last ().first == bb)
4591 vn_nary_op_t cond = cond_stack.last ().second.first;
4592 vn_nary_op_t old = cond_stack.last ().second.second;
4593 current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4594 if (old)
4595 vn_nary_op_insert_into (old, current_info->nary, false);
4596 cond_stack.pop ();
4600 /* Value number all statements in BB. */
4602 edge
4603 sccvn_dom_walker::before_dom_children (basic_block bb)
4605 edge e;
4606 edge_iterator ei;
4608 if (fail)
4609 return NULL;
4611 if (dump_file && (dump_flags & TDF_DETAILS))
4612 fprintf (dump_file, "Visiting BB %d\n", bb->index);
4614 /* If we have a single predecessor record the equivalence from a
4615 possible condition on the predecessor edge. */
4616 edge pred_e = NULL;
4617 FOR_EACH_EDGE (e, ei, bb->preds)
4619 /* Ignore simple backedges into this block to allow recording conditions
4620 in loop headers. */
4621 if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
4622 continue;
4623 if (! pred_e)
4624 pred_e = e;
4625 else
4627 pred_e = NULL;
4628 break;
4631 if (pred_e)
4633 /* Check if there are multiple executable successor edges in
4634 the source block. Otherwise there is no additional info
4635 to be recorded. */
4636 edge e2;
4637 FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4638 if (e2 != pred_e
4639 && e2->flags & EDGE_EXECUTABLE)
4640 break;
4641 if (e2 && (e2->flags & EDGE_EXECUTABLE))
4643 gimple *stmt = last_stmt (pred_e->src);
4644 if (stmt
4645 && gimple_code (stmt) == GIMPLE_COND)
4647 enum tree_code code = gimple_cond_code (stmt);
4648 tree lhs = gimple_cond_lhs (stmt);
4649 tree rhs = gimple_cond_rhs (stmt);
4650 record_conds (bb, code, lhs, rhs,
4651 (pred_e->flags & EDGE_TRUE_VALUE) != 0);
4652 code = invert_tree_comparison (code, HONOR_NANS (lhs));
4653 if (code != ERROR_MARK)
4654 record_conds (bb, code, lhs, rhs,
4655 (pred_e->flags & EDGE_TRUE_VALUE) == 0);
4660 /* Value-number all defs in the basic-block. */
4661 for (gphi_iterator gsi = gsi_start_phis (bb);
4662 !gsi_end_p (gsi); gsi_next (&gsi))
4664 gphi *phi = gsi.phi ();
4665 tree res = PHI_RESULT (phi);
4666 if (!VN_INFO (res)->visited
4667 && !DFS (res))
4669 fail = true;
4670 return NULL;
4673 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4674 !gsi_end_p (gsi); gsi_next (&gsi))
4676 ssa_op_iter i;
4677 tree op;
4678 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4679 if (!VN_INFO (op)->visited
4680 && !DFS (op))
4682 fail = true;
4683 return NULL;
4687 /* Finally look at the last stmt. */
4688 gimple *stmt = last_stmt (bb);
4689 if (!stmt)
4690 return NULL;
4692 enum gimple_code code = gimple_code (stmt);
4693 if (code != GIMPLE_COND
4694 && code != GIMPLE_SWITCH
4695 && code != GIMPLE_GOTO)
4696 return NULL;
4698 if (dump_file && (dump_flags & TDF_DETAILS))
4700 fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
4701 print_gimple_stmt (dump_file, stmt, 0, 0);
4704 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4705 if value-numbering can prove they are not reachable. Handling
4706 computed gotos is also possible. */
4707 tree val;
4708 switch (code)
4710 case GIMPLE_COND:
4712 tree lhs = vn_valueize (gimple_cond_lhs (stmt));
4713 tree rhs = vn_valueize (gimple_cond_rhs (stmt));
4714 val = gimple_simplify (gimple_cond_code (stmt),
4715 boolean_type_node, lhs, rhs,
4716 NULL, vn_valueize);
4717 /* If that didn't simplify to a constant see if we have recorded
4718 temporary expressions from taken edges. */
4719 if (!val || TREE_CODE (val) != INTEGER_CST)
4721 tree ops[2];
4722 ops[0] = lhs;
4723 ops[1] = rhs;
4724 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
4725 boolean_type_node, ops, NULL);
4727 break;
4729 case GIMPLE_SWITCH:
4730 val = gimple_switch_index (as_a <gswitch *> (stmt));
4731 break;
4732 case GIMPLE_GOTO:
4733 val = gimple_goto_dest (stmt);
4734 break;
4735 default:
4736 gcc_unreachable ();
4738 if (!val)
4739 return NULL;
4741 edge taken = find_taken_edge (bb, vn_valueize (val));
4742 if (!taken)
4743 return NULL;
4745 if (dump_file && (dump_flags & TDF_DETAILS))
4746 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4747 "not executable\n", bb->index, bb->index, taken->dest->index);
4749 return taken;
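/* Note: returning the taken edge lets the dominator walker mark all
   other outgoing edges of BB as not executable (see the dump message
   above), so blocks reachable only through those edges can be skipped
   and no conditions are recorded for them.  */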
4752 /* Do SCCVN. Returns true if it finished, false if we bailed out
4753 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
4754 how we use the alias oracle walking during the VN process. */
4756 bool
4757 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4759 size_t i;
4761 default_vn_walk_kind = default_vn_walk_kind_;
4763 init_scc_vn ();
4765 /* Collect pointers we know point to readonly memory. */
4766 const_parms = BITMAP_ALLOC (NULL);
4767 tree fnspec = lookup_attribute ("fn spec",
4768 TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
4769 if (fnspec)
4771 fnspec = TREE_VALUE (TREE_VALUE (fnspec));
4772 i = 1;
4773 for (tree arg = DECL_ARGUMENTS (cfun->decl);
4774 arg; arg = DECL_CHAIN (arg), ++i)
4776 if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
4777 break;
4778 if (TREE_STRING_POINTER (fnspec)[i] == 'R'
4779 || TREE_STRING_POINTER (fnspec)[i] == 'r')
4781 tree name = ssa_default_def (cfun, arg);
4782 if (name)
4783 bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
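/* Illustrative note: "fn spec" attributes are normally attached
   internally (for builtins and by some front ends).  With a
   hypothetical spec string ".R", character 1 applies to the first
   parameter, so its default def would be recorded in CONST_PARMS
   above.  */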
4788 /* Walk all blocks in dominator order, value-numbering stmts
4789 SSA defs and decide whether outgoing edges are not executable. */
4790 sccvn_dom_walker walker;
4791 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4792 if (walker.fail)
4794 free_scc_vn ();
4795 return false;
4798 /* Initialize the value ids and prune out remaining VN_TOPs
4799 from dead code. */
4800 for (i = 1; i < num_ssa_names; ++i)
4802 tree name = ssa_name (i);
4803 vn_ssa_aux_t info;
4804 if (!name)
4805 continue;
4806 info = VN_INFO (name);
4807 if (!info->visited)
4808 info->valnum = name;
4809 if (info->valnum == name
4810 || info->valnum == VN_TOP)
4811 info->value_id = get_next_value_id ();
4812 else if (is_gimple_min_invariant (info->valnum))
4813 info->value_id = get_or_alloc_constant_value_id (info->valnum);
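/* A name that is still its own value or remained VN_TOP (dead code)
   represents a distinct value and gets a fresh value id; names valued
   to a constant share that constant's value id.  */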
4816 /* Propagate. */
4817 for (i = 1; i < num_ssa_names; ++i)
4819 tree name = ssa_name (i);
4820 vn_ssa_aux_t info;
4821 if (!name)
4822 continue;
4823 info = VN_INFO (name);
4824 if (TREE_CODE (info->valnum) == SSA_NAME
4825 && info->valnum != name
4826 && info->value_id != VN_INFO (info->valnum)->value_id)
4827 info->value_id = VN_INFO (info->valnum)->value_id;
4830 set_hashtable_value_ids ();
4832 if (dump_file && (dump_flags & TDF_DETAILS))
4834 fprintf (dump_file, "Value numbers:\n");
4835 for (i = 0; i < num_ssa_names; i++)
4837 tree name = ssa_name (i);
4838 if (name
4839 && VN_INFO (name)->visited
4840 && SSA_VAL (name) != name)
4842 print_generic_expr (dump_file, name, 0);
4843 fprintf (dump_file, " = ");
4844 print_generic_expr (dump_file, SSA_VAL (name), 0);
4845 fprintf (dump_file, "\n");
4850 return true;
4853 /* Return the maximum value id we have ever seen. */
4855 unsigned int
4856 get_max_value_id (void)
4858 return next_value_id;
4861 /* Return the next unique value id. */
4863 unsigned int
4864 get_next_value_id (void)
4866 return next_value_id++;
4870 /* Compare two expressions E1 and E2 and return true if they are equal. */
4872 bool
4873 expressions_equal_p (tree e1, tree e2)
4875 /* The obvious case. */
4876 if (e1 == e2)
4877 return true;
4879 /* If either one is VN_TOP consider them equal. */
4880 if (e1 == VN_TOP || e2 == VN_TOP)
4881 return true;
4883 /* If only one of them is null, they cannot be equal. */
4884 if (!e1 || !e2)
4885 return false;
4887 /* Now perform the actual comparison. */
4888 if (TREE_CODE (e1) == TREE_CODE (e2)
4889 && operand_equal_p (e1, e2, OEP_PURE_SAME))
4890 return true;
4892 return false;
4896 /* Return true if the nary operation NARY may trap. This is a copy
4897 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
4899 bool
4900 vn_nary_may_trap (vn_nary_op_t nary)
4902 tree type;
4903 tree rhs2 = NULL_TREE;
4904 bool honor_nans = false;
4905 bool honor_snans = false;
4906 bool fp_operation = false;
4907 bool honor_trapv = false;
4908 bool handled, ret;
4909 unsigned i;
4911 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4912 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4913 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4915 type = nary->type;
4916 fp_operation = FLOAT_TYPE_P (type);
4917 if (fp_operation)
4919 honor_nans = flag_trapping_math && !flag_finite_math_only;
4920 honor_snans = flag_signaling_nans != 0;
4922 else if (INTEGRAL_TYPE_P (type)
4923 && TYPE_OVERFLOW_TRAPS (type))
4924 honor_trapv = true;
4926 if (nary->length >= 2)
4927 rhs2 = nary->op[1];
4928 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4929 honor_trapv,
4930 honor_nans, honor_snans, rhs2,
4931 &handled);
4932 if (handled
4933 && ret)
4934 return true;
4936 for (i = 0; i < nary->length; ++i)
4937 if (tree_could_trap_p (nary->op[i]))
4938 return true;
4940 return false;