[official-gcc.git] / gcc / tree-ssa-sccvn.c
blob e889d6063e6ec99a1a00ee778ecd0816dcf3e3d7
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2016 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "emit-rtl.h"
33 #include "cgraph.h"
34 #include "gimple-pretty-print.h"
35 #include "alias.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "cfganal.h"
39 #include "tree-inline.h"
40 #include "internal-fn.h"
41 #include "gimple-fold.h"
42 #include "tree-eh.h"
43 #include "gimplify.h"
44 #include "flags.h"
45 #include "dojump.h"
46 #include "explow.h"
47 #include "calls.h"
48 #include "varasm.h"
49 #include "stmt.h"
50 #include "expr.h"
51 #include "tree-dfa.h"
52 #include "tree-ssa.h"
53 #include "dumpfile.h"
54 #include "cfgloop.h"
55 #include "params.h"
56 #include "tree-ssa-propagate.h"
57 #include "tree-ssa-sccvn.h"
58 #include "tree-cfg.h"
59 #include "domwalk.h"
60 #include "gimple-iterator.h"
61 #include "gimple-match.h"
63 /* This algorithm is based on the SCC algorithm presented by Keith
64 Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
65 (http://citeseer.ist.psu.edu/41805.html). In
66 straight-line code, it is equivalent to a regular hash-based value
67 numbering performed in reverse postorder.
69 For code with cycles, there are two alternatives, both of which
70 require keeping the hashtables separate from the actual list of
71 value numbers for SSA names.
73 1. Iterate value numbering in an RPO walk of the blocks, removing
74 all the entries from the hashtable after each iteration (but
75 keeping the SSA name->value number mapping between iterations).
76 Iterate until it does not change.
78 2. Perform value numbering as part of an SCC walk on the SSA graph,
79 iterating only the cycles in the SSA graph until they do not change
80 (using a separate, optimistic hashtable for value numbering the SCC
81 operands).
83 The second is not just faster in practice (because most SSA graph
84 cycles do not involve all the variables in the graph), it also has
85 some nice properties.
87 One of these nice properties is that when we pop an SCC off the
88 stack, we are guaranteed to have processed all the operands coming from
89 *outside of that SCC*, so we do not need to do anything special to
90 ensure they have value numbers.
92 Another nice property is that the SCC walk is done as part of a DFS
93 of the SSA graph, which makes it easy to perform combining and
94 simplifying operations at the same time.
96 The code below is deliberately written in a way that makes it easy
97 to separate the SCC walk from the other work it does.
99 In order to propagate constants through the code, we track which
100 expressions contain constants, and use those while folding. In
101 theory, we could also track expressions whose value numbers are
102 replaced, in case we end up folding based on expression
103 identities.
105 In order to value number memory, we assign value numbers to vuses.
106 This enables us to note that, for example, stores of the same
107 value to the same address from the same starting memory state are
108 equivalent.
109 TODO:
111 1. We can iterate only the changing portions of the SCCs, but
112 I have not seen an SCC big enough for this to be a win.
113 2. If you differentiate between phi nodes for loops and phi nodes
114 for if-then-else, you can properly consider phi nodes in different
115 blocks for equivalence.
116 3. We could value number vuses in more cases, particularly, whole
117 structure copies.
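/* Editorial note -- a minimal sketch of the SCC-walk alternative described
   above, in simplified pseudocode (not the actual control flow of this
   file; the real driver appears further down):

     for each SSA name NAME in the function:
       DFS over the SSA use-def graph starting at NAME;
       when an SCC is completed, pop it off the stack;
       if the SCC has a single member:
         value number it against the valid table;
       else:
         do
           clear the optimistic table;
           value number every member of the SCC;
         while any member's value number changed;
         record the stabilized value numbers in the valid table.  */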
121 static tree *last_vuse_ptr;
122 static vn_lookup_kind vn_walk_kind;
123 static vn_lookup_kind default_vn_walk_kind;
124 bitmap const_parms;
126 /* vn_nary_op hashtable helpers. */
128 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
130 typedef vn_nary_op_s *compare_type;
131 static inline hashval_t hash (const vn_nary_op_s *);
132 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
135 /* Return the computed hashcode for nary operation P1. */
137 inline hashval_t
138 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
140 return vno1->hashcode;
143 /* Compare nary operations P1 and P2 and return true if they are
144 equivalent. */
146 inline bool
147 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
149 return vn_nary_op_eq (vno1, vno2);
152 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
153 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
156 /* vn_phi hashtable helpers. */
158 static int
159 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
161 struct vn_phi_hasher : pointer_hash <vn_phi_s>
163 static inline hashval_t hash (const vn_phi_s *);
164 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
165 static inline void remove (vn_phi_s *);
168 /* Return the computed hashcode for phi operation P1. */
170 inline hashval_t
171 vn_phi_hasher::hash (const vn_phi_s *vp1)
173 return vp1->hashcode;
176 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
178 inline bool
179 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
181 return vn_phi_eq (vp1, vp2);
184 /* Free a phi operation structure VP. */
186 inline void
187 vn_phi_hasher::remove (vn_phi_s *phi)
189 phi->phiargs.release ();
192 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
193 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
196 /* Compare two reference operands P1 and P2 for equality. Return true if
197 they are equal, and false otherwise. */
199 static int
200 vn_reference_op_eq (const void *p1, const void *p2)
202 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
203 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
205 return (vro1->opcode == vro2->opcode
206 /* We do not care for differences in type qualification. */
207 && (vro1->type == vro2->type
208 || (vro1->type && vro2->type
209 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
210 TYPE_MAIN_VARIANT (vro2->type))))
211 && expressions_equal_p (vro1->op0, vro2->op0)
212 && expressions_equal_p (vro1->op1, vro2->op1)
213 && expressions_equal_p (vro1->op2, vro2->op2));
216 /* Free a reference operation structure VP. */
218 static inline void
219 free_reference (vn_reference_s *vr)
221 vr->operands.release ();
225 /* vn_reference hashtable helpers. */
227 struct vn_reference_hasher : pointer_hash <vn_reference_s>
229 static inline hashval_t hash (const vn_reference_s *);
230 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
231 static inline void remove (vn_reference_s *);
234 /* Return the hashcode for a given reference operation P1. */
236 inline hashval_t
237 vn_reference_hasher::hash (const vn_reference_s *vr1)
239 return vr1->hashcode;
242 inline bool
243 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
245 return vn_reference_eq (v, c);
248 inline void
249 vn_reference_hasher::remove (vn_reference_s *v)
251 free_reference (v);
254 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
255 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
258 /* The set of hashtables and alloc_pool's for their items. */
260 typedef struct vn_tables_s
262 vn_nary_op_table_type *nary;
263 vn_phi_table_type *phis;
264 vn_reference_table_type *references;
265 struct obstack nary_obstack;
266 object_allocator<vn_phi_s> *phis_pool;
267 object_allocator<vn_reference_s> *references_pool;
268 } *vn_tables_t;
271 /* vn_constant hashtable helpers. */
273 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
275 static inline hashval_t hash (const vn_constant_s *);
276 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
279 /* Hash table hash function for vn_constant_t. */
281 inline hashval_t
282 vn_constant_hasher::hash (const vn_constant_s *vc1)
284 return vc1->hashcode;
287 /* Hash table equality function for vn_constant_t. */
289 inline bool
290 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
292 if (vc1->hashcode != vc2->hashcode)
293 return false;
295 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
298 static hash_table<vn_constant_hasher> *constant_to_value_id;
299 static bitmap constant_value_ids;
302 /* Valid hashtables storing information we have proven to be
303 correct. */
305 static vn_tables_t valid_info;
307 /* Optimistic hashtables storing information we are making assumptions about
308 during iterations. */
310 static vn_tables_t optimistic_info;
312 /* Pointer to the set of hashtables that is currently being used.
313 Should always point to either the optimistic_info, or the
314 valid_info. */
316 static vn_tables_t current_info;
319 /* Reverse post order index for each basic block. */
321 static int *rpo_numbers;
323 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
325 /* Return the SSA value of the VUSE x, supporting released VDEFs
326 during elimination which will value-number the VDEF to the
327 associated VUSE (but not substitute in the whole lattice). */
329 static inline tree
330 vuse_ssa_val (tree x)
332 if (!x)
333 return NULL_TREE;
337 x = SSA_VAL (x);
339 while (SSA_NAME_IN_FREE_LIST (x));
341 return x;
344 /* This represents the top of the VN lattice, which is the universal
345 value. */
347 tree VN_TOP;
349 /* Unique counter for our value ids. */
351 static unsigned int next_value_id;
353 /* Next DFS number and the stack for strongly connected component
354 detection. */
356 static unsigned int next_dfs_num;
357 static vec<tree> sccstack;
361 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
362 are allocated on an obstack for locality reasons, and to free them
363 without looping over the vec. */
365 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
366 static struct obstack vn_ssa_aux_obstack;
368 /* Return whether there is value numbering information for a given SSA name. */
370 bool
371 has_VN_INFO (tree name)
373 if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
374 return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
375 return false;
378 /* Return the value numbering information for a given SSA name. */
380 vn_ssa_aux_t
381 VN_INFO (tree name)
383 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
384 gcc_checking_assert (res);
385 return res;
388 /* Set the value numbering info for a given SSA name to a given
389 value. */
391 static inline void
392 VN_INFO_SET (tree name, vn_ssa_aux_t value)
394 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
397 /* Initialize the value numbering info for a given SSA name.
398 This should be called just once for every SSA name. */
400 vn_ssa_aux_t
401 VN_INFO_GET (tree name)
403 vn_ssa_aux_t newinfo;
405 gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
406 || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
407 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
408 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
409 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
410 vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
411 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
412 return newinfo;
416 /* Return the vn_kind the expression computed by the stmt should be
417 associated with. */
419 enum vn_kind
420 vn_get_stmt_kind (gimple *stmt)
422 switch (gimple_code (stmt))
424 case GIMPLE_CALL:
425 return VN_REFERENCE;
426 case GIMPLE_PHI:
427 return VN_PHI;
428 case GIMPLE_ASSIGN:
430 enum tree_code code = gimple_assign_rhs_code (stmt);
431 tree rhs1 = gimple_assign_rhs1 (stmt);
432 switch (get_gimple_rhs_class (code))
434 case GIMPLE_UNARY_RHS:
435 case GIMPLE_BINARY_RHS:
436 case GIMPLE_TERNARY_RHS:
437 return VN_NARY;
438 case GIMPLE_SINGLE_RHS:
439 switch (TREE_CODE_CLASS (code))
441 case tcc_reference:
442 /* VOP-less references can go through unary case. */
443 if ((code == REALPART_EXPR
444 || code == IMAGPART_EXPR
445 || code == VIEW_CONVERT_EXPR
446 || code == BIT_FIELD_REF)
447 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
448 return VN_NARY;
450 /* Fallthrough. */
451 case tcc_declaration:
452 return VN_REFERENCE;
454 case tcc_constant:
455 return VN_CONSTANT;
457 default:
458 if (code == ADDR_EXPR)
459 return (is_gimple_min_invariant (rhs1)
460 ? VN_CONSTANT : VN_REFERENCE);
461 else if (code == CONSTRUCTOR)
462 return VN_NARY;
463 return VN_NONE;
465 default:
466 return VN_NONE;
469 default:
470 return VN_NONE;
474 /* Lookup a value id for CONSTANT and return it. If it does not
475 exist, return 0. */
477 unsigned int
478 get_constant_value_id (tree constant)
480 vn_constant_s **slot;
481 struct vn_constant_s vc;
483 vc.hashcode = vn_hash_constant_with_type (constant);
484 vc.constant = constant;
485 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
486 if (slot)
487 return (*slot)->value_id;
488 return 0;
491 /* Lookup a value id for CONSTANT, and if it does not exist, create a
492 new one and return it. If it does exist, return it. */
494 unsigned int
495 get_or_alloc_constant_value_id (tree constant)
497 vn_constant_s **slot;
498 struct vn_constant_s vc;
499 vn_constant_t vcp;
501 vc.hashcode = vn_hash_constant_with_type (constant);
502 vc.constant = constant;
503 slot = constant_to_value_id->find_slot (&vc, INSERT);
504 if (*slot)
505 return (*slot)->value_id;
507 vcp = XNEW (struct vn_constant_s);
508 vcp->hashcode = vc.hashcode;
509 vcp->constant = constant;
510 vcp->value_id = get_next_value_id ();
511 *slot = vcp;
512 bitmap_set_bit (constant_value_ids, vcp->value_id);
513 return vcp->value_id;
516 /* Return true if V is a value id for a constant. */
518 bool
519 value_id_constant_p (unsigned int v)
521 return bitmap_bit_p (constant_value_ids, v);
524 /* Compute the hash for a reference operand VRO1. */
526 static void
527 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
529 hstate.add_int (vro1->opcode);
530 if (vro1->op0)
531 inchash::add_expr (vro1->op0, hstate);
532 if (vro1->op1)
533 inchash::add_expr (vro1->op1, hstate);
534 if (vro1->op2)
535 inchash::add_expr (vro1->op2, hstate);
538 /* Compute a hash for the reference operation VR1 and return it. */
540 static hashval_t
541 vn_reference_compute_hash (const vn_reference_t vr1)
543 inchash::hash hstate;
544 hashval_t result;
545 int i;
546 vn_reference_op_t vro;
547 HOST_WIDE_INT off = -1;
548 bool deref = false;
550 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
552 if (vro->opcode == MEM_REF)
553 deref = true;
554 else if (vro->opcode != ADDR_EXPR)
555 deref = false;
556 if (vro->off != -1)
558 if (off == -1)
559 off = 0;
560 off += vro->off;
562 else
564 if (off != -1
565 && off != 0)
566 hstate.add_int (off);
567 off = -1;
568 if (deref
569 && vro->opcode == ADDR_EXPR)
571 if (vro->op0)
573 tree op = TREE_OPERAND (vro->op0, 0);
574 hstate.add_int (TREE_CODE (op));
575 inchash::add_expr (op, hstate);
578 else
579 vn_reference_op_compute_hash (vro, hstate);
582 result = hstate.end ();
583 /* ??? We would ICE later if we hash instead of adding that in. */
584 if (vr1->vuse)
585 result += SSA_NAME_VERSION (vr1->vuse);
587 return result;
590 /* Return true if reference operations VR1 and VR2 are equivalent. This
591 means they have the same set of operands and vuses. */
593 bool
594 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
596 unsigned i, j;
598 /* Early out if this is not a hash collision. */
599 if (vr1->hashcode != vr2->hashcode)
600 return false;
602 /* The VOP needs to be the same. */
603 if (vr1->vuse != vr2->vuse)
604 return false;
606 /* If the operands are the same we are done. */
607 if (vr1->operands == vr2->operands)
608 return true;
610 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
611 return false;
613 if (INTEGRAL_TYPE_P (vr1->type)
614 && INTEGRAL_TYPE_P (vr2->type))
616 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
617 return false;
619 else if (INTEGRAL_TYPE_P (vr1->type)
620 && (TYPE_PRECISION (vr1->type)
621 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
622 return false;
623 else if (INTEGRAL_TYPE_P (vr2->type)
624 && (TYPE_PRECISION (vr2->type)
625 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
626 return false;
628 i = 0;
629 j = 0;
632 HOST_WIDE_INT off1 = 0, off2 = 0;
633 vn_reference_op_t vro1, vro2;
634 vn_reference_op_s tem1, tem2;
635 bool deref1 = false, deref2 = false;
636 for (; vr1->operands.iterate (i, &vro1); i++)
638 if (vro1->opcode == MEM_REF)
639 deref1 = true;
640 /* Do not look through a storage order barrier. */
641 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
642 return false;
643 if (vro1->off == -1)
644 break;
645 off1 += vro1->off;
647 for (; vr2->operands.iterate (j, &vro2); j++)
649 if (vro2->opcode == MEM_REF)
650 deref2 = true;
651 /* Do not look through a storage order barrier. */
652 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
653 return false;
654 if (vro2->off == -1)
655 break;
656 off2 += vro2->off;
658 if (off1 != off2)
659 return false;
660 if (deref1 && vro1->opcode == ADDR_EXPR)
662 memset (&tem1, 0, sizeof (tem1));
663 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
664 tem1.type = TREE_TYPE (tem1.op0);
665 tem1.opcode = TREE_CODE (tem1.op0);
666 vro1 = &tem1;
667 deref1 = false;
669 if (deref2 && vro2->opcode == ADDR_EXPR)
671 memset (&tem2, 0, sizeof (tem2));
672 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
673 tem2.type = TREE_TYPE (tem2.op0);
674 tem2.opcode = TREE_CODE (tem2.op0);
675 vro2 = &tem2;
676 deref2 = false;
678 if (deref1 != deref2)
679 return false;
680 if (!vn_reference_op_eq (vro1, vro2))
681 return false;
682 ++j;
683 ++i;
685 while (vr1->operands.length () != i
686 || vr2->operands.length () != j);
688 return true;
691 /* Copy the operations present in load/store REF into RESULT, a vector of
692 vn_reference_op_s's. */
694 static void
695 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
697 if (TREE_CODE (ref) == TARGET_MEM_REF)
699 vn_reference_op_s temp;
701 result->reserve (3);
703 memset (&temp, 0, sizeof (temp));
704 temp.type = TREE_TYPE (ref);
705 temp.opcode = TREE_CODE (ref);
706 temp.op0 = TMR_INDEX (ref);
707 temp.op1 = TMR_STEP (ref);
708 temp.op2 = TMR_OFFSET (ref);
709 temp.off = -1;
710 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
711 temp.base = MR_DEPENDENCE_BASE (ref);
712 result->quick_push (temp);
714 memset (&temp, 0, sizeof (temp));
715 temp.type = NULL_TREE;
716 temp.opcode = ERROR_MARK;
717 temp.op0 = TMR_INDEX2 (ref);
718 temp.off = -1;
719 result->quick_push (temp);
721 memset (&temp, 0, sizeof (temp));
722 temp.type = NULL_TREE;
723 temp.opcode = TREE_CODE (TMR_BASE (ref));
724 temp.op0 = TMR_BASE (ref);
725 temp.off = -1;
726 result->quick_push (temp);
727 return;
730 /* For non-calls, store the information that makes up the address. */
731 tree orig = ref;
732 while (ref)
734 vn_reference_op_s temp;
736 memset (&temp, 0, sizeof (temp));
737 temp.type = TREE_TYPE (ref);
738 temp.opcode = TREE_CODE (ref);
739 temp.off = -1;
741 switch (temp.opcode)
743 case MODIFY_EXPR:
744 temp.op0 = TREE_OPERAND (ref, 1);
745 break;
746 case WITH_SIZE_EXPR:
747 temp.op0 = TREE_OPERAND (ref, 1);
748 temp.off = 0;
749 break;
750 case MEM_REF:
751 /* The base address gets its own vn_reference_op_s structure. */
752 temp.op0 = TREE_OPERAND (ref, 1);
754 offset_int off = mem_ref_offset (ref);
755 if (wi::fits_shwi_p (off))
756 temp.off = off.to_shwi ();
758 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
759 temp.base = MR_DEPENDENCE_BASE (ref);
760 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
761 break;
762 case BIT_FIELD_REF:
763 /* Record bits, position and storage order. */
764 temp.op0 = TREE_OPERAND (ref, 1);
765 temp.op1 = TREE_OPERAND (ref, 2);
766 if (tree_fits_shwi_p (TREE_OPERAND (ref, 2)))
768 HOST_WIDE_INT off = tree_to_shwi (TREE_OPERAND (ref, 2));
769 if (off % BITS_PER_UNIT == 0)
770 temp.off = off / BITS_PER_UNIT;
772 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
773 break;
774 case COMPONENT_REF:
775 /* The field decl is enough to unambiguously specify the field;
776 a matching type is not necessary, and a mismatching type
777 is always a spurious difference. */
778 temp.type = NULL_TREE;
779 temp.op0 = TREE_OPERAND (ref, 1);
780 temp.op1 = TREE_OPERAND (ref, 2);
782 tree this_offset = component_ref_field_offset (ref);
783 if (this_offset
784 && TREE_CODE (this_offset) == INTEGER_CST)
786 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
787 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
789 offset_int off
790 = (wi::to_offset (this_offset)
791 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
792 if (wi::fits_shwi_p (off)
793 /* Prohibit value-numbering zero-offset components
794 of addresses the same before the pass folding
795 __builtin_object_size had a chance to run
796 (checking cfun->after_inlining does the
797 trick here). */
798 && (TREE_CODE (orig) != ADDR_EXPR
799 || off != 0
800 || cfun->after_inlining))
801 temp.off = off.to_shwi ();
805 break;
806 case ARRAY_RANGE_REF:
807 case ARRAY_REF:
809 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
810 /* Record index as operand. */
811 temp.op0 = TREE_OPERAND (ref, 1);
812 /* Always record lower bounds and element size. */
813 temp.op1 = array_ref_low_bound (ref);
814 /* But record element size in units of the type alignment. */
815 temp.op2 = TREE_OPERAND (ref, 3);
816 temp.align = eltype->type_common.align;
817 if (! temp.op2)
818 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
819 size_int (TYPE_ALIGN_UNIT (eltype)));
820 if (TREE_CODE (temp.op0) == INTEGER_CST
821 && TREE_CODE (temp.op1) == INTEGER_CST
822 && TREE_CODE (temp.op2) == INTEGER_CST)
824 offset_int off = ((wi::to_offset (temp.op0)
825 - wi::to_offset (temp.op1))
826 * wi::to_offset (temp.op2)
827 * vn_ref_op_align_unit (&temp));
828 if (wi::fits_shwi_p (off))
829 temp.off = off.to_shwi();
832 break;
833 case VAR_DECL:
834 if (DECL_HARD_REGISTER (ref))
836 temp.op0 = ref;
837 break;
839 /* Fallthru. */
840 case PARM_DECL:
841 case CONST_DECL:
842 case RESULT_DECL:
843 /* Canonicalize decls to MEM[&decl] which is what we end up with
844 when valueizing MEM[ptr] with ptr = &decl. */
845 temp.opcode = MEM_REF;
846 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
847 temp.off = 0;
848 result->safe_push (temp);
849 temp.opcode = ADDR_EXPR;
850 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
851 temp.type = TREE_TYPE (temp.op0);
852 temp.off = -1;
853 break;
854 case STRING_CST:
855 case INTEGER_CST:
856 case COMPLEX_CST:
857 case VECTOR_CST:
858 case REAL_CST:
859 case FIXED_CST:
860 case CONSTRUCTOR:
861 case SSA_NAME:
862 temp.op0 = ref;
863 break;
864 case ADDR_EXPR:
865 if (is_gimple_min_invariant (ref))
867 temp.op0 = ref;
868 break;
870 break;
871 /* These are only interesting for their operands, their
872 existence, and their type. They will never be the last
873 ref in the chain of references (i.e. they require an
874 operand), so we don't have to put anything
875 for op* as it will be handled by the iteration. */
876 case REALPART_EXPR:
877 temp.off = 0;
878 break;
879 case VIEW_CONVERT_EXPR:
880 temp.off = 0;
881 temp.reverse = storage_order_barrier_p (ref);
882 break;
883 case IMAGPART_EXPR:
884 /* This is only interesting for its constant offset. */
885 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
886 break;
887 default:
888 gcc_unreachable ();
890 result->safe_push (temp);
892 if (REFERENCE_CLASS_P (ref)
893 || TREE_CODE (ref) == MODIFY_EXPR
894 || TREE_CODE (ref) == WITH_SIZE_EXPR
895 || (TREE_CODE (ref) == ADDR_EXPR
896 && !is_gimple_min_invariant (ref)))
897 ref = TREE_OPERAND (ref, 0);
898 else
899 ref = NULL_TREE;
903 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
904 operands in *OPS, the reference alias set SET and the reference type TYPE.
905 Return true if something useful was produced. */
907 bool
908 ao_ref_init_from_vn_reference (ao_ref *ref,
909 alias_set_type set, tree type,
910 vec<vn_reference_op_s> ops)
912 vn_reference_op_t op;
913 unsigned i;
914 tree base = NULL_TREE;
915 tree *op0_p = &base;
916 offset_int offset = 0;
917 offset_int max_size;
918 offset_int size = -1;
919 tree size_tree = NULL_TREE;
920 alias_set_type base_alias_set = -1;
922 /* First get the final access size from just the outermost expression. */
923 op = &ops[0];
924 if (op->opcode == COMPONENT_REF)
925 size_tree = DECL_SIZE (op->op0);
926 else if (op->opcode == BIT_FIELD_REF)
927 size_tree = op->op0;
928 else
930 machine_mode mode = TYPE_MODE (type);
931 if (mode == BLKmode)
932 size_tree = TYPE_SIZE (type);
933 else
934 size = int (GET_MODE_BITSIZE (mode));
936 if (size_tree != NULL_TREE
937 && TREE_CODE (size_tree) == INTEGER_CST)
938 size = wi::to_offset (size_tree);
940 /* Initially, maxsize is the same as the accessed element size.
941 In the following it will only grow (or become -1). */
942 max_size = size;
944 /* Compute cumulative bit-offset for nested component-refs and array-refs,
945 and find the ultimate containing object. */
946 FOR_EACH_VEC_ELT (ops, i, op)
948 switch (op->opcode)
950 /* These may be in the reference ops, but we cannot do anything
951 sensible with them here. */
952 case ADDR_EXPR:
953 /* Apart from ADDR_EXPR arguments to MEM_REF. */
954 if (base != NULL_TREE
955 && TREE_CODE (base) == MEM_REF
956 && op->op0
957 && DECL_P (TREE_OPERAND (op->op0, 0)))
959 vn_reference_op_t pop = &ops[i-1];
960 base = TREE_OPERAND (op->op0, 0);
961 if (pop->off == -1)
963 max_size = -1;
964 offset = 0;
966 else
967 offset += pop->off * BITS_PER_UNIT;
968 op0_p = NULL;
969 break;
971 /* Fallthru. */
972 case CALL_EXPR:
973 return false;
975 /* Record the base objects. */
976 case MEM_REF:
977 base_alias_set = get_deref_alias_set (op->op0);
978 *op0_p = build2 (MEM_REF, op->type,
979 NULL_TREE, op->op0);
980 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
981 MR_DEPENDENCE_BASE (*op0_p) = op->base;
982 op0_p = &TREE_OPERAND (*op0_p, 0);
983 break;
985 case VAR_DECL:
986 case PARM_DECL:
987 case RESULT_DECL:
988 case SSA_NAME:
989 *op0_p = op->op0;
990 op0_p = NULL;
991 break;
993 /* And now the usual component-reference style ops. */
994 case BIT_FIELD_REF:
995 offset += wi::to_offset (op->op1);
996 break;
998 case COMPONENT_REF:
1000 tree field = op->op0;
1001 /* We do not have a complete COMPONENT_REF tree here so we
1002 cannot use component_ref_field_offset. Do the interesting
1003 parts manually. */
1004 tree this_offset = DECL_FIELD_OFFSET (field);
1006 if (op->op1 || TREE_CODE (this_offset) != INTEGER_CST)
1007 max_size = -1;
1008 else
1010 offset_int woffset = (wi::to_offset (this_offset)
1011 << LOG2_BITS_PER_UNIT);
1012 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1013 offset += woffset;
1015 break;
1018 case ARRAY_RANGE_REF:
1019 case ARRAY_REF:
1020 /* We recorded the lower bound and the element size. */
1021 if (TREE_CODE (op->op0) != INTEGER_CST
1022 || TREE_CODE (op->op1) != INTEGER_CST
1023 || TREE_CODE (op->op2) != INTEGER_CST)
1024 max_size = -1;
1025 else
1027 offset_int woffset
1028 = wi::sext (wi::to_offset (op->op0) - wi::to_offset (op->op1),
1029 TYPE_PRECISION (TREE_TYPE (op->op0)));
1030 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1031 woffset <<= LOG2_BITS_PER_UNIT;
1032 offset += woffset;
1034 break;
1036 case REALPART_EXPR:
1037 break;
1039 case IMAGPART_EXPR:
1040 offset += size;
1041 break;
1043 case VIEW_CONVERT_EXPR:
1044 break;
1046 case STRING_CST:
1047 case INTEGER_CST:
1048 case COMPLEX_CST:
1049 case VECTOR_CST:
1050 case REAL_CST:
1051 case CONSTRUCTOR:
1052 case CONST_DECL:
1053 return false;
1055 default:
1056 return false;
1060 if (base == NULL_TREE)
1061 return false;
1063 ref->ref = NULL_TREE;
1064 ref->base = base;
1065 ref->ref_alias_set = set;
1066 if (base_alias_set != -1)
1067 ref->base_alias_set = base_alias_set;
1068 else
1069 ref->base_alias_set = get_alias_set (base);
1070 /* We discount volatiles from value-numbering elsewhere. */
1071 ref->volatile_p = false;
1073 if (!wi::fits_shwi_p (size) || wi::neg_p (size))
1075 ref->offset = 0;
1076 ref->size = -1;
1077 ref->max_size = -1;
1078 return true;
1081 ref->size = size.to_shwi ();
1083 if (!wi::fits_shwi_p (offset))
1085 ref->offset = 0;
1086 ref->max_size = -1;
1087 return true;
1090 ref->offset = offset.to_shwi ();
1092 if (!wi::fits_shwi_p (max_size) || wi::neg_p (max_size))
1093 ref->max_size = -1;
1094 else
1095 ref->max_size = max_size.to_shwi ();
1097 return true;
1100 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1101 vn_reference_op_s's. */
1103 static void
1104 copy_reference_ops_from_call (gcall *call,
1105 vec<vn_reference_op_s> *result)
1107 vn_reference_op_s temp;
1108 unsigned i;
1109 tree lhs = gimple_call_lhs (call);
1110 int lr;
1112 /* If two calls have a different non-SSA lhs, vdef value numbers should be
1113 different. By adding the lhs here in the vector, we ensure that the
1114 hashcode is different, guaranteeing a different value number. */
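/* Editorial example (hypothetical code): for
     s.x = foo ();   versus   t.x = foo ();
   pushing the non-SSA lhs (s.x resp. t.x) first makes the two call
   references hash differently, so their VDEFs get distinct value
   numbers as described above.  */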
1115 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1117 memset (&temp, 0, sizeof (temp));
1118 temp.opcode = MODIFY_EXPR;
1119 temp.type = TREE_TYPE (lhs);
1120 temp.op0 = lhs;
1121 temp.off = -1;
1122 result->safe_push (temp);
1125 /* Copy the type, opcode, function, static chain and EH region, if any. */
1126 memset (&temp, 0, sizeof (temp));
1127 temp.type = gimple_call_return_type (call);
1128 temp.opcode = CALL_EXPR;
1129 temp.op0 = gimple_call_fn (call);
1130 temp.op1 = gimple_call_chain (call);
1131 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1132 temp.op2 = size_int (lr);
1133 temp.off = -1;
1134 if (gimple_call_with_bounds_p (call))
1135 temp.with_bounds = 1;
1136 result->safe_push (temp);
1138 /* Copy the call arguments. As they can be references as well,
1139 just chain them together. */
1140 for (i = 0; i < gimple_call_num_args (call); ++i)
1142 tree callarg = gimple_call_arg (call, i);
1143 copy_reference_ops_from_ref (callarg, result);
1147 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1148 *I_P to point to the last element of the replacement. */
1149 static bool
1150 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1151 unsigned int *i_p)
1153 unsigned int i = *i_p;
1154 vn_reference_op_t op = &(*ops)[i];
1155 vn_reference_op_t mem_op = &(*ops)[i - 1];
1156 tree addr_base;
1157 HOST_WIDE_INT addr_offset = 0;
1159 /* The only thing we have to do is, from &OBJ.foo.bar, add the offset
1160 from .foo.bar to the preceding MEM_REF offset and replace the
1161 address with &OBJ. */
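/* Editorial example (hypothetical offsets): given the operand pair for
   MEM[&obj.foo.bar + 4], this rewrites the ADDR_EXPR to &obj and adds
   the byte offset of .foo.bar within obj to the MEM_REF offset, i.e.
   the reference becomes MEM[&obj + (4 + <offset of foo.bar>)].  */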
1162 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1163 &addr_offset);
1164 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1165 if (addr_base != TREE_OPERAND (op->op0, 0))
1167 offset_int off = offset_int::from (mem_op->op0, SIGNED);
1168 off += addr_offset;
1169 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1170 op->op0 = build_fold_addr_expr (addr_base);
1171 if (tree_fits_shwi_p (mem_op->op0))
1172 mem_op->off = tree_to_shwi (mem_op->op0);
1173 else
1174 mem_op->off = -1;
1175 return true;
1177 return false;
1180 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1181 *I_P to point to the last element of the replacement. */
1182 static bool
1183 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1184 unsigned int *i_p)
1186 unsigned int i = *i_p;
1187 vn_reference_op_t op = &(*ops)[i];
1188 vn_reference_op_t mem_op = &(*ops)[i - 1];
1189 gimple *def_stmt;
1190 enum tree_code code;
1191 offset_int off;
1193 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1194 if (!is_gimple_assign (def_stmt))
1195 return false;
1197 code = gimple_assign_rhs_code (def_stmt);
1198 if (code != ADDR_EXPR
1199 && code != POINTER_PLUS_EXPR)
1200 return false;
1202 off = offset_int::from (mem_op->op0, SIGNED);
1204 /* The only thing we have to do is, from &OBJ.foo.bar, add the offset
1205 from .foo.bar to the preceding MEM_REF offset and replace the
1206 address with &OBJ. */
1207 if (code == ADDR_EXPR)
1209 tree addr, addr_base;
1210 HOST_WIDE_INT addr_offset;
1212 addr = gimple_assign_rhs1 (def_stmt);
1213 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1214 &addr_offset);
1215 /* If that didn't work because the address isn't invariant, propagate
1216 the reference tree from the address operation, in case the current
1217 dereference isn't offset. */
1218 if (!addr_base
1219 && *i_p == ops->length () - 1
1220 && off == 0
1221 /* This makes us disable this transform for PRE where the
1222 reference ops might be also used for code insertion which
1223 is invalid. */
1224 && default_vn_walk_kind == VN_WALKREWRITE)
1226 auto_vec<vn_reference_op_s, 32> tem;
1227 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1228 ops->pop ();
1229 ops->pop ();
1230 ops->safe_splice (tem);
1231 --*i_p;
1232 return true;
1234 if (!addr_base
1235 || TREE_CODE (addr_base) != MEM_REF)
1236 return false;
1238 off += addr_offset;
1239 off += mem_ref_offset (addr_base);
1240 op->op0 = TREE_OPERAND (addr_base, 0);
1242 else
1244 tree ptr, ptroff;
1245 ptr = gimple_assign_rhs1 (def_stmt);
1246 ptroff = gimple_assign_rhs2 (def_stmt);
1247 if (TREE_CODE (ptr) != SSA_NAME
1248 || TREE_CODE (ptroff) != INTEGER_CST)
1249 return false;
1251 off += wi::to_offset (ptroff);
1252 op->op0 = ptr;
1255 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1256 if (tree_fits_shwi_p (mem_op->op0))
1257 mem_op->off = tree_to_shwi (mem_op->op0);
1258 else
1259 mem_op->off = -1;
1260 if (TREE_CODE (op->op0) == SSA_NAME)
1261 op->op0 = SSA_VAL (op->op0);
1262 if (TREE_CODE (op->op0) != SSA_NAME)
1263 op->opcode = TREE_CODE (op->op0);
1265 /* And recurse. */
1266 if (TREE_CODE (op->op0) == SSA_NAME)
1267 vn_reference_maybe_forwprop_address (ops, i_p);
1268 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1269 vn_reference_fold_indirect (ops, i_p);
1270 return true;
1273 /* Optimize the reference REF to a constant if possible or return
1274 NULL_TREE if not. */
1276 tree
1277 fully_constant_vn_reference_p (vn_reference_t ref)
1279 vec<vn_reference_op_s> operands = ref->operands;
1280 vn_reference_op_t op;
1282 /* Try to simplify the translated expression if it is
1283 a call to a builtin function with at most two arguments. */
1284 op = &operands[0];
1285 if (op->opcode == CALL_EXPR
1286 && TREE_CODE (op->op0) == ADDR_EXPR
1287 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1288 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1289 && operands.length () >= 2
1290 && operands.length () <= 3)
1292 vn_reference_op_t arg0, arg1 = NULL;
1293 bool anyconst = false;
1294 arg0 = &operands[1];
1295 if (operands.length () > 2)
1296 arg1 = &operands[2];
1297 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1298 || (arg0->opcode == ADDR_EXPR
1299 && is_gimple_min_invariant (arg0->op0)))
1300 anyconst = true;
1301 if (arg1
1302 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1303 || (arg1->opcode == ADDR_EXPR
1304 && is_gimple_min_invariant (arg1->op0))))
1305 anyconst = true;
1306 if (anyconst)
1308 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1309 arg1 ? 2 : 1,
1310 arg0->op0,
1311 arg1 ? arg1->op0 : NULL);
1312 if (folded
1313 && TREE_CODE (folded) == NOP_EXPR)
1314 folded = TREE_OPERAND (folded, 0);
1315 if (folded
1316 && is_gimple_min_invariant (folded))
1317 return folded;
1321 /* Simplify reads from constants or constant initializers. */
1322 else if (BITS_PER_UNIT == 8
1323 && is_gimple_reg_type (ref->type)
1324 && (!INTEGRAL_TYPE_P (ref->type)
1325 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1327 HOST_WIDE_INT off = 0;
1328 HOST_WIDE_INT size;
1329 if (INTEGRAL_TYPE_P (ref->type))
1330 size = TYPE_PRECISION (ref->type);
1331 else
1332 size = tree_to_shwi (TYPE_SIZE (ref->type));
1333 if (size % BITS_PER_UNIT != 0
1334 || size > MAX_BITSIZE_MODE_ANY_MODE)
1335 return NULL_TREE;
1336 size /= BITS_PER_UNIT;
1337 unsigned i;
1338 for (i = 0; i < operands.length (); ++i)
1340 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1342 ++i;
1343 break;
1345 if (operands[i].off == -1)
1346 return NULL_TREE;
1347 off += operands[i].off;
1348 if (operands[i].opcode == MEM_REF)
1350 ++i;
1351 break;
1354 vn_reference_op_t base = &operands[--i];
1355 tree ctor = error_mark_node;
1356 tree decl = NULL_TREE;
1357 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1358 ctor = base->op0;
1359 else if (base->opcode == MEM_REF
1360 && base[1].opcode == ADDR_EXPR
1361 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1362 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
1364 decl = TREE_OPERAND (base[1].op0, 0);
1365 ctor = ctor_for_folding (decl);
1367 if (ctor == NULL_TREE)
1368 return build_zero_cst (ref->type);
1369 else if (ctor != error_mark_node)
1371 if (decl)
1373 tree res = fold_ctor_reference (ref->type, ctor,
1374 off * BITS_PER_UNIT,
1375 size * BITS_PER_UNIT, decl);
1376 if (res)
1378 STRIP_USELESS_TYPE_CONVERSION (res);
1379 if (is_gimple_min_invariant (res))
1380 return res;
1383 else
1385 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1386 int len = native_encode_expr (ctor, buf, size, off);
1387 if (len > 0)
1388 return native_interpret_expr (ref->type, buf, len);
1393 return NULL_TREE;
1396 /* Return true if OPS contain a storage order barrier. */
1398 static bool
1399 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1401 vn_reference_op_t op;
1402 unsigned i;
1404 FOR_EACH_VEC_ELT (ops, i, op)
1405 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1406 return true;
1408 return false;
1411 /* Transform any SSA_NAMEs in a vector of vn_reference_op_s
1412 structures into their value numbers. This is done in-place, and
1413 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1414 whether any operands were valueized. */
1416 static vec<vn_reference_op_s>
1417 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1419 vn_reference_op_t vro;
1420 unsigned int i;
1422 *valueized_anything = false;
1424 FOR_EACH_VEC_ELT (orig, i, vro)
1426 if (vro->opcode == SSA_NAME
1427 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1429 tree tem = SSA_VAL (vro->op0);
1430 if (tem != vro->op0)
1432 *valueized_anything = true;
1433 vro->op0 = tem;
1435 /* If it transforms from an SSA_NAME to a constant, update
1436 the opcode. */
1437 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1438 vro->opcode = TREE_CODE (vro->op0);
1440 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1442 tree tem = SSA_VAL (vro->op1);
1443 if (tem != vro->op1)
1445 *valueized_anything = true;
1446 vro->op1 = tem;
1449 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1451 tree tem = SSA_VAL (vro->op2);
1452 if (tem != vro->op2)
1454 *valueized_anything = true;
1455 vro->op2 = tem;
1458 /* If it transforms from an SSA_NAME to an address, fold with
1459 a preceding indirect reference. */
1460 if (i > 0
1461 && vro->op0
1462 && TREE_CODE (vro->op0) == ADDR_EXPR
1463 && orig[i - 1].opcode == MEM_REF)
1465 if (vn_reference_fold_indirect (&orig, &i))
1466 *valueized_anything = true;
1468 else if (i > 0
1469 && vro->opcode == SSA_NAME
1470 && orig[i - 1].opcode == MEM_REF)
1472 if (vn_reference_maybe_forwprop_address (&orig, &i))
1473 *valueized_anything = true;
1475 /* If it transforms a non-constant ARRAY_REF into a constant
1476 one, adjust the constant offset. */
1477 else if (vro->opcode == ARRAY_REF
1478 && vro->off == -1
1479 && TREE_CODE (vro->op0) == INTEGER_CST
1480 && TREE_CODE (vro->op1) == INTEGER_CST
1481 && TREE_CODE (vro->op2) == INTEGER_CST)
1483 offset_int off = ((wi::to_offset (vro->op0)
1484 - wi::to_offset (vro->op1))
1485 * wi::to_offset (vro->op2)
1486 * vn_ref_op_align_unit (vro));
1487 if (wi::fits_shwi_p (off))
1488 vro->off = off.to_shwi ();
1492 return orig;
1495 static vec<vn_reference_op_s>
1496 valueize_refs (vec<vn_reference_op_s> orig)
1498 bool tem;
1499 return valueize_refs_1 (orig, &tem);
1502 static vec<vn_reference_op_s> shared_lookup_references;
1504 /* Create a vector of vn_reference_op_s structures from REF, a
1505 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1506 this function. *VALUEIZED_ANYTHING will specify whether any
1507 operands were valueized. */
1509 static vec<vn_reference_op_s>
1510 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1512 if (!ref)
1513 return vNULL;
1514 shared_lookup_references.truncate (0);
1515 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1516 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1517 valueized_anything);
1518 return shared_lookup_references;
1521 /* Create a vector of vn_reference_op_s structures from CALL, a
1522 call statement. The vector is shared among all callers of
1523 this function. */
1525 static vec<vn_reference_op_s>
1526 valueize_shared_reference_ops_from_call (gcall *call)
1528 if (!call)
1529 return vNULL;
1530 shared_lookup_references.truncate (0);
1531 copy_reference_ops_from_call (call, &shared_lookup_references);
1532 shared_lookup_references = valueize_refs (shared_lookup_references);
1533 return shared_lookup_references;
1536 /* Lookup a SCCVN reference operation VR in the current hash table.
1537 Returns the resulting value number if it exists in the hash table,
1538 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1539 vn_reference_t stored in the hashtable if something is found. */
1541 static tree
1542 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1544 vn_reference_s **slot;
1545 hashval_t hash;
1547 hash = vr->hashcode;
1548 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1549 if (!slot && current_info == optimistic_info)
1550 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1551 if (slot)
1553 if (vnresult)
1554 *vnresult = (vn_reference_t)*slot;
1555 return ((vn_reference_t)*slot)->result;
1558 return NULL_TREE;
1561 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1562 with the current VUSE and performs the expression lookup. */
1564 static void *
1565 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1566 unsigned int cnt, void *vr_)
1568 vn_reference_t vr = (vn_reference_t)vr_;
1569 vn_reference_s **slot;
1570 hashval_t hash;
1572 /* This bounds the stmt walks we perform on reference lookups
1573 to O(1) instead of O(N) where N is the number of dominating
1574 stores. */
1575 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1576 return (void *)-1;
1578 if (last_vuse_ptr)
1579 *last_vuse_ptr = vuse;
1581 /* Fixup vuse and hash. */
1582 if (vr->vuse)
1583 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1584 vr->vuse = vuse_ssa_val (vuse);
1585 if (vr->vuse)
1586 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1588 hash = vr->hashcode;
1589 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1590 if (!slot && current_info == optimistic_info)
1591 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1592 if (slot)
1593 return *slot;
1595 return NULL;
1598 /* Lookup an existing or insert a new vn_reference entry into the
1599 value table for the VUSE, SET, TYPE, OPERANDS reference which
1600 has the value VALUE which is either a constant or an SSA name. */
1602 static vn_reference_t
1603 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1604 alias_set_type set,
1605 tree type,
1606 vec<vn_reference_op_s,
1607 va_heap> operands,
1608 tree value)
1610 vn_reference_s vr1;
1611 vn_reference_t result;
1612 unsigned value_id;
1613 vr1.vuse = vuse;
1614 vr1.operands = operands;
1615 vr1.type = type;
1616 vr1.set = set;
1617 vr1.hashcode = vn_reference_compute_hash (&vr1);
1618 if (vn_reference_lookup_1 (&vr1, &result))
1619 return result;
1620 if (TREE_CODE (value) == SSA_NAME)
1621 value_id = VN_INFO (value)->value_id;
1622 else
1623 value_id = get_or_alloc_constant_value_id (value);
1624 return vn_reference_insert_pieces (vuse, set, type,
1625 operands.copy (), value, value_id);
1628 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *stmt, tree result);
1630 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
1632 static tree
1633 vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops)
1635 if (!rcode.is_tree_code ())
1636 return NULL_TREE;
1637 vn_nary_op_t vnresult = NULL;
1638 return vn_nary_op_lookup_pieces (TREE_CODE_LENGTH ((tree_code) rcode),
1639 (tree_code) rcode, type, ops, &vnresult);
1642 /* Return a value-number for RCODE OPS... either by looking up an existing
1643 value-number for the simplified result or by inserting the operation if
1644 INSERT is true. */
1646 static tree
1647 vn_nary_build_or_lookup_1 (code_helper rcode, tree type, tree *ops,
1648 bool insert)
1650 tree result = NULL_TREE;
1651 /* We will be creating a value number for
1652 RCODE (OPS...).
1653 So first simplify and lookup this expression to see if it
1654 is already available. */
1655 mprts_hook = vn_lookup_simplify_result;
1656 bool res = false;
1657 switch (TREE_CODE_LENGTH ((tree_code) rcode))
1659 case 1:
1660 res = gimple_resimplify1 (NULL, &rcode, type, ops, vn_valueize);
1661 break;
1662 case 2:
1663 res = gimple_resimplify2 (NULL, &rcode, type, ops, vn_valueize);
1664 break;
1665 case 3:
1666 res = gimple_resimplify3 (NULL, &rcode, type, ops, vn_valueize);
1667 break;
1669 mprts_hook = NULL;
1670 gimple *new_stmt = NULL;
1671 if (res
1672 && gimple_simplified_result_is_gimple_val (rcode, ops))
1673 /* The expression is already available. */
1674 result = ops[0];
1675 else
1677 tree val = vn_lookup_simplify_result (rcode, type, ops);
1678 if (!val && insert)
1680 gimple_seq stmts = NULL;
1681 result = maybe_push_res_to_seq (rcode, type, ops, &stmts);
1682 if (result)
1684 gcc_assert (gimple_seq_singleton_p (stmts));
1685 new_stmt = gimple_seq_first_stmt (stmts);
1688 else
1689 /* The expression is already available. */
1690 result = val;
1692 if (new_stmt)
1694 /* The expression is not yet available, value-number lhs to
1695 the new SSA_NAME we created. */
1696 /* Initialize value-number information properly. */
1697 VN_INFO_GET (result)->valnum = result;
1698 VN_INFO (result)->value_id = get_next_value_id ();
1699 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1700 new_stmt);
1701 VN_INFO (result)->needs_insertion = true;
1702 /* ??? PRE phi-translation inserts NARYs without corresponding
1703 SSA name result. Re-use those but set their result according
1704 to the stmt we just built. */
1705 vn_nary_op_t nary = NULL;
1706 vn_nary_op_lookup_stmt (new_stmt, &nary);
1707 if (nary)
1709 gcc_assert (nary->result == NULL_TREE);
1710 nary->result = gimple_assign_lhs (new_stmt);
1712 /* As all "inserted" statements are singleton SCCs, insert
1713 to the valid table. This is strictly needed to
1714 avoid re-generating new value SSA_NAMEs for the same
1715 expression during SCC iteration over and over (the
1716 optimistic table gets cleared after each iteration).
1717 We do not need to insert into the optimistic table, as
1718 lookups there will fall back to the valid table. */
1719 else if (current_info == optimistic_info)
1721 current_info = valid_info;
1722 vn_nary_op_insert_stmt (new_stmt, result);
1723 current_info = optimistic_info;
1725 else
1726 vn_nary_op_insert_stmt (new_stmt, result);
1727 if (dump_file && (dump_flags & TDF_DETAILS))
1729 fprintf (dump_file, "Inserting name ");
1730 print_generic_expr (dump_file, result, 0);
1731 fprintf (dump_file, " for expression ");
1732 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1733 fprintf (dump_file, "\n");
1736 return result;
1739 /* Return a value-number for RCODE OPS... either by looking up an existing
1740 value-number for the simplified result or by inserting the operation. */
1742 static tree
1743 vn_nary_build_or_lookup (code_helper rcode, tree type, tree *ops)
1745 return vn_nary_build_or_lookup_1 (rcode, type, ops, true);
1748 /* Try to simplify the nary operation NARY and return a value number
1749 for its simplified result if present. */
1751 tree
1752 vn_nary_simplify (vn_nary_op_t nary)
1754 if (nary->length > 3)
1755 return NULL_TREE;
1756 tree ops[3];
1757 memcpy (ops, nary->op, sizeof (tree) * nary->length);
1758 return vn_nary_build_or_lookup_1 (nary->opcode, nary->type, ops, false);
1762 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1763 from the statement defining VUSE and if not successful tries to
1764 translate *REFP and VR_ through an aggregate copy at the definition
1765 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1766 of *REF and *VR. If only disambiguation was performed then
1767 *DISAMBIGUATE_ONLY is set to true. */
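/* Editorial example (hypothetical code): when looking up a.f with a VUSE
   defined by the aggregate copy  a = b;  case 5) below rewrites the
   reference operands from a.f to b.f and lets the walk continue, so an
   earlier store to b.f (or a constant initializer of b) can supply the
   value.  */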
1769 static void *
1770 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1771 bool *disambiguate_only)
1773 vn_reference_t vr = (vn_reference_t)vr_;
1774 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1775 tree base = ao_ref_base (ref);
1776 HOST_WIDE_INT offset, maxsize;
1777 static vec<vn_reference_op_s>
1778 lhs_ops = vNULL;
1779 ao_ref lhs_ref;
1780 bool lhs_ref_ok = false;
1782 /* If the reference is based on a parameter that was determined as
1783 pointing to readonly memory it doesn't change. */
1784 if (TREE_CODE (base) == MEM_REF
1785 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1786 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1787 && bitmap_bit_p (const_parms,
1788 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1790 *disambiguate_only = true;
1791 return NULL;
1794 /* First try to disambiguate after value-replacing in the definitions LHS. */
1795 if (is_gimple_assign (def_stmt))
1797 tree lhs = gimple_assign_lhs (def_stmt);
1798 bool valueized_anything = false;
1799 /* Avoid re-allocation overhead. */
1800 lhs_ops.truncate (0);
1801 copy_reference_ops_from_ref (lhs, &lhs_ops);
1802 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1803 if (valueized_anything)
1805 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1806 get_alias_set (lhs),
1807 TREE_TYPE (lhs), lhs_ops);
1808 if (lhs_ref_ok
1809 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1811 *disambiguate_only = true;
1812 return NULL;
1815 else
1817 ao_ref_init (&lhs_ref, lhs);
1818 lhs_ref_ok = true;
1821 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1822 && gimple_call_num_args (def_stmt) <= 4)
1824 /* For builtin calls, valueize their arguments and call the
1825 alias oracle again. Valueization may improve points-to
1826 info of pointers and constify size and position arguments.
1827 Originally this was motivated by PR61034 which has
1828 conditional calls to free falsely clobbering ref because
1829 of imprecise points-to info of the argument. */
1830 tree oldargs[4];
1831 bool valueized_anything = false;
1832 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1834 oldargs[i] = gimple_call_arg (def_stmt, i);
1835 if (TREE_CODE (oldargs[i]) == SSA_NAME
1836 && VN_INFO (oldargs[i])->valnum != oldargs[i])
1838 gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
1839 valueized_anything = true;
1842 if (valueized_anything)
1844 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1845 ref);
1846 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1847 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1848 if (!res)
1850 *disambiguate_only = true;
1851 return NULL;
1856 if (*disambiguate_only)
1857 return (void *)-1;
1859 offset = ref->offset;
1860 maxsize = ref->max_size;
1862 /* If we cannot constrain the size of the reference we cannot
1863 test if anything kills it. */
1864 if (maxsize == -1)
1865 return (void *)-1;
1867 /* We can't deduce anything useful from clobbers. */
1868 if (gimple_clobber_p (def_stmt))
1869 return (void *)-1;
1871 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1872 from that definition.
1873 1) Memset. */
1874 if (is_gimple_reg_type (vr->type)
1875 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1876 && integer_zerop (gimple_call_arg (def_stmt, 1))
1877 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1878 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1880 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1881 tree base2;
1882 HOST_WIDE_INT offset2, size2, maxsize2;
1883 bool reverse;
1884 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1885 &reverse);
1886 size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
1887 if ((unsigned HOST_WIDE_INT)size2 / 8
1888 == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
1889 && maxsize2 != -1
1890 && operand_equal_p (base, base2, 0)
1891 && offset2 <= offset
1892 && offset2 + size2 >= offset + maxsize)
1894 tree val = build_zero_cst (vr->type);
1895 return vn_reference_lookup_or_insert_for_pieces
1896 (vuse, vr->set, vr->type, vr->operands, val);
1900 /* 2) Assignment from an empty CONSTRUCTOR. */
1901 else if (is_gimple_reg_type (vr->type)
1902 && gimple_assign_single_p (def_stmt)
1903 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1904 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1906 tree base2;
1907 HOST_WIDE_INT offset2, size2, maxsize2;
1908 bool reverse;
1909 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1910 &offset2, &size2, &maxsize2, &reverse);
1911 if (maxsize2 != -1
1912 && operand_equal_p (base, base2, 0)
1913 && offset2 <= offset
1914 && offset2 + size2 >= offset + maxsize)
1916 tree val = build_zero_cst (vr->type);
1917 return vn_reference_lookup_or_insert_for_pieces
1918 (vuse, vr->set, vr->type, vr->operands, val);
1922 /* 3) Assignment from a constant. We can use fold's native encode/interpret
1923 routines to extract the assigned bits. */
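/* Editorial example (hypothetical values): after  *p = 0x01020304;  a
   later one-byte read from *p can be derived here by native_encode_expr-ing
   the stored constant into a buffer and native_interpret_expr-ing the
   byte at the read's offset back in the read's type.  */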
1924 else if (ref->size == maxsize
1925 && is_gimple_reg_type (vr->type)
1926 && !contains_storage_order_barrier_p (vr->operands)
1927 && gimple_assign_single_p (def_stmt)
1928 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1929 && maxsize % BITS_PER_UNIT == 0
1930 && offset % BITS_PER_UNIT == 0
1931 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
1932 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
1933 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
1935 tree base2;
1936 HOST_WIDE_INT offset2, size2, maxsize2;
1937 bool reverse;
1938 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1939 &offset2, &size2, &maxsize2, &reverse);
1940 if (!reverse
1941 && maxsize2 != -1
1942 && maxsize2 == size2
1943 && size2 % BITS_PER_UNIT == 0
1944 && offset2 % BITS_PER_UNIT == 0
1945 && operand_equal_p (base, base2, 0)
1946 && offset2 <= offset
1947 && offset2 + size2 >= offset + maxsize)
1949 /* We support up to 512-bit values (for V8DFmode). */
1950 unsigned char buffer[64];
1951 int len;
1953 tree rhs = gimple_assign_rhs1 (def_stmt);
1954 if (TREE_CODE (rhs) == SSA_NAME)
1955 rhs = SSA_VAL (rhs);
1956 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1957 buffer, sizeof (buffer));
1958 if (len > 0)
1960 tree type = vr->type;
1961 /* Make sure to interpret in a type that has a range
1962 covering the whole access size. */
1963 if (INTEGRAL_TYPE_P (vr->type)
1964 && ref->size != TYPE_PRECISION (vr->type))
1965 type = build_nonstandard_integer_type (ref->size,
1966 TYPE_UNSIGNED (type));
1967 tree val = native_interpret_expr (type,
1968 buffer
1969 + ((offset - offset2)
1970 / BITS_PER_UNIT),
1971 ref->size / BITS_PER_UNIT);
1972 /* If we chop off bits because the type's precision doesn't
1973 match the memory access size, this is ok when optimizing
1974 reads but not when called from the DSE code during
1975 elimination. */
1976 if (val
1977 && type != vr->type)
1979 if (! int_fits_type_p (val, vr->type))
1980 val = NULL_TREE;
1981 else
1982 val = fold_convert (vr->type, val);
1985 if (val)
1986 return vn_reference_lookup_or_insert_for_pieces
1987 (vuse, vr->set, vr->type, vr->operands, val);
1992 /* 4) Assignment from an SSA name which definition we may be able
1993 to access pieces from. */
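/* For example, after 'a = x_1;' a read of a piece of A is expressed as
   BIT_FIELD_REF <x_1, size, position> and simplified or looked up via
   the nary machinery below.  */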
1994 else if (ref->size == maxsize
1995 && is_gimple_reg_type (vr->type)
1996 && !contains_storage_order_barrier_p (vr->operands)
1997 && gimple_assign_single_p (def_stmt)
1998 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2000 tree base2;
2001 HOST_WIDE_INT offset2, size2, maxsize2;
2002 bool reverse;
2003 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2004 &offset2, &size2, &maxsize2,
2005 &reverse);
2006 if (!reverse
2007 && maxsize2 != -1
2008 && maxsize2 == size2
2009 && operand_equal_p (base, base2, 0)
2010 && offset2 <= offset
2011 && offset2 + size2 >= offset + maxsize
2012 /* ??? We can't handle bitfield precision extracts without
2013 either using an alternate type for the BIT_FIELD_REF and
2014 then doing a conversion or possibly adjusting the offset
2015 according to endianness. */
2016 && (! INTEGRAL_TYPE_P (vr->type)
2017 || ref->size == TYPE_PRECISION (vr->type))
2018 && ref->size % BITS_PER_UNIT == 0)
2020 code_helper rcode = BIT_FIELD_REF;
2021 tree ops[3];
2022 ops[0] = SSA_VAL (gimple_assign_rhs1 (def_stmt));
2023 ops[1] = bitsize_int (ref->size);
2024 ops[2] = bitsize_int (offset - offset2);
2025 tree val = vn_nary_build_or_lookup (rcode, vr->type, ops);
2026 if (val)
2028 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2029 (vuse, vr->set, vr->type, vr->operands, val);
2030 return res;
2035 /* 5) For aggregate copies translate the reference through them if
2036 the copy kills ref. */
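/* For example, after 'a = b;' a read from a piece of A is rewritten into
   the corresponding read from B and the lookup continues with the
   translated reference.  */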
2037 else if (vn_walk_kind == VN_WALKREWRITE
2038 && gimple_assign_single_p (def_stmt)
2039 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2040 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2041 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2043 tree base2;
2044 HOST_WIDE_INT maxsize2;
2045 int i, j, k;
2046 auto_vec<vn_reference_op_s> rhs;
2047 vn_reference_op_t vro;
2048 ao_ref r;
2050 if (!lhs_ref_ok)
2051 return (void *)-1;
2053 /* See if the assignment kills REF. */
2054 base2 = ao_ref_base (&lhs_ref);
2055 maxsize2 = lhs_ref.max_size;
2056 if (maxsize2 == -1
2057 || (base != base2
2058 && (TREE_CODE (base) != MEM_REF
2059 || TREE_CODE (base2) != MEM_REF
2060 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2061 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2062 TREE_OPERAND (base2, 1))))
2063 || !stmt_kills_ref_p (def_stmt, ref))
2064 return (void *)-1;
2066 /* Find the common base of ref and the lhs. lhs_ops already
2067 contains valueized operands for the lhs. */
2068 i = vr->operands.length () - 1;
2069 j = lhs_ops.length () - 1;
2070 while (j >= 0 && i >= 0
2071 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2073 i--;
2074 j--;
2077 /* ??? The innermost op should always be a MEM_REF and we already
2078 checked that the assignment to the lhs kills vr. Thus for
2079 aggregate copies using char[] types the vn_reference_op_eq
2080 may fail when comparing types for compatibility. But we really
2081 don't care here - further lookups with the rewritten operands
2082 will simply fail if we messed up types too badly. */
2083 HOST_WIDE_INT extra_off = 0;
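/* If the innermost MEM_REFs of the LHS and of VR agree except for a
   constant offset, record that difference so it can be applied to the
   RHS reference built below.  */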
2084 if (j == 0 && i >= 0
2085 && lhs_ops[0].opcode == MEM_REF
2086 && lhs_ops[0].off != -1)
2088 if (lhs_ops[0].off == vr->operands[i].off)
2089 i--, j--;
2090 else if (vr->operands[i].opcode == MEM_REF
2091 && vr->operands[i].off != -1)
2093 extra_off = vr->operands[i].off - lhs_ops[0].off;
2094 i--, j--;
2098 /* i now points to the first additional op.
2099 ??? LHS may not be completely contained in VR, one or more
2100 VIEW_CONVERT_EXPRs could be in its way. We could at least
2101 try handling outermost VIEW_CONVERT_EXPRs. */
2102 if (j != -1)
2103 return (void *)-1;
2105 /* Punt if the additional ops contain a storage order barrier. */
2106 for (k = i; k >= 0; k--)
2108 vro = &vr->operands[k];
2109 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2110 return (void *)-1;
2113 /* Now re-write REF to be based on the rhs of the assignment. */
2114 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2116 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2117 if (extra_off != 0)
2119 if (rhs.length () < 2
2120 || rhs[0].opcode != MEM_REF
2121 || rhs[0].off == -1)
2122 return (void *)-1;
2123 rhs[0].off += extra_off;
2124 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
2125 build_int_cst (TREE_TYPE (rhs[0].op0),
2126 extra_off));
2129 /* We need to prepend vr->operands[0..i] to rhs. */
2130 vec<vn_reference_op_s> old = vr->operands;
2131 if (i + 1 + rhs.length () > vr->operands.length ())
2132 vr->operands.safe_grow (i + 1 + rhs.length ());
2133 else
2134 vr->operands.truncate (i + 1 + rhs.length ());
2135 FOR_EACH_VEC_ELT (rhs, j, vro)
2136 vr->operands[i + 1 + j] = *vro;
2137 vr->operands = valueize_refs (vr->operands);
2138 if (old == shared_lookup_references)
2139 shared_lookup_references = vr->operands;
2140 vr->hashcode = vn_reference_compute_hash (vr);
2142 /* Try folding the new reference to a constant. */
2143 tree val = fully_constant_vn_reference_p (vr);
2144 if (val)
2145 return vn_reference_lookup_or_insert_for_pieces
2146 (vuse, vr->set, vr->type, vr->operands, val);
2148 /* Adjust *ref from the new operands. */
2149 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2150 return (void *)-1;
2151 /* This can happen with bitfields. */
2152 if (ref->size != r.size)
2153 return (void *)-1;
2154 *ref = r;
2156 /* Do not update last seen VUSE after translating. */
2157 last_vuse_ptr = NULL;
2159 /* Keep looking for the adjusted *REF / VR pair. */
2160 return NULL;
2163 /* 6) For memcpy copies translate the reference through them if
2164 the copy kills ref. */
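/* For example, after 'memcpy (&a, &b, sizeof (a));' a read from a piece
   of A is rewritten into a MEM_REF based on &b at the corresponding
   offset and the lookup continues from there.  */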
2165 else if (vn_walk_kind == VN_WALKREWRITE
2166 && is_gimple_reg_type (vr->type)
2167 /* ??? Handle BCOPY as well. */
2168 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2169 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2170 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2171 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2172 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2173 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2174 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2175 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
2177 tree lhs, rhs;
2178 ao_ref r;
2179 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
2180 vn_reference_op_s op;
2181 HOST_WIDE_INT at;
2183 /* Only handle non-variable, addressable refs. */
2184 if (ref->size != maxsize
2185 || offset % BITS_PER_UNIT != 0
2186 || ref->size % BITS_PER_UNIT != 0)
2187 return (void *)-1;
2189 /* Extract a pointer base and an offset for the destination. */
2190 lhs = gimple_call_arg (def_stmt, 0);
2191 lhs_offset = 0;
2192 if (TREE_CODE (lhs) == SSA_NAME)
2194 lhs = SSA_VAL (lhs);
2195 if (TREE_CODE (lhs) == SSA_NAME)
2197 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2198 if (gimple_assign_single_p (def_stmt)
2199 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2200 lhs = gimple_assign_rhs1 (def_stmt);
2203 if (TREE_CODE (lhs) == ADDR_EXPR)
2205 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2206 &lhs_offset);
2207 if (!tem)
2208 return (void *)-1;
2209 if (TREE_CODE (tem) == MEM_REF
2210 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2212 lhs = TREE_OPERAND (tem, 0);
2213 if (TREE_CODE (lhs) == SSA_NAME)
2214 lhs = SSA_VAL (lhs);
2215 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2217 else if (DECL_P (tem))
2218 lhs = build_fold_addr_expr (tem);
2219 else
2220 return (void *)-1;
2222 if (TREE_CODE (lhs) != SSA_NAME
2223 && TREE_CODE (lhs) != ADDR_EXPR)
2224 return (void *)-1;
2226 /* Extract a pointer base and an offset for the source. */
2227 rhs = gimple_call_arg (def_stmt, 1);
2228 rhs_offset = 0;
2229 if (TREE_CODE (rhs) == SSA_NAME)
2230 rhs = SSA_VAL (rhs);
2231 if (TREE_CODE (rhs) == ADDR_EXPR)
2233 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2234 &rhs_offset);
2235 if (!tem)
2236 return (void *)-1;
2237 if (TREE_CODE (tem) == MEM_REF
2238 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2240 rhs = TREE_OPERAND (tem, 0);
2241 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2243 else if (DECL_P (tem))
2244 rhs = build_fold_addr_expr (tem);
2245 else
2246 return (void *)-1;
2248 if (TREE_CODE (rhs) != SSA_NAME
2249 && TREE_CODE (rhs) != ADDR_EXPR)
2250 return (void *)-1;
2252 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2254 /* The bases of the destination and the references have to agree. */
2255 if ((TREE_CODE (base) != MEM_REF
2256 && !DECL_P (base))
2257 || (TREE_CODE (base) == MEM_REF
2258 && (TREE_OPERAND (base, 0) != lhs
2259 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
2260 || (DECL_P (base)
2261 && (TREE_CODE (lhs) != ADDR_EXPR
2262 || TREE_OPERAND (lhs, 0) != base)))
2263 return (void *)-1;
2265 at = offset / BITS_PER_UNIT;
2266 if (TREE_CODE (base) == MEM_REF)
2267 at += tree_to_uhwi (TREE_OPERAND (base, 1));
2268 /* If the access is completely outside of the memcpy destination
2269 area there is no aliasing. */
2270 if (lhs_offset >= at + maxsize / BITS_PER_UNIT
2271 || lhs_offset + copy_size <= at)
2272 return NULL;
2273 /* And the access has to be contained within the memcpy destination. */
2274 if (lhs_offset > at
2275 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2276 return (void *)-1;
2278 /* Make room for 2 operands in the new reference. */
2279 if (vr->operands.length () < 2)
2281 vec<vn_reference_op_s> old = vr->operands;
2282 vr->operands.safe_grow_cleared (2);
2283 if (old == shared_lookup_references)
2284 shared_lookup_references = vr->operands;
2286 else
2287 vr->operands.truncate (2);
2289 /* The looked-through reference is a simple MEM_REF. */
2290 memset (&op, 0, sizeof (op));
2291 op.type = vr->type;
2292 op.opcode = MEM_REF;
2293 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2294 op.off = at - lhs_offset + rhs_offset;
2295 vr->operands[0] = op;
2296 op.type = TREE_TYPE (rhs);
2297 op.opcode = TREE_CODE (rhs);
2298 op.op0 = rhs;
2299 op.off = -1;
2300 vr->operands[1] = op;
2301 vr->hashcode = vn_reference_compute_hash (vr);
2303 /* Try folding the new reference to a constant. */
2304 tree val = fully_constant_vn_reference_p (vr);
2305 if (val)
2306 return vn_reference_lookup_or_insert_for_pieces
2307 (vuse, vr->set, vr->type, vr->operands, val);
2309 /* Adjust *ref from the new operands. */
2310 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2311 return (void *)-1;
2312 /* This can happen with bitfields. */
2313 if (ref->size != r.size)
2314 return (void *)-1;
2315 *ref = r;
2317 /* Do not update last seen VUSE after translating. */
2318 last_vuse_ptr = NULL;
2320 /* Keep looking for the adjusted *REF / VR pair. */
2321 return NULL;
2324 /* Bail out and stop walking. */
2325 return (void *)-1;
2328 /* Return a reference op vector from OP that can be used for
2329 vn_reference_lookup_pieces. The caller is responsible for releasing
2330 the vector. */
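/* Illustrative caller-side sketch:
     vec<vn_reference_op_s> ops = vn_reference_operands_for_lookup (op);
     ... vn_reference_lookup_pieces (vuse, set, type, ops, &res, kind); ...
     ops.release ();  */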
2332 vec<vn_reference_op_s>
2333 vn_reference_operands_for_lookup (tree op)
2335 bool valueized;
2336 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2339 /* Lookup a reference operation by its parts, in the current hash table.
2340 Returns the resulting value number if it exists in the hash table,
2341 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2342 vn_reference_t stored in the hashtable if something is found. */
2344 tree
2345 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2346 vec<vn_reference_op_s> operands,
2347 vn_reference_t *vnresult, vn_lookup_kind kind)
2349 struct vn_reference_s vr1;
2350 vn_reference_t tmp;
2351 tree cst;
2353 if (!vnresult)
2354 vnresult = &tmp;
2355 *vnresult = NULL;
2357 vr1.vuse = vuse_ssa_val (vuse);
2358 shared_lookup_references.truncate (0);
2359 shared_lookup_references.safe_grow (operands.length ());
2360 memcpy (shared_lookup_references.address (),
2361 operands.address (),
2362 sizeof (vn_reference_op_s)
2363 * operands.length ());
2364 vr1.operands = operands = shared_lookup_references
2365 = valueize_refs (shared_lookup_references);
2366 vr1.type = type;
2367 vr1.set = set;
2368 vr1.hashcode = vn_reference_compute_hash (&vr1);
2369 if ((cst = fully_constant_vn_reference_p (&vr1)))
2370 return cst;
2372 vn_reference_lookup_1 (&vr1, vnresult);
2373 if (!*vnresult
2374 && kind != VN_NOWALK
2375 && vr1.vuse)
2377 ao_ref r;
2378 vn_walk_kind = kind;
2379 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2380 *vnresult =
2381 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2382 vn_reference_lookup_2,
2383 vn_reference_lookup_3,
2384 vuse_ssa_val, &vr1);
2385 gcc_checking_assert (vr1.operands == shared_lookup_references);
2388 if (*vnresult)
2389 return (*vnresult)->result;
2391 return NULL_TREE;
2394 /* Lookup OP in the current hash table, and return the resulting value
2395 number if it exists in the hash table. Return NULL_TREE if it does
2396 not exist in the hash table or if the result field of the structure
2397 was NULL. VNRESULT will be filled in with the vn_reference_t
2398 stored in the hashtable if one exists. When TBAA_P is false assume
2399 we are looking up a store and treat it as having alias-set zero. */
2401 tree
2402 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2403 vn_reference_t *vnresult, bool tbaa_p)
2405 vec<vn_reference_op_s> operands;
2406 struct vn_reference_s vr1;
2407 tree cst;
2408 bool valueized_anything;
2410 if (vnresult)
2411 *vnresult = NULL;
2413 vr1.vuse = vuse_ssa_val (vuse);
2414 vr1.operands = operands
2415 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2416 vr1.type = TREE_TYPE (op);
2417 vr1.set = tbaa_p ? get_alias_set (op) : 0;
2418 vr1.hashcode = vn_reference_compute_hash (&vr1);
2419 if ((cst = fully_constant_vn_reference_p (&vr1)))
2420 return cst;
2422 if (kind != VN_NOWALK
2423 && vr1.vuse)
2425 vn_reference_t wvnresult;
2426 ao_ref r;
2427 /* Make sure to use a valueized reference if we valueized anything.
2428 Otherwise preserve the full reference for advanced TBAA. */
2429 if (!valueized_anything
2430 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2431 vr1.operands))
2432 ao_ref_init (&r, op);
2433 if (! tbaa_p)
2434 r.ref_alias_set = r.base_alias_set = 0;
2435 vn_walk_kind = kind;
2436 wvnresult =
2437 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2438 vn_reference_lookup_2,
2439 vn_reference_lookup_3,
2440 vuse_ssa_val, &vr1);
2441 gcc_checking_assert (vr1.operands == shared_lookup_references);
2442 if (wvnresult)
2444 if (vnresult)
2445 *vnresult = wvnresult;
2446 return wvnresult->result;
2449 return NULL_TREE;
2452 return vn_reference_lookup_1 (&vr1, vnresult);
2455 /* Lookup CALL in the current hash table and return the entry in
2456 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2458 void
2459 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2460 vn_reference_t vr)
2462 if (vnresult)
2463 *vnresult = NULL;
2465 tree vuse = gimple_vuse (call);
2467 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2468 vr->operands = valueize_shared_reference_ops_from_call (call);
2469 vr->type = gimple_expr_type (call);
2470 vr->set = 0;
2471 vr->hashcode = vn_reference_compute_hash (vr);
2472 vn_reference_lookup_1 (vr, vnresult);
2475 /* Insert OP into the current hash table with a value number of
2476 RESULT, and return the resulting reference structure we created. */
2478 static vn_reference_t
2479 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2481 vn_reference_s **slot;
2482 vn_reference_t vr1;
2483 bool tem;
2485 vr1 = current_info->references_pool->allocate ();
2486 if (TREE_CODE (result) == SSA_NAME)
2487 vr1->value_id = VN_INFO (result)->value_id;
2488 else
2489 vr1->value_id = get_or_alloc_constant_value_id (result);
2490 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2491 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2492 vr1->type = TREE_TYPE (op);
2493 vr1->set = get_alias_set (op);
2494 vr1->hashcode = vn_reference_compute_hash (vr1);
2495 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2496 vr1->result_vdef = vdef;
2498 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2499 INSERT);
2501 /* Because we lookup stores using vuses, and value number failures
2502 using the vdefs (see visit_reference_op_store for how and why),
2503 it's possible that on failure we may try to insert an already
2504 inserted store. This is not wrong, there is no ssa name for a
2505 store that we could use as a differentiator anyway. Thus, unlike
2506 the other lookup functions, you cannot gcc_assert (!*slot)
2507 here. */
2509 /* But free the old slot in case of a collision. */
2510 if (*slot)
2511 free_reference (*slot);
2513 *slot = vr1;
2514 return vr1;
2517 /* Insert a reference by its pieces into the current hash table with
2518 a value number of RESULT. Return the resulting reference
2519 structure we created. */
2521 vn_reference_t
2522 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2523 vec<vn_reference_op_s> operands,
2524 tree result, unsigned int value_id)
2527 vn_reference_s **slot;
2528 vn_reference_t vr1;
2530 vr1 = current_info->references_pool->allocate ();
2531 vr1->value_id = value_id;
2532 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2533 vr1->operands = valueize_refs (operands);
2534 vr1->type = type;
2535 vr1->set = set;
2536 vr1->hashcode = vn_reference_compute_hash (vr1);
2537 if (result && TREE_CODE (result) == SSA_NAME)
2538 result = SSA_VAL (result);
2539 vr1->result = result;
2541 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2542 INSERT);
2544 /* At this point we should have all the things inserted that we have
2545 seen before, and we should never try inserting something that
2546 already exists. */
2547 gcc_assert (!*slot);
2548 if (*slot)
2549 free_reference (*slot);
2551 *slot = vr1;
2552 return vr1;
2555 /* Compute and return the hash value for nary operation VBO1. */
2557 static hashval_t
2558 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2560 inchash::hash hstate;
2561 unsigned i;
2563 for (i = 0; i < vno1->length; ++i)
2564 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2565 vno1->op[i] = SSA_VAL (vno1->op[i]);
2567 if (((vno1->length == 2
2568 && commutative_tree_code (vno1->opcode))
2569 || (vno1->length == 3
2570 && commutative_ternary_tree_code (vno1->opcode)))
2571 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2572 std::swap (vno1->op[0], vno1->op[1]);
2573 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2574 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2576 std::swap (vno1->op[0], vno1->op[1]);
2577 vno1->opcode = swap_tree_comparison (vno1->opcode);
2580 hstate.add_int (vno1->opcode);
2581 for (i = 0; i < vno1->length; ++i)
2582 inchash::add_expr (vno1->op[i], hstate);
2584 return hstate.end ();
2587 /* Compare nary operations VNO1 and VNO2 and return true if they are
2588 equivalent. */
2590 bool
2591 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2593 unsigned i;
2595 if (vno1->hashcode != vno2->hashcode)
2596 return false;
2598 if (vno1->length != vno2->length)
2599 return false;
2601 if (vno1->opcode != vno2->opcode
2602 || !types_compatible_p (vno1->type, vno2->type))
2603 return false;
2605 for (i = 0; i < vno1->length; ++i)
2606 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2607 return false;
2609 return true;
2612 /* Initialize VNO from the pieces provided. */
2614 static void
2615 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2616 enum tree_code code, tree type, tree *ops)
2618 vno->opcode = code;
2619 vno->length = length;
2620 vno->type = type;
2621 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2624 /* Initialize VNO from OP. */
2626 static void
2627 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2629 unsigned i;
2631 vno->opcode = TREE_CODE (op);
2632 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2633 vno->type = TREE_TYPE (op);
2634 for (i = 0; i < vno->length; ++i)
2635 vno->op[i] = TREE_OPERAND (op, i);
2638 /* Return the number of operands for a vn_nary ops structure from STMT. */
2640 static unsigned int
2641 vn_nary_length_from_stmt (gimple *stmt)
2643 switch (gimple_assign_rhs_code (stmt))
2645 case REALPART_EXPR:
2646 case IMAGPART_EXPR:
2647 case VIEW_CONVERT_EXPR:
2648 return 1;
2650 case BIT_FIELD_REF:
2651 return 3;
2653 case CONSTRUCTOR:
2654 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2656 default:
2657 return gimple_num_ops (stmt) - 1;
2661 /* Initialize VNO from STMT. */
2663 static void
2664 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2666 unsigned i;
2668 vno->opcode = gimple_assign_rhs_code (stmt);
2669 vno->type = gimple_expr_type (stmt);
2670 switch (vno->opcode)
2672 case REALPART_EXPR:
2673 case IMAGPART_EXPR:
2674 case VIEW_CONVERT_EXPR:
2675 vno->length = 1;
2676 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2677 break;
2679 case BIT_FIELD_REF:
2680 vno->length = 3;
2681 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2682 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2683 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2684 break;
2686 case CONSTRUCTOR:
2687 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2688 for (i = 0; i < vno->length; ++i)
2689 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2690 break;
2692 default:
2693 gcc_checking_assert (!gimple_assign_single_p (stmt));
2694 vno->length = gimple_num_ops (stmt) - 1;
2695 for (i = 0; i < vno->length; ++i)
2696 vno->op[i] = gimple_op (stmt, i + 1);
2700 /* Compute the hashcode for VNO and look for it in the hash table;
2701 return the resulting value number if it exists in the hash table.
2702 Return NULL_TREE if it does not exist in the hash table or if the
2703 result field of the operation is NULL. VNRESULT will contain the
2704 vn_nary_op_t from the hashtable if it exists. */
2706 static tree
2707 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2709 vn_nary_op_s **slot;
2711 if (vnresult)
2712 *vnresult = NULL;
2714 vno->hashcode = vn_nary_op_compute_hash (vno);
2715 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2716 NO_INSERT);
2717 if (!slot && current_info == optimistic_info)
2718 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2719 NO_INSERT);
2720 if (!slot)
2721 return NULL_TREE;
2722 if (vnresult)
2723 *vnresult = *slot;
2724 return (*slot)->result;
2727 /* Lookup an n-ary operation by its pieces and return the resulting value
2728 number if it exists in the hash table. Return NULL_TREE if it does
2729 not exist in the hash table or if the result field of the operation
2730 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2731 if it exists. */
2733 tree
2734 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2735 tree type, tree *ops, vn_nary_op_t *vnresult)
2737 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2738 sizeof_vn_nary_op (length));
2739 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2740 return vn_nary_op_lookup_1 (vno1, vnresult);
2743 /* Lookup OP in the current hash table, and return the resulting value
2744 number if it exists in the hash table. Return NULL_TREE if it does
2745 not exist in the hash table or if the result field of the operation
2746 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2747 if it exists. */
2749 tree
2750 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2752 vn_nary_op_t vno1
2753 = XALLOCAVAR (struct vn_nary_op_s,
2754 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2755 init_vn_nary_op_from_op (vno1, op);
2756 return vn_nary_op_lookup_1 (vno1, vnresult);
2759 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2760 value number if it exists in the hash table. Return NULL_TREE if
2761 it does not exist in the hash table. VNRESULT will contain the
2762 vn_nary_op_t from the hashtable if it exists. */
2764 tree
2765 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2767 vn_nary_op_t vno1
2768 = XALLOCAVAR (struct vn_nary_op_s,
2769 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2770 init_vn_nary_op_from_stmt (vno1, stmt);
2771 return vn_nary_op_lookup_1 (vno1, vnresult);
2774 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2776 static vn_nary_op_t
2777 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2779 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2782 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2783 obstack. */
2785 static vn_nary_op_t
2786 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2788 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2789 &current_info->nary_obstack);
2791 vno1->value_id = value_id;
2792 vno1->length = length;
2793 vno1->result = result;
2795 return vno1;
2798 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2799 VNO->HASHCODE first. */
2801 static vn_nary_op_t
2802 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2803 bool compute_hash)
2805 vn_nary_op_s **slot;
2807 if (compute_hash)
2808 vno->hashcode = vn_nary_op_compute_hash (vno);
2810 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2811 gcc_assert (!*slot);
2813 *slot = vno;
2814 return vno;
2817 /* Insert an n-ary operation into the current hash table using its
2818 pieces. Return the vn_nary_op_t structure we created and put in
2819 the hashtable. */
2821 vn_nary_op_t
2822 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2823 tree type, tree *ops,
2824 tree result, unsigned int value_id)
2826 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2827 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2828 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2831 /* Insert OP into the current hash table with a value number of
2832 RESULT. Return the vn_nary_op_t structure we created and put in
2833 the hashtable. */
2835 vn_nary_op_t
2836 vn_nary_op_insert (tree op, tree result)
2838 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2839 vn_nary_op_t vno1;
2841 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2842 init_vn_nary_op_from_op (vno1, op);
2843 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2846 /* Insert the rhs of STMT into the current hash table with a value number of
2847 RESULT. */
2849 static vn_nary_op_t
2850 vn_nary_op_insert_stmt (gimple *stmt, tree result)
2852 vn_nary_op_t vno1
2853 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2854 result, VN_INFO (result)->value_id);
2855 init_vn_nary_op_from_stmt (vno1, stmt);
2856 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2859 /* Compute a hashcode for PHI operation VP1 and return it. */
2861 static inline hashval_t
2862 vn_phi_compute_hash (vn_phi_t vp1)
2864 inchash::hash hstate (vp1->phiargs.length () > 2
2865 ? vp1->block->index : vp1->phiargs.length ());
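/* The seed above uses the block index for PHIs with more than two
   arguments because such PHIs only compare equal within the same block
   (see vn_phi_eq); one- and two-argument PHIs may match across blocks
   and are seeded with the argument count instead.  */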
2866 tree phi1op;
2867 tree type;
2868 edge e;
2869 edge_iterator ei;
2871 /* If all PHI arguments are constants we need to distinguish
2872 the PHI node via its type. */
2873 type = vp1->type;
2874 hstate.merge_hash (vn_hash_type (type));
2876 FOR_EACH_EDGE (e, ei, vp1->block->preds)
2878 /* Don't hash backedge values, they need to be handled as VN_TOP
2879 for optimistic value-numbering. */
2880 if (e->flags & EDGE_DFS_BACK)
2881 continue;
2883 phi1op = vp1->phiargs[e->dest_idx];
2884 if (phi1op == VN_TOP)
2885 continue;
2886 inchash::add_expr (phi1op, hstate);
2889 return hstate.end ();
2893 /* Return true if COND1 and COND2 represent the same condition, set
2894 *INVERTED_P if one needs to be inverted to make it the same as
2895 the other. */
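/* For example, 'a < b' represents the same condition as 'b > a', and is
   the inverse of 'a >= b'.  */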
2897 static bool
2898 cond_stmts_equal_p (gcond *cond1, gcond *cond2, bool *inverted_p)
2900 enum tree_code code1 = gimple_cond_code (cond1);
2901 enum tree_code code2 = gimple_cond_code (cond2);
2902 tree lhs1 = gimple_cond_lhs (cond1);
2903 tree lhs2 = gimple_cond_lhs (cond2);
2904 tree rhs1 = gimple_cond_rhs (cond1);
2905 tree rhs2 = gimple_cond_rhs (cond2);
2907 *inverted_p = false;
2908 if (code1 == code2)
2910 else if (code1 == swap_tree_comparison (code2))
2911 std::swap (lhs2, rhs2);
2912 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
2913 *inverted_p = true;
2914 else if (code1 == invert_tree_comparison
2915 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
2917 std::swap (lhs2, rhs2);
2918 *inverted_p = true;
2920 else
2921 return false;
2923 lhs1 = vn_valueize (lhs1);
2924 rhs1 = vn_valueize (rhs1);
2925 lhs2 = vn_valueize (lhs2);
2926 rhs2 = vn_valueize (rhs2);
2927 return ((expressions_equal_p (lhs1, lhs2)
2928 && expressions_equal_p (rhs1, rhs2))
2929 || (commutative_tree_code (code1)
2930 && expressions_equal_p (lhs1, rhs2)
2931 && expressions_equal_p (rhs1, lhs2)));
2934 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2936 static int
2937 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2939 if (vp1->hashcode != vp2->hashcode)
2940 return false;
2942 if (vp1->block != vp2->block)
2944 if (vp1->phiargs.length () != vp2->phiargs.length ())
2945 return false;
2947 switch (vp1->phiargs.length ())
2949 case 1:
2950 /* Single-arg PHIs are just copies. */
2951 break;
2953 case 2:
2955 /* Rule out backedges into the PHI. */
2956 if (vp1->block->loop_father->header == vp1->block
2957 || vp2->block->loop_father->header == vp2->block)
2958 return false;
2960 /* If the PHI nodes do not have compatible types
2961 they are not the same. */
2962 if (!types_compatible_p (vp1->type, vp2->type))
2963 return false;
2965 basic_block idom1
2966 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
2967 basic_block idom2
2968 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
2969 /* If the immediate dominators end in switch stmts, multiple
2970 values may end up in the same PHI arg via intermediate
2971 CFG merges. */
2972 if (EDGE_COUNT (idom1->succs) != 2
2973 || EDGE_COUNT (idom2->succs) != 2)
2974 return false;
2976 /* Verify the controlling stmt is the same. */
2977 gimple *last1 = last_stmt (idom1);
2978 gimple *last2 = last_stmt (idom2);
2979 if (gimple_code (last1) != GIMPLE_COND
2980 || gimple_code (last2) != GIMPLE_COND)
2981 return false;
2982 bool inverted_p;
2983 if (! cond_stmts_equal_p (as_a <gcond *> (last1),
2984 as_a <gcond *> (last2), &inverted_p))
2985 return false;
2987 /* Get at true/false controlled edges into the PHI. */
2988 edge te1, te2, fe1, fe2;
2989 if (! extract_true_false_controlled_edges (idom1, vp1->block,
2990 &te1, &fe1)
2991 || ! extract_true_false_controlled_edges (idom2, vp2->block,
2992 &te2, &fe2))
2993 return false;
2995 /* Swap edges if the second condition is the inverted of the
2996 first. */
2997 if (inverted_p)
2998 std::swap (te2, fe2);
3000 /* ??? Handle VN_TOP specially. */
3001 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3002 vp2->phiargs[te2->dest_idx])
3003 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3004 vp2->phiargs[fe2->dest_idx]))
3005 return false;
3007 return true;
3010 default:
3011 return false;
3015 /* If the PHI nodes do not have compatible types
3016 they are not the same. */
3017 if (!types_compatible_p (vp1->type, vp2->type))
3018 return false;
3020 /* Any phi in the same block will have its arguments in the
3021 same edge order, because of how we store phi nodes. */
3022 int i;
3023 tree phi1op;
3024 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
3026 tree phi2op = vp2->phiargs[i];
3027 if (phi1op == VN_TOP || phi2op == VN_TOP)
3028 continue;
3029 if (!expressions_equal_p (phi1op, phi2op))
3030 return false;
3033 return true;
3036 static vec<tree> shared_lookup_phiargs;
3038 /* Lookup PHI in the current hash table, and return the resulting
3039 value number if it exists in the hash table. Return NULL_TREE if
3040 it does not exist in the hash table. */
3042 static tree
3043 vn_phi_lookup (gimple *phi)
3045 vn_phi_s **slot;
3046 struct vn_phi_s vp1;
3047 edge e;
3048 edge_iterator ei;
3050 shared_lookup_phiargs.truncate (0);
3051 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
3053 /* Canonicalize the SSA_NAME's to their value number. */
3054 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3056 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3057 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3058 shared_lookup_phiargs[e->dest_idx] = def;
3060 vp1.type = TREE_TYPE (gimple_phi_result (phi));
3061 vp1.phiargs = shared_lookup_phiargs;
3062 vp1.block = gimple_bb (phi);
3063 vp1.hashcode = vn_phi_compute_hash (&vp1);
3064 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3065 NO_INSERT);
3066 if (!slot && current_info == optimistic_info)
3067 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3068 NO_INSERT);
3069 if (!slot)
3070 return NULL_TREE;
3071 return (*slot)->result;
3074 /* Insert PHI into the current hash table with a value number of
3075 RESULT. */
3077 static vn_phi_t
3078 vn_phi_insert (gimple *phi, tree result)
3080 vn_phi_s **slot;
3081 vn_phi_t vp1 = current_info->phis_pool->allocate ();
3082 vec<tree> args = vNULL;
3083 edge e;
3084 edge_iterator ei;
3086 args.safe_grow (gimple_phi_num_args (phi));
3088 /* Canonicalize the SSA_NAME's to their value number. */
3089 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3091 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3092 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3093 args[e->dest_idx] = def;
3095 vp1->value_id = VN_INFO (result)->value_id;
3096 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3097 vp1->phiargs = args;
3098 vp1->block = gimple_bb (phi);
3099 vp1->result = result;
3100 vp1->hashcode = vn_phi_compute_hash (vp1);
3102 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3104 /* Because we iterate over phi operations more than once, it's
3105 possible the slot might already exist here, hence no assert. */
3106 *slot = vp1;
3107 return vp1;
3111 /* Print set of components in strongly connected component SCC to OUT. */
3113 static void
3114 print_scc (FILE *out, vec<tree> scc)
3116 tree var;
3117 unsigned int i;
3119 fprintf (out, "SCC consists of:");
3120 FOR_EACH_VEC_ELT (scc, i, var)
3122 fprintf (out, " ");
3123 print_generic_expr (out, var, 0);
3125 fprintf (out, "\n");
3128 /* Return true if BB1 is dominated by BB2 taking into account edges
3129 that are not executable. */
3131 static bool
3132 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3134 edge_iterator ei;
3135 edge e;
3137 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3138 return true;
3140 /* Before iterating we'd like to know if there exists an
3141 (executable) path from bb2 to bb1 at all; if not we can
3142 directly return false. For now simply iterate once. */
3144 /* Iterate to the single executable bb1 predecessor. */
3145 if (EDGE_COUNT (bb1->preds) > 1)
3147 edge prede = NULL;
3148 FOR_EACH_EDGE (e, ei, bb1->preds)
3149 if (e->flags & EDGE_EXECUTABLE)
3151 if (prede)
3153 prede = NULL;
3154 break;
3156 prede = e;
3158 if (prede)
3160 bb1 = prede->src;
3162 /* Re-do the dominance check with changed bb1. */
3163 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3164 return true;
3168 /* Iterate to the single executable bb2 successor. */
3169 edge succe = NULL;
3170 FOR_EACH_EDGE (e, ei, bb2->succs)
3171 if (e->flags & EDGE_EXECUTABLE)
3173 if (succe)
3175 succe = NULL;
3176 break;
3178 succe = e;
3180 if (succe)
3182 /* Verify the reached block is only reached through succe.
3183 If there is only one edge we can spare us the dominator
3184 check and iterate directly. */
3185 if (EDGE_COUNT (succe->dest->preds) > 1)
3187 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3188 if (e != succe
3189 && (e->flags & EDGE_EXECUTABLE))
3191 succe = NULL;
3192 break;
3195 if (succe)
3197 bb2 = succe->dest;
3199 /* Re-do the dominance check with changed bb2. */
3200 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3201 return true;
3205 /* We could now iterate updating bb1 / bb2. */
3206 return false;
3209 /* Set the value number of FROM to TO, return true if it has changed
3210 as a result. */
3212 static inline bool
3213 set_ssa_val_to (tree from, tree to)
3215 tree currval = SSA_VAL (from);
3216 HOST_WIDE_INT toff, coff;
3218 /* The only thing we allow as value numbers are ssa_names
3219 and invariants. So assert that here. We don't allow VN_TOP
3220 as visiting a stmt should produce a value-number other than
3221 that.
3222 ??? Still VN_TOP can happen for unreachable code, so force
3223 it to varying in that case. Not all code is prepared to
3224 get VN_TOP on valueization. */
3225 if (to == VN_TOP)
3227 if (dump_file && (dump_flags & TDF_DETAILS))
3228 fprintf (dump_file, "Forcing value number to varying on "
3229 "receiving VN_TOP\n");
3230 to = from;
3233 gcc_assert (to != NULL_TREE
3234 && ((TREE_CODE (to) == SSA_NAME
3235 && (to == from || SSA_VAL (to) == to))
3236 || is_gimple_min_invariant (to)));
3238 if (from != to)
3240 if (currval == from)
3242 if (dump_file && (dump_flags & TDF_DETAILS))
3244 fprintf (dump_file, "Not changing value number of ");
3245 print_generic_expr (dump_file, from, 0);
3246 fprintf (dump_file, " from VARYING to ");
3247 print_generic_expr (dump_file, to, 0);
3248 fprintf (dump_file, "\n");
3250 return false;
3252 else if (TREE_CODE (to) == SSA_NAME
3253 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3254 to = from;
3257 if (dump_file && (dump_flags & TDF_DETAILS))
3259 fprintf (dump_file, "Setting value number of ");
3260 print_generic_expr (dump_file, from, 0);
3261 fprintf (dump_file, " to ");
3262 print_generic_expr (dump_file, to, 0);
3265 if (currval != to
3266 && !operand_equal_p (currval, to, 0)
3267 /* ??? For addresses involving volatile objects or types operand_equal_p
3268 does not reliably detect ADDR_EXPRs as equal. We know we are only
3269 getting invariant gimple addresses here, so can use
3270 get_addr_base_and_unit_offset to do this comparison. */
3271 && !(TREE_CODE (currval) == ADDR_EXPR
3272 && TREE_CODE (to) == ADDR_EXPR
3273 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3274 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3275 && coff == toff))
3277 /* If we equate two SSA names we have to make the side-band info
3278 of the leader conservative (and remember whatever original value
3279 was present). */
3280 if (TREE_CODE (to) == SSA_NAME)
3282 if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3283 && SSA_NAME_RANGE_INFO (to))
3285 if (SSA_NAME_IS_DEFAULT_DEF (to)
3286 || dominated_by_p_w_unex
3287 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3288 gimple_bb (SSA_NAME_DEF_STMT (to))))
3289 /* Keep the info from the dominator. */
3291 else if (SSA_NAME_IS_DEFAULT_DEF (from)
3292 || dominated_by_p_w_unex
3293 (gimple_bb (SSA_NAME_DEF_STMT (to)),
3294 gimple_bb (SSA_NAME_DEF_STMT (from))))
3296 /* Save old info. */
3297 if (! VN_INFO (to)->info.range_info)
3299 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3300 VN_INFO (to)->range_info_anti_range_p
3301 = SSA_NAME_ANTI_RANGE_P (to);
3303 /* Use that from the dominator. */
3304 SSA_NAME_RANGE_INFO (to) = SSA_NAME_RANGE_INFO (from);
3305 SSA_NAME_ANTI_RANGE_P (to) = SSA_NAME_ANTI_RANGE_P (from);
3307 else
3309 /* Save old info. */
3310 if (! VN_INFO (to)->info.range_info)
3312 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3313 VN_INFO (to)->range_info_anti_range_p
3314 = SSA_NAME_ANTI_RANGE_P (to);
3316 /* Rather than allocating memory and unioning the info
3317 just clear it. */
3318 SSA_NAME_RANGE_INFO (to) = NULL;
3321 else if (POINTER_TYPE_P (TREE_TYPE (to))
3322 && SSA_NAME_PTR_INFO (to))
3324 if (SSA_NAME_IS_DEFAULT_DEF (to)
3325 || dominated_by_p_w_unex
3326 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3327 gimple_bb (SSA_NAME_DEF_STMT (to))))
3328 /* Keep the info from the dominator. */
3330 else if (SSA_NAME_IS_DEFAULT_DEF (from)
3331 || dominated_by_p_w_unex
3332 (gimple_bb (SSA_NAME_DEF_STMT (to)),
3333 gimple_bb (SSA_NAME_DEF_STMT (from))))
3335 /* Save old info. */
3336 if (! VN_INFO (to)->info.ptr_info)
3337 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3338 /* Use that from the dominator. */
3339 SSA_NAME_PTR_INFO (to) = SSA_NAME_PTR_INFO (from);
3341 else if (! SSA_NAME_PTR_INFO (from)
3342 /* Handle the case of trivially equivalent info. */
3343 || memcmp (SSA_NAME_PTR_INFO (to),
3344 SSA_NAME_PTR_INFO (from),
3345 sizeof (ptr_info_def)) != 0)
3347 /* Save old info. */
3348 if (! VN_INFO (to)->info.ptr_info)
3349 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3350 /* Rather than allocating memory and unioning the info
3351 just clear it. */
3352 SSA_NAME_PTR_INFO (to) = NULL;
3357 VN_INFO (from)->valnum = to;
3358 if (dump_file && (dump_flags & TDF_DETAILS))
3359 fprintf (dump_file, " (changed)\n");
3360 return true;
3362 if (dump_file && (dump_flags & TDF_DETAILS))
3363 fprintf (dump_file, "\n");
3364 return false;
3367 /* Mark as processed all the definitions in the defining stmt of USE, or
3368 the USE itself. */
3370 static void
3371 mark_use_processed (tree use)
3373 ssa_op_iter iter;
3374 def_operand_p defp;
3375 gimple *stmt = SSA_NAME_DEF_STMT (use);
3377 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3379 VN_INFO (use)->use_processed = true;
3380 return;
3383 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3385 tree def = DEF_FROM_PTR (defp);
3387 VN_INFO (def)->use_processed = true;
3391 /* Set the value number of all definitions in STMT to themselves.
3392 Return true if a value number changed. */
3394 static bool
3395 defs_to_varying (gimple *stmt)
3397 bool changed = false;
3398 ssa_op_iter iter;
3399 def_operand_p defp;
3401 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3403 tree def = DEF_FROM_PTR (defp);
3404 changed |= set_ssa_val_to (def, def);
3406 return changed;
3409 /* Visit a copy between LHS and RHS, return true if the value number
3410 changed. */
3412 static bool
3413 visit_copy (tree lhs, tree rhs)
3415 /* Valueize. */
3416 rhs = SSA_VAL (rhs);
3418 return set_ssa_val_to (lhs, rhs);
3421 /* Visit a nary operator RHS, value number it, and return true if the
3422 value number of LHS has changed as a result. */
3424 static bool
3425 visit_nary_op (tree lhs, gimple *stmt)
3427 bool changed = false;
3428 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3430 if (result)
3431 changed = set_ssa_val_to (lhs, result);
3432 else
3434 changed = set_ssa_val_to (lhs, lhs);
3435 vn_nary_op_insert_stmt (stmt, lhs);
3438 return changed;
3441 /* Visit a call STMT storing into LHS. Return true if the value number
3442 of the LHS has changed as a result. */
3444 static bool
3445 visit_reference_op_call (tree lhs, gcall *stmt)
3447 bool changed = false;
3448 struct vn_reference_s vr1;
3449 vn_reference_t vnresult = NULL;
3450 tree vdef = gimple_vdef (stmt);
3452 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3453 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3454 lhs = NULL_TREE;
3456 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3457 if (vnresult)
3459 if (vnresult->result_vdef && vdef)
3460 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3462 if (!vnresult->result && lhs)
3463 vnresult->result = lhs;
3465 if (vnresult->result && lhs)
3466 changed |= set_ssa_val_to (lhs, vnresult->result);
3468 else
3470 vn_reference_t vr2;
3471 vn_reference_s **slot;
3472 if (vdef)
3473 changed |= set_ssa_val_to (vdef, vdef);
3474 if (lhs)
3475 changed |= set_ssa_val_to (lhs, lhs);
3476 vr2 = current_info->references_pool->allocate ();
3477 vr2->vuse = vr1.vuse;
3478 /* As we are not walking the virtual operand chain we know the
3479 shared_lookup_references are still original so we can re-use
3480 them here. */
3481 vr2->operands = vr1.operands.copy ();
3482 vr2->type = vr1.type;
3483 vr2->set = vr1.set;
3484 vr2->hashcode = vr1.hashcode;
3485 vr2->result = lhs;
3486 vr2->result_vdef = vdef;
3487 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3488 INSERT);
3489 gcc_assert (!*slot);
3490 *slot = vr2;
3493 return changed;
3496 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3497 and return true if the value number of the LHS has changed as a result. */
3499 static bool
3500 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3502 bool changed = false;
3503 tree last_vuse;
3504 tree result;
3506 last_vuse = gimple_vuse (stmt);
3507 last_vuse_ptr = &last_vuse;
3508 result = vn_reference_lookup (op, gimple_vuse (stmt),
3509 default_vn_walk_kind, NULL, true);
3510 last_vuse_ptr = NULL;
3512 /* We handle type-punning through unions by value-numbering based
3513 on offset and size of the access. Be prepared to handle a
3514 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
3515 if (result
3516 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3518 /* We will be setting the value number of lhs to the value number
3519 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3520 So first simplify and lookup this expression to see if it
3521 is already available. */
3522 code_helper rcode = VIEW_CONVERT_EXPR;
3523 tree ops[3] = { result };
3524 result = vn_nary_build_or_lookup (rcode, TREE_TYPE (op), ops);
3527 if (result)
3528 changed = set_ssa_val_to (lhs, result);
3529 else
3531 changed = set_ssa_val_to (lhs, lhs);
3532 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3535 return changed;
3539 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3540 and return true if the value number of the LHS has changed as a result. */
3542 static bool
3543 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3545 bool changed = false;
3546 vn_reference_t vnresult = NULL;
3547 tree result, assign;
3548 bool resultsame = false;
3549 tree vuse = gimple_vuse (stmt);
3550 tree vdef = gimple_vdef (stmt);
3552 if (TREE_CODE (op) == SSA_NAME)
3553 op = SSA_VAL (op);
3555 /* First we want to lookup using the *vuses* from the store and see
3556 if the last store to this location with the same address
3557 had the same value.
3559 The vuses represent the memory state before the store. If the
3560 memory state, address, and value of the store is the same as the
3561 last store to this location, then this store will produce the
3562 same memory state as that store.
3564 In this case the vdef versions for this store are value numbered to those
3565 vuse versions, since they represent the same memory state after
3566 this store.
3568 Otherwise, the vdefs for the store are used when inserting into
3569 the table, since the store generates a new memory state. */
3571 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL, false);
3573 if (result)
3575 if (TREE_CODE (result) == SSA_NAME)
3576 result = SSA_VAL (result);
3577 resultsame = expressions_equal_p (result, op);
3580 if ((!result || !resultsame)
3581 /* Only perform the following when being called from PRE
3582 which embeds tail merging. */
3583 && default_vn_walk_kind == VN_WALK)
3585 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3586 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3587 if (vnresult)
3589 VN_INFO (vdef)->use_processed = true;
3590 return set_ssa_val_to (vdef, vnresult->result_vdef);
3594 if (!result || !resultsame)
3596 if (dump_file && (dump_flags & TDF_DETAILS))
3598 fprintf (dump_file, "No store match\n");
3599 fprintf (dump_file, "Value numbering store ");
3600 print_generic_expr (dump_file, lhs, 0);
3601 fprintf (dump_file, " to ");
3602 print_generic_expr (dump_file, op, 0);
3603 fprintf (dump_file, "\n");
3605 /* Have to set value numbers before insert, since insert is
3606 going to valueize the references in-place. */
3607 if (vdef)
3609 changed |= set_ssa_val_to (vdef, vdef);
3612 /* Do not insert structure copies into the tables. */
3613 if (is_gimple_min_invariant (op)
3614 || is_gimple_reg (op))
3615 vn_reference_insert (lhs, op, vdef, NULL);
3617 /* Only perform the following when being called from PRE
3618 which embeds tail merging. */
3619 if (default_vn_walk_kind == VN_WALK)
3621 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3622 vn_reference_insert (assign, lhs, vuse, vdef);
3625 else
3627 /* We had a match, so value number the vdef to have the value
3628 number of the vuse it came from. */
3630 if (dump_file && (dump_flags & TDF_DETAILS))
3631 fprintf (dump_file, "Store matched earlier value,"
3632 "value numbering store vdefs to matching vuses.\n");
3634 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3637 return changed;
3640 /* Visit and value number PHI, return true if the value number
3641 changed. */
3643 static bool
3644 visit_phi (gimple *phi)
3646 bool changed = false;
3647 tree result;
3648 tree sameval = VN_TOP;
3649 bool allsame = true;
3650 unsigned n_executable = 0;
3652 /* TODO: We could check for this in init_sccvn, and replace this
3653 with a gcc_assert. */
3654 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3655 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3657 /* See if all non-TOP arguments have the same value. TOP is
3658 equivalent to everything, so we can ignore it. */
3659 edge_iterator ei;
3660 edge e;
3661 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3662 if (e->flags & EDGE_EXECUTABLE)
3664 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3666 ++n_executable;
3667 if (TREE_CODE (def) == SSA_NAME)
3668 def = SSA_VAL (def);
3669 if (def == VN_TOP)
3670 continue;
3671 if (sameval == VN_TOP)
3672 sameval = def;
3673 else if (!expressions_equal_p (def, sameval))
3675 allsame = false;
3676 break;
3680 /* If none of the edges was executable or all incoming values are
3681 undefined, keep the value-number at VN_TOP. If only a single edge
3682 is executable, use its value. */
3683 if (sameval == VN_TOP
3684 || n_executable == 1)
3685 return set_ssa_val_to (PHI_RESULT (phi), sameval);
3687 /* First see if it is equivalent to a phi node in this block. We prefer
3688 this as it allows IV elimination - see PRs 66502 and 67167. */
3689 result = vn_phi_lookup (phi);
3690 if (result)
3691 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3692 /* Otherwise all value numbered to the same value, the phi node has that
3693 value. */
3694 else if (allsame)
3695 changed = set_ssa_val_to (PHI_RESULT (phi), sameval);
3696 else
3698 vn_phi_insert (phi, PHI_RESULT (phi));
3699 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3702 return changed;
3705 /* Try to simplify RHS using equivalences and constant folding. */
3707 static tree
3708 try_to_simplify (gassign *stmt)
3710 enum tree_code code = gimple_assign_rhs_code (stmt);
3711 tree tem;
3713 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
3714 in this case, there is no point in doing extra work. */
3715 if (code == SSA_NAME)
3716 return NULL_TREE;
3718 /* First try constant folding based on our current lattice. */
3719 mprts_hook = vn_lookup_simplify_result;
3720 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3721 mprts_hook = NULL;
3722 if (tem
3723 && (TREE_CODE (tem) == SSA_NAME
3724 || is_gimple_min_invariant (tem)))
3725 return tem;
3727 return NULL_TREE;
3730 /* Visit and value number USE, return true if the value number
3731 changed. */
3733 static bool
3734 visit_use (tree use)
3736 bool changed = false;
3737 gimple *stmt = SSA_NAME_DEF_STMT (use);
3739 mark_use_processed (use);
3741 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3742 if (dump_file && (dump_flags & TDF_DETAILS)
3743 && !SSA_NAME_IS_DEFAULT_DEF (use))
3745 fprintf (dump_file, "Value numbering ");
3746 print_generic_expr (dump_file, use, 0);
3747 fprintf (dump_file, " stmt = ");
3748 print_gimple_stmt (dump_file, stmt, 0, 0);
3751 /* Handle uninitialized uses. */
3752 if (SSA_NAME_IS_DEFAULT_DEF (use))
3753 changed = set_ssa_val_to (use, use);
3754 else if (gimple_code (stmt) == GIMPLE_PHI)
3755 changed = visit_phi (stmt);
3756 else if (gimple_has_volatile_ops (stmt))
3757 changed = defs_to_varying (stmt);
3758 else if (gassign *ass = dyn_cast <gassign *> (stmt))
3760 enum tree_code code = gimple_assign_rhs_code (ass);
3761 tree lhs = gimple_assign_lhs (ass);
3762 tree rhs1 = gimple_assign_rhs1 (ass);
3763 tree simplified;
3765 /* Shortcut for copies. Simplifying copies is pointless,
3766 since we copy the expression and value they represent. */
3767 if (code == SSA_NAME
3768 && TREE_CODE (lhs) == SSA_NAME)
3770 changed = visit_copy (lhs, rhs1);
3771 goto done;
3773 simplified = try_to_simplify (ass);
3774 if (simplified)
3776 if (dump_file && (dump_flags & TDF_DETAILS))
3778 fprintf (dump_file, "RHS ");
3779 print_gimple_expr (dump_file, ass, 0, 0);
3780 fprintf (dump_file, " simplified to ");
3781 print_generic_expr (dump_file, simplified, 0);
3782 fprintf (dump_file, "\n");
3785 /* Setting value numbers to constants will occasionally
3786 screw up phi congruence because constants are not
3787 uniquely associated with a single ssa name that can be
3788 looked up. */
3789 if (simplified
3790 && is_gimple_min_invariant (simplified)
3791 && TREE_CODE (lhs) == SSA_NAME)
3793 changed = set_ssa_val_to (lhs, simplified);
3794 goto done;
3796 else if (simplified
3797 && TREE_CODE (simplified) == SSA_NAME
3798 && TREE_CODE (lhs) == SSA_NAME)
3800 changed = visit_copy (lhs, simplified);
3801 goto done;
3804 if ((TREE_CODE (lhs) == SSA_NAME
3805 /* We can substitute SSA_NAMEs that are live over
3806 abnormal edges with their constant value. */
3807 && !(gimple_assign_copy_p (ass)
3808 && is_gimple_min_invariant (rhs1))
3809 && !(simplified
3810 && is_gimple_min_invariant (simplified))
3811 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3812 /* Stores or copies from SSA_NAMEs that are live over
3813 abnormal edges are a problem. */
3814 || (code == SSA_NAME
3815 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3816 changed = defs_to_varying (ass);
3817 else if (REFERENCE_CLASS_P (lhs)
3818 || DECL_P (lhs))
3819 changed = visit_reference_op_store (lhs, rhs1, ass);
3820 else if (TREE_CODE (lhs) == SSA_NAME)
3822 if ((gimple_assign_copy_p (ass)
3823 && is_gimple_min_invariant (rhs1))
3824 || (simplified
3825 && is_gimple_min_invariant (simplified)))
3827 if (simplified)
3828 changed = set_ssa_val_to (lhs, simplified);
3829 else
3830 changed = set_ssa_val_to (lhs, rhs1);
3832 else
3834 /* Visit the original statement. */
3835 switch (vn_get_stmt_kind (ass))
3837 case VN_NARY:
3838 changed = visit_nary_op (lhs, ass);
3839 break;
3840 case VN_REFERENCE:
3841 changed = visit_reference_op_load (lhs, rhs1, ass);
3842 break;
3843 default:
3844 changed = defs_to_varying (ass);
3845 break;
3849 else
3850 changed = defs_to_varying (ass);
3852 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
3854 tree lhs = gimple_call_lhs (call_stmt);
3855 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3857 /* Try constant folding based on our current lattice. */
3858 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
3859 vn_valueize);
3860 if (simplified)
3862 if (dump_file && (dump_flags & TDF_DETAILS))
3864 fprintf (dump_file, "call ");
3865 print_gimple_expr (dump_file, call_stmt, 0, 0);
3866 fprintf (dump_file, " simplified to ");
3867 print_generic_expr (dump_file, simplified, 0);
3868 fprintf (dump_file, "\n");
3871 /* Setting value numbers to constants will occasionally
3872 screw up phi congruence because constants are not
3873 uniquely associated with a single ssa name that can be
3874 looked up. */
3875 if (simplified
3876 && is_gimple_min_invariant (simplified))
3878 changed = set_ssa_val_to (lhs, simplified);
3879 if (gimple_vdef (call_stmt))
3880 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
3881 SSA_VAL (gimple_vuse (call_stmt)));
3882 goto done;
3884 else if (simplified
3885 && TREE_CODE (simplified) == SSA_NAME)
3887 changed = visit_copy (lhs, simplified);
3888 if (gimple_vdef (call_stmt))
3889 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
3890 SSA_VAL (gimple_vuse (call_stmt)));
3891 goto done;
3893 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3895 changed = defs_to_varying (call_stmt);
3896 goto done;
3900 if (!gimple_call_internal_p (call_stmt)
3901 && (/* Calls to the same function with the same vuse
3902 and the same operands do not necessarily return the same
3903 value, unless they're pure or const. */
3904 gimple_call_flags (call_stmt) & (ECF_PURE | ECF_CONST)
3905 /* If calls have a vdef, subsequent calls won't have
3906 the same incoming vuse. So, if two calls with a vdef have the
3907 same vuse, we know they are not consecutive.
3908 We can therefore give two calls to the same function with the
3909 same vuse and the same operands the same value number,
3910 because there is no code in the program that could
3911 compare the two values... */
3912 || (gimple_vdef (call_stmt)
3913 /* ... unless the call returns a pointer which does
3914 not alias with anything else, in which case the
3915 information that the values are distinct is encoded
3916 in the IL. */
3917 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
3918 /* Only perform the following when being called from PRE
3919 which embeds tail merging. */
3920 && default_vn_walk_kind == VN_WALK)))
3921 changed = visit_reference_op_call (lhs, call_stmt);
3922 else
3923 changed = defs_to_varying (call_stmt);
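/* A source-level sketch of the pure/const case above (assumed example,
   not from this file):

     int sum (const int *p, int n) __attribute__ ((pure));

   Two calls sum (a, n) seen with the same operands and the same VUSE
   can be given one value number, because a pure function's result
   depends only on its arguments and on memory, and the shared VUSE
   says memory did not change between the calls.  Calls that clobber
   memory are only treated this way under the PRE/tail-merging walk
   checked above.  */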
3925 else
3926 changed = defs_to_varying (stmt);
3927 done:
3928 return changed;
3931 /* Compare two operands by reverse postorder index. */
3933 static int
3934 compare_ops (const void *pa, const void *pb)
3936 const tree opa = *((const tree *)pa);
3937 const tree opb = *((const tree *)pb);
3938 gimple *opstmta = SSA_NAME_DEF_STMT (opa);
3939 gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
3940 basic_block bba;
3941 basic_block bbb;
3943 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3944 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3945 else if (gimple_nop_p (opstmta))
3946 return -1;
3947 else if (gimple_nop_p (opstmtb))
3948 return 1;
3950 bba = gimple_bb (opstmta);
3951 bbb = gimple_bb (opstmtb);
3953 if (!bba && !bbb)
3954 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3955 else if (!bba)
3956 return -1;
3957 else if (!bbb)
3958 return 1;
3960 if (bba == bbb)
3962 if (gimple_code (opstmta) == GIMPLE_PHI
3963 && gimple_code (opstmtb) == GIMPLE_PHI)
3964 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3965 else if (gimple_code (opstmta) == GIMPLE_PHI)
3966 return -1;
3967 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3968 return 1;
3969 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3970 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3971 else
3972 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3974 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3977 /* Sort an array containing members of a strongly connected component
3978 SCC so that the members are ordered by RPO number.
3979 This means that when the sort is complete, iterating through the
3980 array will give you the members in RPO order. */
3982 static void
3983 sort_scc (vec<tree> scc)
3985 scc.qsort (compare_ops);
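/* Summarizing the comparator above (illustrative): default definitions
   sort first, then definitions without a block; within one block PHI
   results come before other statements, which are ordered by their
   uids; across blocks the smaller RPO number, i.e. the earlier block,
   comes first.  */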
3988 /* Insert the no longer used nary ONARY into the hash tables INFO. */
3990 static void
3991 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3993 size_t size = sizeof_vn_nary_op (onary->length);
3994 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3995 &info->nary_obstack);
3996 memcpy (nary, onary, size);
3997 vn_nary_op_insert_into (nary, info->nary, false);
4000 /* Insert the no longer used phi OPHI into the hash tables INFO. */
4002 static void
4003 copy_phi (vn_phi_t ophi, vn_tables_t info)
4005 vn_phi_t phi = info->phis_pool->allocate ();
4006 vn_phi_s **slot;
4007 memcpy (phi, ophi, sizeof (*phi));
4008 ophi->phiargs.create (0);
4009 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
4010 gcc_assert (!*slot);
4011 *slot = phi;
4014 /* Insert the no longer used reference OREF into the hash tables INFO. */
4016 static void
4017 copy_reference (vn_reference_t oref, vn_tables_t info)
4019 vn_reference_t ref;
4020 vn_reference_s **slot;
4021 ref = info->references_pool->allocate ();
4022 memcpy (ref, oref, sizeof (*ref));
4023 oref->operands.create (0);
4024 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
4025 if (*slot)
4026 free_reference (*slot);
4027 *slot = ref;
4030 /* Process a strongly connected component in the SSA graph. */
4032 static void
4033 process_scc (vec<tree> scc)
4035 tree var;
4036 unsigned int i;
4037 unsigned int iterations = 0;
4038 bool changed = true;
4039 vn_nary_op_iterator_type hin;
4040 vn_phi_iterator_type hip;
4041 vn_reference_iterator_type hir;
4042 vn_nary_op_t nary;
4043 vn_phi_t phi;
4044 vn_reference_t ref;
4046 /* If the SCC has a single member, just visit it. */
4047 if (scc.length () == 1)
4049 tree use = scc[0];
4050 if (VN_INFO (use)->use_processed)
4051 return;
4052 /* We need to make sure it doesn't form a cycle itself, which can
4053 happen for self-referential PHI nodes. In that case we would
4054 end up inserting an expression with VN_TOP operands into the
4055 valid table which makes us derive bogus equivalences later.
4056 The cheapest way to check this is to assume it for all PHI nodes. */
4057 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
4058 /* Fallthru to iteration. */ ;
4059 else
4061 visit_use (use);
4062 return;
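/* An assumed example of such a self-referential single-member SCC: in
   a loop that never modifies x, the header PHI can look like

     x_1 = PHI <5(2), x_1(3)>

   i.e. it uses its own result on the back edge, so it must go through
   the iteration below instead of being visited just once.  */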
4066 if (dump_file && (dump_flags & TDF_DETAILS))
4067 print_scc (dump_file, scc);
4069 /* Iterate over the SCC with the optimistic table until it stops
4070 changing. */
4071 current_info = optimistic_info;
4072 while (changed)
4074 changed = false;
4075 iterations++;
4076 if (dump_file && (dump_flags & TDF_DETAILS))
4077 fprintf (dump_file, "Starting iteration %d\n", iterations);
4078 /* As we are value-numbering optimistically we have to
4079 clear the expression tables and the simplified expressions
4080 in each iteration until we converge. */
4081 optimistic_info->nary->empty ();
4082 optimistic_info->phis->empty ();
4083 optimistic_info->references->empty ();
4084 obstack_free (&optimistic_info->nary_obstack, NULL);
4085 gcc_obstack_init (&optimistic_info->nary_obstack);
4086 optimistic_info->phis_pool->release ();
4087 optimistic_info->references_pool->release ();
4088 FOR_EACH_VEC_ELT (scc, i, var)
4089 gcc_assert (!VN_INFO (var)->needs_insertion
4090 && VN_INFO (var)->expr == NULL);
4091 FOR_EACH_VEC_ELT (scc, i, var)
4092 changed |= visit_use (var);
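/* A small worked example of the optimistic iteration (assumed, for
   illustration): for the cycle

     i_1 = PHI <0(2), i_2(3)>
     i_2 = i_1 + 0;

   the first pass still sees i_2 as VN_TOP, so the PHI optimistically
   gets value 0 and i_2 then also gets value 0; the second pass
   re-visits the PHI as PHI <0, 0>, nothing changes, and the SCC has
   converged with both names valued 0.  */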
4095 if (dump_file && (dump_flags & TDF_DETAILS))
4096 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4097 statistics_histogram_event (cfun, "SCC iterations", iterations);
4099 /* Finally, copy the contents of the no longer used optimistic
4100 table to the valid table. */
4101 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
4102 copy_nary (nary, valid_info);
4103 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
4104 copy_phi (phi, valid_info);
4105 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
4106 ref, vn_reference_t, hir)
4107 copy_reference (ref, valid_info);
4109 current_info = valid_info;
4113 /* Pop the components of the found SCC for NAME off the SCC stack
4114 and process them. Returns true if all went well, false if
4115 we run into resource limits. */
4117 static bool
4118 extract_and_process_scc_for_name (tree name)
4120 auto_vec<tree> scc;
4121 tree x;
4123 /* Found an SCC, pop the components off the SCC stack and
4124 process them. */
4127 x = sccstack.pop ();
4129 VN_INFO (x)->on_sccstack = false;
4130 scc.safe_push (x);
4131 } while (x != name);
4133 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
4134 if (scc.length ()
4135 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4137 if (dump_file)
4138 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
4139 "SCC size %u exceeding %u\n", scc.length (),
4140 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4142 return false;
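/* The limit used above corresponds to the --param sccvn-max-scc-size
   knob, so a user hitting this bail-out can raise it, e.g. (an assumed
   invocation):

     gcc -O2 --param sccvn-max-scc-size=100000 big.c  */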
4145 if (scc.length () > 1)
4146 sort_scc (scc);
4148 process_scc (scc);
4150 return true;
4153 /* Depth first search on NAME to discover and process SCC's in the SSA
4154 graph.
4155 Execution of this algorithm relies on the fact that the SCC's are
4156 popped off the stack in topological order.
4157 Returns true if successful, false if we stopped processing SCC's due
4158 to resource constraints. */
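/* A minimal recursive sketch of the walk done below (illustrative
   pseudo-code only; the real function replaces the recursion with the
   explicit namevec/itervec stacks):

     dfs (name):
       dfsnum[name] = low[name] = next_dfs_num++;  push name on sccstack
       for each SSA_NAME use of name's defining stmt:
         if use is unvisited:
           dfs (use); low[name] = min (low[name], low[use])
         else if use is still on sccstack:
           low[name] = min (low[name], dfsnum[use])
       if low[name] == dfsnum[name]:
         pop names up to and including name; that is one SCC  */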
4160 static bool
4161 DFS (tree name)
4163 auto_vec<ssa_op_iter> itervec;
4164 auto_vec<tree> namevec;
4165 use_operand_p usep = NULL;
4166 gimple *defstmt;
4167 tree use;
4168 ssa_op_iter iter;
4170 start_over:
4171 /* SCC info */
4172 VN_INFO (name)->dfsnum = next_dfs_num++;
4173 VN_INFO (name)->visited = true;
4174 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4176 sccstack.safe_push (name);
4177 VN_INFO (name)->on_sccstack = true;
4178 defstmt = SSA_NAME_DEF_STMT (name);
4180 /* Recursively DFS on our operands, looking for SCC's. */
4181 if (!gimple_nop_p (defstmt))
4183 /* Push a new iterator. */
4184 if (gphi *phi = dyn_cast <gphi *> (defstmt))
4185 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4186 else
4187 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4189 else
4190 clear_and_done_ssa_iter (&iter);
4192 while (1)
4194 /* If we are done processing uses of a name, go up the stack
4195 of iterators and process SCCs as we found them. */
4196 if (op_iter_done (&iter))
4198 /* See if we found an SCC. */
4199 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4200 if (!extract_and_process_scc_for_name (name))
4201 return false;
4203 /* Check if we are done. */
4204 if (namevec.is_empty ())
4205 return true;
4207 /* Restore the last use walker and continue walking there. */
4208 use = name;
4209 name = namevec.pop ();
4210 memcpy (&iter, &itervec.last (),
4211 sizeof (ssa_op_iter));
4212 itervec.pop ();
4213 goto continue_walking;
4216 use = USE_FROM_PTR (usep);
4218 /* Since we handle phi nodes, we will sometimes get
4219 invariants in the use expression. */
4220 if (TREE_CODE (use) == SSA_NAME)
4222 if (! (VN_INFO (use)->visited))
4224 /* Recurse by pushing the current use walking state on
4225 the stack and starting over. */
4226 itervec.safe_push (iter);
4227 namevec.safe_push (name);
4228 name = use;
4229 goto start_over;
4231 continue_walking:
4232 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4233 VN_INFO (use)->low);
4235 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4236 && VN_INFO (use)->on_sccstack)
4238 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4239 VN_INFO (name)->low);
4243 usep = op_iter_next_use (&iter);
4247 /* Allocate a value number table. */
4249 static void
4250 allocate_vn_table (vn_tables_t table)
4252 table->phis = new vn_phi_table_type (23);
4253 table->nary = new vn_nary_op_table_type (23);
4254 table->references = new vn_reference_table_type (23);
4256 gcc_obstack_init (&table->nary_obstack);
4257 table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4258 table->references_pool = new object_allocator<vn_reference_s>
4259 ("VN references");
4262 /* Free a value number table. */
4264 static void
4265 free_vn_table (vn_tables_t table)
4267 delete table->phis;
4268 table->phis = NULL;
4269 delete table->nary;
4270 table->nary = NULL;
4271 delete table->references;
4272 table->references = NULL;
4273 obstack_free (&table->nary_obstack, NULL);
4274 delete table->phis_pool;
4275 delete table->references_pool;
4278 static void
4279 init_scc_vn (void)
4281 size_t i;
4282 int j;
4283 int *rpo_numbers_temp;
4285 calculate_dominance_info (CDI_DOMINATORS);
4286 mark_dfs_back_edges ();
4288 sccstack.create (0);
4289 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4291 constant_value_ids = BITMAP_ALLOC (NULL);
4293 next_dfs_num = 1;
4294 next_value_id = 1;
4296 vn_ssa_aux_table.create (num_ssa_names + 1);
4297 /* VEC_alloc doesn't actually grow it to the right size; it just
4298 preallocates the space to do so. */
4299 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4300 gcc_obstack_init (&vn_ssa_aux_obstack);
4302 shared_lookup_phiargs.create (0);
4303 shared_lookup_references.create (0);
4304 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4305 rpo_numbers_temp =
4306 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4307 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4309 /* RPO_NUMBERS_TEMP is the RPO ordering itself: rpo[i] = bb means
4310 that the i'th block in RPO order is bb. We want to map block
4311 indices to RPO numbers, so we need to invert this array. */
4312 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4313 rpo_numbers[rpo_numbers_temp[j]] = j;
4315 XDELETE (rpo_numbers_temp);
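/* A tiny worked example (block indices assumed): if the walk produced
   rpo_numbers_temp = { 2, 5, 3 }, the loop above stores
   rpo_numbers[2] = 0, rpo_numbers[5] = 1 and rpo_numbers[3] = 2,
   i.e. each basic block index now maps to its position in the
   reverse post-order.  */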
4317 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
4319 renumber_gimple_stmt_uids ();
4321 /* Create the valid and optimistic value numbering tables. */
4322 valid_info = XCNEW (struct vn_tables_s);
4323 allocate_vn_table (valid_info);
4324 optimistic_info = XCNEW (struct vn_tables_s);
4325 allocate_vn_table (optimistic_info);
4326 current_info = valid_info;
4328 /* Create the VN_INFO structures, and initialize value numbers to
4329 TOP or VARYING for parameters. */
4330 for (i = 1; i < num_ssa_names; i++)
4332 tree name = ssa_name (i);
4333 if (!name)
4334 continue;
4336 VN_INFO_GET (name)->valnum = VN_TOP;
4337 VN_INFO (name)->needs_insertion = false;
4338 VN_INFO (name)->expr = NULL;
4339 VN_INFO (name)->value_id = 0;
4341 if (!SSA_NAME_IS_DEFAULT_DEF (name))
4342 continue;
4344 switch (TREE_CODE (SSA_NAME_VAR (name)))
4346 case VAR_DECL:
4347 /* Undefined vars keep TOP. */
4348 break;
4350 case PARM_DECL:
4351 /* Parameters are VARYING but we can record a condition
4352 if we know it is a non-NULL pointer. */
4353 VN_INFO (name)->visited = true;
4354 VN_INFO (name)->valnum = name;
4355 if (POINTER_TYPE_P (TREE_TYPE (name))
4356 && nonnull_arg_p (SSA_NAME_VAR (name)))
4358 tree ops[2];
4359 ops[0] = name;
4360 ops[1] = build_int_cst (TREE_TYPE (name), 0);
4361 vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4362 boolean_true_node, 0);
4363 if (dump_file && (dump_flags & TDF_DETAILS))
4365 fprintf (dump_file, "Recording ");
4366 print_generic_expr (dump_file, name, TDF_SLIM);
4367 fprintf (dump_file, " != 0\n");
4370 break;
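/* A source-level illustration (assumed example): for

     void f (int *p) __attribute__ ((nonnull));

   the NE_EXPR record made above lets a later test such as
   "if (p_1(D) != 0)" value-number to true, so the NULL branch can be
   marked not executable by the dominator walk.  */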
4372 case RESULT_DECL:
4373 /* If the result is passed by invisible reference, the default
4374 def is initialized; otherwise it's uninitialized. */
4375 if (DECL_BY_REFERENCE (SSA_NAME_VAR (name)))
4377 VN_INFO (name)->visited = true;
4378 VN_INFO (name)->valnum = name;
4380 break;
4382 default:
4383 gcc_unreachable ();
4388 /* Restore SSA info that has been reset on value leaders. */
4390 void
4391 scc_vn_restore_ssa_info (void)
4393 for (unsigned i = 0; i < num_ssa_names; i++)
4395 tree name = ssa_name (i);
4396 if (name
4397 && has_VN_INFO (name))
4399 if (VN_INFO (name)->needs_insertion)
4401 else if (POINTER_TYPE_P (TREE_TYPE (name))
4402 && VN_INFO (name)->info.ptr_info)
4403 SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4404 else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4405 && VN_INFO (name)->info.range_info)
4407 SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4408 SSA_NAME_ANTI_RANGE_P (name)
4409 = VN_INFO (name)->range_info_anti_range_p;
4415 void
4416 free_scc_vn (void)
4418 size_t i;
4420 delete constant_to_value_id;
4421 constant_to_value_id = NULL;
4422 BITMAP_FREE (constant_value_ids);
4423 shared_lookup_phiargs.release ();
4424 shared_lookup_references.release ();
4425 XDELETEVEC (rpo_numbers);
4427 for (i = 0; i < num_ssa_names; i++)
4429 tree name = ssa_name (i);
4430 if (name
4431 && has_VN_INFO (name)
4432 && VN_INFO (name)->needs_insertion)
4433 release_ssa_name (name);
4435 obstack_free (&vn_ssa_aux_obstack, NULL);
4436 vn_ssa_aux_table.release ();
4438 sccstack.release ();
4439 free_vn_table (valid_info);
4440 XDELETE (valid_info);
4441 free_vn_table (optimistic_info);
4442 XDELETE (optimistic_info);
4444 BITMAP_FREE (const_parms);
4447 /* Set *ID according to RESULT. */
4449 static void
4450 set_value_id_for_result (tree result, unsigned int *id)
4452 if (result && TREE_CODE (result) == SSA_NAME)
4453 *id = VN_INFO (result)->value_id;
4454 else if (result && is_gimple_min_invariant (result))
4455 *id = get_or_alloc_constant_value_id (result);
4456 else
4457 *id = get_next_value_id ();
4460 /* Set the value ids in the valid hash tables. */
4462 static void
4463 set_hashtable_value_ids (void)
4465 vn_nary_op_iterator_type hin;
4466 vn_phi_iterator_type hip;
4467 vn_reference_iterator_type hir;
4468 vn_nary_op_t vno;
4469 vn_reference_t vr;
4470 vn_phi_t vp;
4472 /* Now set the value ids of the things we had put in the hash
4473 table. */
4475 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4476 set_value_id_for_result (vno->result, &vno->value_id);
4478 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4479 set_value_id_for_result (vp->result, &vp->value_id);
4481 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4482 hir)
4483 set_value_id_for_result (vr->result, &vr->value_id);
4486 class sccvn_dom_walker : public dom_walker
4488 public:
4489 sccvn_dom_walker ()
4490 : dom_walker (CDI_DOMINATORS, true), fail (false), cond_stack (0) {}
4492 virtual edge before_dom_children (basic_block);
4493 virtual void after_dom_children (basic_block);
4495 void record_cond (basic_block,
4496 enum tree_code code, tree lhs, tree rhs, bool value);
4497 void record_conds (basic_block,
4498 enum tree_code code, tree lhs, tree rhs, bool value);
4500 bool fail;
4501 auto_vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4502 cond_stack;
4505 /* Record a temporary condition for the BB and its dominated blocks. */
4507 void
4508 sccvn_dom_walker::record_cond (basic_block bb,
4509 enum tree_code code, tree lhs, tree rhs,
4510 bool value)
4512 tree ops[2] = { lhs, rhs };
4513 vn_nary_op_t old = NULL;
4514 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4515 current_info->nary->remove_elt_with_hash (old, old->hashcode);
4516 vn_nary_op_t cond
4517 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4518 value
4519 ? boolean_true_node
4520 : boolean_false_node, 0);
4521 if (dump_file && (dump_flags & TDF_DETAILS))
4523 fprintf (dump_file, "Recording temporarily ");
4524 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4525 fprintf (dump_file, " %s ", get_tree_code_name (code));
4526 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4527 fprintf (dump_file, " == %s%s\n",
4528 value ? "true" : "false",
4529 old ? " (old entry saved)" : "");
4531 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4534 /* Record temporary conditions for the BB and its dominated blocks
4535 according to LHS CODE RHS == VALUE and its dominated conditions. */
4537 void
4538 sccvn_dom_walker::record_conds (basic_block bb,
4539 enum tree_code code, tree lhs, tree rhs,
4540 bool value)
4542 /* Record the original condition. */
4543 record_cond (bb, code, lhs, rhs, value);
4545 if (!value)
4546 return;
4548 /* Record dominated conditions if the condition is true. Note that
4549 the inversion is already recorded. */
4550 switch (code)
4552 case LT_EXPR:
4553 case GT_EXPR:
4554 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4555 record_cond (bb, NE_EXPR, lhs, rhs, true);
4556 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4557 break;
4559 case EQ_EXPR:
4560 record_cond (bb, LE_EXPR, lhs, rhs, true);
4561 record_cond (bb, GE_EXPR, lhs, rhs, true);
4562 record_cond (bb, LT_EXPR, lhs, rhs, false);
4563 record_cond (bb, GT_EXPR, lhs, rhs, false);
4564 break;
4566 default:
4567 break;
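/* For example (assumed source): when "i < n" is recorded as true,
   the switch above also records i <= n as true, i != n as true and
   i == n as false, so dominated blocks re-testing any of those forms
   can have the answer looked up directly.  */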
4571 /* Restore expressions and values derived from conditionals. */
4573 void
4574 sccvn_dom_walker::after_dom_children (basic_block bb)
4576 while (!cond_stack.is_empty ()
4577 && cond_stack.last ().first == bb)
4579 vn_nary_op_t cond = cond_stack.last ().second.first;
4580 vn_nary_op_t old = cond_stack.last ().second.second;
4581 current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4582 if (old)
4583 vn_nary_op_insert_into (old, current_info->nary, false);
4584 cond_stack.pop ();
4588 /* Value number all statements in BB. */
4590 edge
4591 sccvn_dom_walker::before_dom_children (basic_block bb)
4593 edge e;
4594 edge_iterator ei;
4596 if (fail)
4597 return NULL;
4599 if (dump_file && (dump_flags & TDF_DETAILS))
4600 fprintf (dump_file, "Visiting BB %d\n", bb->index);
4602 /* If we have a single predecessor record the equivalence from a
4603 possible condition on the predecessor edge. */
4604 edge pred_e = NULL;
4605 FOR_EACH_EDGE (e, ei, bb->preds)
4607 /* Ignore simple backedges from this block, to allow recording
4608 conditions in loop headers. */
4609 if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
4610 continue;
4611 if (! pred_e)
4612 pred_e = e;
4613 else
4615 pred_e = NULL;
4616 break;
4619 if (pred_e)
4621 /* Check if there are multiple executable successor edges in
4622 the source block. Otherwise there is no additional info
4623 to be recorded. */
4624 edge e2;
4625 FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4626 if (e2 != pred_e
4627 && e2->flags & EDGE_EXECUTABLE)
4628 break;
4629 if (e2 && (e2->flags & EDGE_EXECUTABLE))
4631 gimple *stmt = last_stmt (pred_e->src);
4632 if (stmt
4633 && gimple_code (stmt) == GIMPLE_COND)
4635 enum tree_code code = gimple_cond_code (stmt);
4636 tree lhs = gimple_cond_lhs (stmt);
4637 tree rhs = gimple_cond_rhs (stmt);
4638 record_conds (bb, code, lhs, rhs,
4639 (pred_e->flags & EDGE_TRUE_VALUE) != 0);
4640 code = invert_tree_comparison (code, HONOR_NANS (lhs));
4641 if (code != ERROR_MARK)
4642 record_conds (bb, code, lhs, rhs,
4643 (pred_e->flags & EDGE_TRUE_VALUE) == 0);
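/* An assumed illustration of what gets recorded here: for

     if (x_1 == 0) goto bb3; else goto bb4;

   bb3 (reached over the true edge) temporarily records x_1 == 0 as
   true and, via the inverted comparison, x_1 != 0 as false, while bb4
   records the opposite, provided the block has exactly one
   non-backedge predecessor edge as checked above.  */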
4648 /* Value-number all defs in the basic-block. */
4649 for (gphi_iterator gsi = gsi_start_phis (bb);
4650 !gsi_end_p (gsi); gsi_next (&gsi))
4652 gphi *phi = gsi.phi ();
4653 tree res = PHI_RESULT (phi);
4654 if (!VN_INFO (res)->visited
4655 && !DFS (res))
4657 fail = true;
4658 return NULL;
4661 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4662 !gsi_end_p (gsi); gsi_next (&gsi))
4664 ssa_op_iter i;
4665 tree op;
4666 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4667 if (!VN_INFO (op)->visited
4668 && !DFS (op))
4670 fail = true;
4671 return NULL;
4675 /* Finally look at the last stmt. */
4676 gimple *stmt = last_stmt (bb);
4677 if (!stmt)
4678 return NULL;
4680 enum gimple_code code = gimple_code (stmt);
4681 if (code != GIMPLE_COND
4682 && code != GIMPLE_SWITCH
4683 && code != GIMPLE_GOTO)
4684 return NULL;
4686 if (dump_file && (dump_flags & TDF_DETAILS))
4688 fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
4689 print_gimple_stmt (dump_file, stmt, 0, 0);
4692 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4693 if value-numbering can prove they are not reachable. Handling
4694 computed gotos is also possible. */
4695 tree val;
4696 switch (code)
4698 case GIMPLE_COND:
4700 tree lhs = vn_valueize (gimple_cond_lhs (stmt));
4701 tree rhs = vn_valueize (gimple_cond_rhs (stmt));
4702 val = gimple_simplify (gimple_cond_code (stmt),
4703 boolean_type_node, lhs, rhs,
4704 NULL, vn_valueize);
4705 /* If that didn't simplify to a constant, see if we have recorded
4706 temporary expressions from taken edges. */
4707 if (!val || TREE_CODE (val) != INTEGER_CST)
4709 tree ops[2];
4710 ops[0] = lhs;
4711 ops[1] = rhs;
4712 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
4713 boolean_type_node, ops, NULL);
4715 break;
4717 case GIMPLE_SWITCH:
4718 val = gimple_switch_index (as_a <gswitch *> (stmt));
4719 break;
4720 case GIMPLE_GOTO:
4721 val = gimple_goto_dest (stmt);
4722 break;
4723 default:
4724 gcc_unreachable ();
4726 if (!val)
4727 return NULL;
4729 edge taken = find_taken_edge (bb, vn_valueize (val));
4730 if (!taken)
4731 return NULL;
4733 if (dump_file && (dump_flags & TDF_DETAILS))
4734 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4735 "not executable\n", bb->index, bb->index, taken->dest->index);
4737 return taken;
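/* A minimal (assumed) example: if the switch index x_1 has been
   value-numbered to the constant 7, find_taken_edge returns the edge
   to the matching case label and the dominator walk marks every other
   outgoing edge of this block as not executable (see the dump message
   above).  */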
4740 /* Do SCCVN. Returns true if it finished, false if we bailed out
4741 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
4742 how we use the alias oracle walking during the VN process. */
4744 bool
4745 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4747 size_t i;
4749 default_vn_walk_kind = default_vn_walk_kind_;
4751 init_scc_vn ();
4753 /* Collect pointers we know point to readonly memory. */
4754 const_parms = BITMAP_ALLOC (NULL);
4755 tree fnspec = lookup_attribute ("fn spec",
4756 TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
4757 if (fnspec)
4759 fnspec = TREE_VALUE (TREE_VALUE (fnspec));
4760 i = 1;
4761 for (tree arg = DECL_ARGUMENTS (cfun->decl);
4762 arg; arg = DECL_CHAIN (arg), ++i)
4764 if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
4765 break;
4766 if (TREE_STRING_POINTER (fnspec)[i] == 'R'
4767 || TREE_STRING_POINTER (fnspec)[i] == 'r')
4769 tree name = ssa_default_def (cfun, arg);
4770 if (name)
4771 bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
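/* An assumed illustration of such an internal "fn spec" string: with
   the attribute value ".R" the character at index 0 (describing the
   return value) is skipped by the loop above, and the 'R' at index 1
   marks the first parameter as pointing to readonly memory, so its
   default-definition SSA name is recorded in const_parms.  */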
4776 /* Walk all blocks in dominator order, value-numbering stmts
4777 SSA defs and decide whether outgoing edges are not executable. */
4778 sccvn_dom_walker walker;
4779 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4780 if (walker.fail)
4782 free_scc_vn ();
4783 return false;
4786 /* Initialize the value ids and prune out remaining VN_TOPs
4787 from dead code. */
4788 for (i = 1; i < num_ssa_names; ++i)
4790 tree name = ssa_name (i);
4791 vn_ssa_aux_t info;
4792 if (!name)
4793 continue;
4794 info = VN_INFO (name);
4795 if (!info->visited)
4796 info->valnum = name;
4797 if (info->valnum == name
4798 || info->valnum == VN_TOP)
4799 info->value_id = get_next_value_id ();
4800 else if (is_gimple_min_invariant (info->valnum))
4801 info->value_id = get_or_alloc_constant_value_id (info->valnum);
4804 /* Propagate. */
4805 for (i = 1; i < num_ssa_names; ++i)
4807 tree name = ssa_name (i);
4808 vn_ssa_aux_t info;
4809 if (!name)
4810 continue;
4811 info = VN_INFO (name);
4812 if (TREE_CODE (info->valnum) == SSA_NAME
4813 && info->valnum != name
4814 && info->value_id != VN_INFO (info->valnum)->value_id)
4815 info->value_id = VN_INFO (info->valnum)->value_id;
4818 set_hashtable_value_ids ();
4820 if (dump_file && (dump_flags & TDF_DETAILS))
4822 fprintf (dump_file, "Value numbers:\n");
4823 for (i = 0; i < num_ssa_names; i++)
4825 tree name = ssa_name (i);
4826 if (name
4827 && VN_INFO (name)->visited
4828 && SSA_VAL (name) != name)
4830 print_generic_expr (dump_file, name, 0);
4831 fprintf (dump_file, " = ");
4832 print_generic_expr (dump_file, SSA_VAL (name), 0);
4833 fprintf (dump_file, "\n");
4838 return true;
4841 /* Return the maximum value id we have ever seen. */
4843 unsigned int
4844 get_max_value_id (void)
4846 return next_value_id;
4849 /* Return the next unique value id. */
4851 unsigned int
4852 get_next_value_id (void)
4854 return next_value_id++;
4858 /* Compare two expressions E1 and E2 and return true if they are equal. */
4860 bool
4861 expressions_equal_p (tree e1, tree e2)
4863 /* The obvious case. */
4864 if (e1 == e2)
4865 return true;
4867 /* If either one is VN_TOP consider them equal. */
4868 if (e1 == VN_TOP || e2 == VN_TOP)
4869 return true;
4871 /* If only one of them is null, they cannot be equal. */
4872 if (!e1 || !e2)
4873 return false;
4875 /* Now perform the actual comparison. */
4876 if (TREE_CODE (e1) == TREE_CODE (e2)
4877 && operand_equal_p (e1, e2, OEP_PURE_SAME))
4878 return true;
4880 return false;
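/* Treating VN_TOP as equal to anything is what lets the optimistic SCC
   iteration make progress: on a first visit a PHI such as
   PHI <0(2), i_2(3)>, with i_2 still at VN_TOP, has all arguments
   compare equal and can tentatively take the value 0 (an assumed
   example; see process_scc above).  */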
4884 /* Return true if the nary operation NARY may trap. This is a copy
4885 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
4887 bool
4888 vn_nary_may_trap (vn_nary_op_t nary)
4890 tree type;
4891 tree rhs2 = NULL_TREE;
4892 bool honor_nans = false;
4893 bool honor_snans = false;
4894 bool fp_operation = false;
4895 bool honor_trapv = false;
4896 bool handled, ret;
4897 unsigned i;
4899 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4900 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4901 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4903 type = nary->type;
4904 fp_operation = FLOAT_TYPE_P (type);
4905 if (fp_operation)
4907 honor_nans = flag_trapping_math && !flag_finite_math_only;
4908 honor_snans = flag_signaling_nans != 0;
4910 else if (INTEGRAL_TYPE_P (type)
4911 && TYPE_OVERFLOW_TRAPS (type))
4912 honor_trapv = true;
4914 if (nary->length >= 2)
4915 rhs2 = nary->op[1];
4916 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4917 honor_trapv,
4918 honor_nans, honor_snans, rhs2,
4919 &handled);
4920 if (handled
4921 && ret)
4922 return true;
4924 for (i = 0; i < nary->length; ++i)
4925 if (tree_could_trap_p (nary->op[i]))
4926 return true;
4928 return false;