Remove assert in get_def_bb_for_const
official-gcc.git: gcc/tree-ssa-sccvn.c
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2016 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "emit-rtl.h"
33 #include "cgraph.h"
34 #include "gimple-pretty-print.h"
35 #include "alias.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "cfganal.h"
39 #include "tree-inline.h"
40 #include "internal-fn.h"
41 #include "gimple-fold.h"
42 #include "tree-eh.h"
43 #include "gimplify.h"
44 #include "flags.h"
45 #include "dojump.h"
46 #include "explow.h"
47 #include "calls.h"
48 #include "varasm.h"
49 #include "stmt.h"
50 #include "expr.h"
51 #include "tree-dfa.h"
52 #include "tree-ssa.h"
53 #include "dumpfile.h"
54 #include "cfgloop.h"
55 #include "params.h"
56 #include "tree-ssa-propagate.h"
57 #include "tree-ssa-sccvn.h"
58 #include "tree-cfg.h"
59 #include "domwalk.h"
60 #include "gimple-iterator.h"
61 #include "gimple-match.h"
63 /* This algorithm is based on the SCC algorithm presented by Keith
64 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
65 (http://citeseer.ist.psu.edu/41805.html). In
66 straight line code, it is equivalent to a regular hash based value
67 numbering that is performed in reverse postorder.
69 For code with cycles, there are two alternatives, both of which
70 require keeping the hashtables separate from the actual list of
71 value numbers for SSA names.
73 1. Iterate value numbering in an RPO walk of the blocks, removing
74 all the entries from the hashtable after each iteration (but
75 keeping the SSA name->value number mapping between iterations).
76 Iterate until it does not change.
78 2. Perform value numbering as part of an SCC walk on the SSA graph,
79 iterating only the cycles in the SSA graph until they do not change
80 (using a separate, optimistic hashtable for value numbering the SCC
81 operands).
83 The second is not just faster in practice (because most SSA graph
84 cycles do not involve all the variables in the graph), it also has
85 some nice properties.
87 One of these nice properties is that when we pop an SCC off the
88 stack, we are guaranteed to have processed all the operands coming from
89 *outside of that SCC*, so we do not need to do anything special to
90 ensure they have value numbers.
92 Another nice property is that the SCC walk is done as part of a DFS
93 of the SSA graph, which makes it easy to perform combining and
94 simplifying operations at the same time.
96 The code below is deliberately written in a way that makes it easy
97 to separate the SCC walk from the other work it does.
99 In order to propagate constants through the code, we track which
100 expressions contain constants, and use those while folding. In
101 theory, we could also track expressions whose value numbers are
102 replaced, in case we end up folding based on expression
103 identities.
105 In order to value number memory, we assign value numbers to vuses.
106 This enables us to note that, for example, stores to the same
107 address of the same value from the same starting memory states are
108 equivalent.
109 TODO:
111 1. We can iterate only the changing portions of the SCC's, but
112 I have not seen an SCC big enough for this to be a win.
113 2. If you differentiate between phi nodes for loops and phi nodes
114 for if-then-else, you can properly consider phi nodes in different
115 blocks for equivalence.
116 3. We could value number vuses in more cases, particularly, whole
117 structure copies.  */
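/* As an informal illustration (a sketch added for exposition, not part of
   the original sources): for the loop-carried names

	 i_1 = PHI <0 (preheader), i_2 (latch)>
	 i_2 = i_1 + 0

   the SSA graph contains the cycle {i_1, i_2}.  During the optimistic
   iteration the back-edge argument of the PHI is still VN_TOP and is
   ignored, so i_1 is valued 0; i_2 then simplifies to 0 + 0 = 0; the next
   iteration sees a PHI whose arguments are all equal to 0 and nothing
   changes, so the SCC has reached a fixpoint and both names end up
   recorded with value 0 in the valid tables.  */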
121 static tree *last_vuse_ptr;
122 static vn_lookup_kind vn_walk_kind;
123 static vn_lookup_kind default_vn_walk_kind;
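/* Bitmap of SSA name versions of parameter default definitions that were
   determined to point to read-only memory; vn_reference_lookup_3 below
   uses it to disambiguate loads based on such parameters.  */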
124 bitmap const_parms;
126 /* vn_nary_op hashtable helpers. */
128 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
130 typedef vn_nary_op_s *compare_type;
131 static inline hashval_t hash (const vn_nary_op_s *);
132 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
135 /* Return the computed hashcode for nary operation P1. */
137 inline hashval_t
138 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
140 return vno1->hashcode;
143 /* Compare nary operations P1 and P2 and return true if they are
144 equivalent. */
146 inline bool
147 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
149 return vn_nary_op_eq (vno1, vno2);
152 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
153 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
156 /* vn_phi hashtable helpers. */
158 static int
159 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
161 struct vn_phi_hasher : pointer_hash <vn_phi_s>
163 static inline hashval_t hash (const vn_phi_s *);
164 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
165 static inline void remove (vn_phi_s *);
168 /* Return the computed hashcode for phi operation P1. */
170 inline hashval_t
171 vn_phi_hasher::hash (const vn_phi_s *vp1)
173 return vp1->hashcode;
176 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
178 inline bool
179 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
181 return vn_phi_eq (vp1, vp2);
184 /* Free a phi operation structure VP. */
186 inline void
187 vn_phi_hasher::remove (vn_phi_s *phi)
189 phi->phiargs.release ();
192 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
193 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
196 /* Compare two reference operands P1 and P2 for equality. Return true if
197 they are equal, and false otherwise. */
199 static int
200 vn_reference_op_eq (const void *p1, const void *p2)
202 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
203 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
205 return (vro1->opcode == vro2->opcode
206 /* We do not care for differences in type qualification. */
207 && (vro1->type == vro2->type
208 || (vro1->type && vro2->type
209 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
210 TYPE_MAIN_VARIANT (vro2->type))))
211 && expressions_equal_p (vro1->op0, vro2->op0)
212 && expressions_equal_p (vro1->op1, vro2->op1)
213 && expressions_equal_p (vro1->op2, vro2->op2));
216 /* Free a reference operation structure VP. */
218 static inline void
219 free_reference (vn_reference_s *vr)
221 vr->operands.release ();
225 /* vn_reference hashtable helpers. */
227 struct vn_reference_hasher : pointer_hash <vn_reference_s>
229 static inline hashval_t hash (const vn_reference_s *);
230 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
231 static inline void remove (vn_reference_s *);
234 /* Return the hashcode for a given reference operation P1. */
236 inline hashval_t
237 vn_reference_hasher::hash (const vn_reference_s *vr1)
239 return vr1->hashcode;
242 inline bool
243 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
245 return vn_reference_eq (v, c);
248 inline void
249 vn_reference_hasher::remove (vn_reference_s *v)
251 free_reference (v);
254 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
255 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
258 /* The set of hashtables and alloc_pool's for their items. */
260 typedef struct vn_tables_s
262 vn_nary_op_table_type *nary;
263 vn_phi_table_type *phis;
264 vn_reference_table_type *references;
265 struct obstack nary_obstack;
266 object_allocator<vn_phi_s> *phis_pool;
267 object_allocator<vn_reference_s> *references_pool;
268 } *vn_tables_t;
271 /* vn_constant hashtable helpers. */
273 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
275 static inline hashval_t hash (const vn_constant_s *);
276 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
279 /* Hash table hash function for vn_constant_t. */
281 inline hashval_t
282 vn_constant_hasher::hash (const vn_constant_s *vc1)
284 return vc1->hashcode;
287 /* Hash table equality function for vn_constant_t. */
289 inline bool
290 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
292 if (vc1->hashcode != vc2->hashcode)
293 return false;
295 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
298 static hash_table<vn_constant_hasher> *constant_to_value_id;
299 static bitmap constant_value_ids;
302 /* Valid hashtables storing information we have proven to be
303 correct. */
305 static vn_tables_t valid_info;
307 /* Optimistic hashtables storing information we are making assumptions about
308 during iterations. */
310 static vn_tables_t optimistic_info;
312 /* Pointer to the set of hashtables that is currently being used.
313 Should always point to either the optimistic_info, or the
314 valid_info. */
316 static vn_tables_t current_info;
319 /* Reverse post order index for each basic block. */
321 static int *rpo_numbers;
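/* Shorthand for the current value number recorded for SSA name X.  */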
323 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
325 /* Return the SSA value of the VUSE x, supporting released VDEFs
326 during elimination which will value-number the VDEF to the
327 associated VUSE (but not substitute in the whole lattice). */
329 static inline tree
330 vuse_ssa_val (tree x)
332 if (!x)
333 return NULL_TREE;
337 x = SSA_VAL (x);
339 while (SSA_NAME_IN_FREE_LIST (x));
341 return x;
344 /* This represents the top of the VN lattice, which is the universal
345 value. */
347 tree VN_TOP;
349 /* Unique counter for our value ids. */
351 static unsigned int next_value_id;
353 /* Next DFS number and the stack for strongly connected component
354 detection. */
356 static unsigned int next_dfs_num;
357 static vec<tree> sccstack;
361 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
362 are allocated on an obstack for locality reasons, and to free them
363 without looping over the vec. */
365 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
366 static struct obstack vn_ssa_aux_obstack;
368 /* Return whether there is value numbering information for a given SSA name. */
370 bool
371 has_VN_INFO (tree name)
373 if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
374 return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
375 return false;
378 /* Return the value numbering information for a given SSA name. */
380 vn_ssa_aux_t
381 VN_INFO (tree name)
383 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
384 gcc_checking_assert (res);
385 return res;
388 /* Set the value numbering info for a given SSA name to a given
389 value. */
391 static inline void
392 VN_INFO_SET (tree name, vn_ssa_aux_t value)
394 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
397 /* Initialize the value numbering info for a given SSA name.
398 This should be called just once for every SSA name. */
400 vn_ssa_aux_t
401 VN_INFO_GET (tree name)
403 vn_ssa_aux_t newinfo;
405 gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
406 || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
407 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
408 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
409 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
410 vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
411 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
412 return newinfo;
416 /* Return the vn_kind the expression computed by the stmt should be
417 associated with. */
419 enum vn_kind
420 vn_get_stmt_kind (gimple *stmt)
422 switch (gimple_code (stmt))
424 case GIMPLE_CALL:
425 return VN_REFERENCE;
426 case GIMPLE_PHI:
427 return VN_PHI;
428 case GIMPLE_ASSIGN:
430 enum tree_code code = gimple_assign_rhs_code (stmt);
431 tree rhs1 = gimple_assign_rhs1 (stmt);
432 switch (get_gimple_rhs_class (code))
434 case GIMPLE_UNARY_RHS:
435 case GIMPLE_BINARY_RHS:
436 case GIMPLE_TERNARY_RHS:
437 return VN_NARY;
438 case GIMPLE_SINGLE_RHS:
439 switch (TREE_CODE_CLASS (code))
441 case tcc_reference:
442 /* VOP-less references can go through unary case. */
443 if ((code == REALPART_EXPR
444 || code == IMAGPART_EXPR
445 || code == VIEW_CONVERT_EXPR
446 || code == BIT_FIELD_REF)
447 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
448 return VN_NARY;
450 /* Fallthrough. */
451 case tcc_declaration:
452 return VN_REFERENCE;
454 case tcc_constant:
455 return VN_CONSTANT;
457 default:
458 if (code == ADDR_EXPR)
459 return (is_gimple_min_invariant (rhs1)
460 ? VN_CONSTANT : VN_REFERENCE);
461 else if (code == CONSTRUCTOR)
462 return VN_NARY;
463 return VN_NONE;
465 default:
466 return VN_NONE;
469 default:
470 return VN_NONE;
474 /* Lookup a value id for CONSTANT and return it. If it does not
475 exist returns 0. */
477 unsigned int
478 get_constant_value_id (tree constant)
480 vn_constant_s **slot;
481 struct vn_constant_s vc;
483 vc.hashcode = vn_hash_constant_with_type (constant);
484 vc.constant = constant;
485 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
486 if (slot)
487 return (*slot)->value_id;
488 return 0;
491 /* Lookup a value id for CONSTANT, and if it does not exist, create a
492 new one and return it. If it does exist, return it. */
494 unsigned int
495 get_or_alloc_constant_value_id (tree constant)
497 vn_constant_s **slot;
498 struct vn_constant_s vc;
499 vn_constant_t vcp;
501 vc.hashcode = vn_hash_constant_with_type (constant);
502 vc.constant = constant;
503 slot = constant_to_value_id->find_slot (&vc, INSERT);
504 if (*slot)
505 return (*slot)->value_id;
507 vcp = XNEW (struct vn_constant_s);
508 vcp->hashcode = vc.hashcode;
509 vcp->constant = constant;
510 vcp->value_id = get_next_value_id ();
511 *slot = vcp;
512 bitmap_set_bit (constant_value_ids, vcp->value_id);
513 return vcp->value_id;
516 /* Return true if V is a value id for a constant. */
518 bool
519 value_id_constant_p (unsigned int v)
521 return bitmap_bit_p (constant_value_ids, v);
524 /* Compute the hash for a reference operand VRO1. */
526 static void
527 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
529 hstate.add_int (vro1->opcode);
530 if (vro1->op0)
531 inchash::add_expr (vro1->op0, hstate);
532 if (vro1->op1)
533 inchash::add_expr (vro1->op1, hstate);
534 if (vro1->op2)
535 inchash::add_expr (vro1->op2, hstate);
538 /* Compute a hash for the reference operation VR1 and return it. */
540 static hashval_t
541 vn_reference_compute_hash (const vn_reference_t vr1)
543 inchash::hash hstate;
544 hashval_t result;
545 int i;
546 vn_reference_op_t vro;
547 HOST_WIDE_INT off = -1;
548 bool deref = false;
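  /* Constant offsets of adjacent operands are accumulated and only the
     combined offset is hashed, and an ADDR_EXPR feeding a dereference is
     hashed as the object whose address is taken, so that different
     operand chains denoting the same memory access hash the same
     (mirroring vn_reference_eq).  */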
550 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
552 if (vro->opcode == MEM_REF)
553 deref = true;
554 else if (vro->opcode != ADDR_EXPR)
555 deref = false;
556 if (vro->off != -1)
558 if (off == -1)
559 off = 0;
560 off += vro->off;
562 else
564 if (off != -1
565 && off != 0)
566 hstate.add_int (off);
567 off = -1;
568 if (deref
569 && vro->opcode == ADDR_EXPR)
571 if (vro->op0)
573 tree op = TREE_OPERAND (vro->op0, 0);
574 hstate.add_int (TREE_CODE (op));
575 inchash::add_expr (op, hstate);
578 else
579 vn_reference_op_compute_hash (vro, hstate);
582 result = hstate.end ();
583 /* ??? We would ICE later if we hash instead of adding that in. */
584 if (vr1->vuse)
585 result += SSA_NAME_VERSION (vr1->vuse);
587 return result;
590 /* Return true if reference operations VR1 and VR2 are equivalent. This
591 means they have the same set of operands and vuses. */
593 bool
594 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
596 unsigned i, j;
598 /* Early out if this is not a hash collision. */
599 if (vr1->hashcode != vr2->hashcode)
600 return false;
602 /* The VOP needs to be the same. */
603 if (vr1->vuse != vr2->vuse)
604 return false;
606 /* If the operands are the same we are done. */
607 if (vr1->operands == vr2->operands)
608 return true;
610 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
611 return false;
613 if (INTEGRAL_TYPE_P (vr1->type)
614 && INTEGRAL_TYPE_P (vr2->type))
616 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
617 return false;
619 else if (INTEGRAL_TYPE_P (vr1->type)
620 && (TYPE_PRECISION (vr1->type)
621 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
622 return false;
623 else if (INTEGRAL_TYPE_P (vr2->type)
624 && (TYPE_PRECISION (vr2->type)
625 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
626 return false;
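  /* Walk both operand vectors in lock-step, summing runs of
     constant-offset components on each side and comparing only the
     accumulated offsets, and looking through ADDR_EXPR operands of
     dereferences as if they were direct decl accesses, matching the
     treatment in vn_reference_compute_hash.  */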
628 i = 0;
629 j = 0;
632 HOST_WIDE_INT off1 = 0, off2 = 0;
633 vn_reference_op_t vro1, vro2;
634 vn_reference_op_s tem1, tem2;
635 bool deref1 = false, deref2 = false;
636 for (; vr1->operands.iterate (i, &vro1); i++)
638 if (vro1->opcode == MEM_REF)
639 deref1 = true;
640 /* Do not look through a storage order barrier. */
641 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
642 return false;
643 if (vro1->off == -1)
644 break;
645 off1 += vro1->off;
647 for (; vr2->operands.iterate (j, &vro2); j++)
649 if (vro2->opcode == MEM_REF)
650 deref2 = true;
651 /* Do not look through a storage order barrier. */
652 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
653 return false;
654 if (vro2->off == -1)
655 break;
656 off2 += vro2->off;
658 if (off1 != off2)
659 return false;
660 if (deref1 && vro1->opcode == ADDR_EXPR)
662 memset (&tem1, 0, sizeof (tem1));
663 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
664 tem1.type = TREE_TYPE (tem1.op0);
665 tem1.opcode = TREE_CODE (tem1.op0);
666 vro1 = &tem1;
667 deref1 = false;
669 if (deref2 && vro2->opcode == ADDR_EXPR)
671 memset (&tem2, 0, sizeof (tem2));
672 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
673 tem2.type = TREE_TYPE (tem2.op0);
674 tem2.opcode = TREE_CODE (tem2.op0);
675 vro2 = &tem2;
676 deref2 = false;
678 if (deref1 != deref2)
679 return false;
680 if (!vn_reference_op_eq (vro1, vro2))
681 return false;
682 ++j;
683 ++i;
685 while (vr1->operands.length () != i
686 || vr2->operands.length () != j);
688 return true;
691 /* Copy the operations present in load/store REF into RESULT, a vector of
692 vn_reference_op_s's. */
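/* For instance (an illustrative note): for a load from  a.b[i]  the
   resulting vector is, from outermost component to base,
     { ARRAY_REF (op0 = i), COMPONENT_REF (op0 = b),
       MEM_REF (offset 0), ADDR_EXPR (&a) }
   where the trailing MEM_REF/ADDR_EXPR pair is the canonical form the
   decl is rewritten to below.  */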
694 static void
695 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
697 if (TREE_CODE (ref) == TARGET_MEM_REF)
699 vn_reference_op_s temp;
701 result->reserve (3);
703 memset (&temp, 0, sizeof (temp));
704 temp.type = TREE_TYPE (ref);
705 temp.opcode = TREE_CODE (ref);
706 temp.op0 = TMR_INDEX (ref);
707 temp.op1 = TMR_STEP (ref);
708 temp.op2 = TMR_OFFSET (ref);
709 temp.off = -1;
710 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
711 temp.base = MR_DEPENDENCE_BASE (ref);
712 result->quick_push (temp);
714 memset (&temp, 0, sizeof (temp));
715 temp.type = NULL_TREE;
716 temp.opcode = ERROR_MARK;
717 temp.op0 = TMR_INDEX2 (ref);
718 temp.off = -1;
719 result->quick_push (temp);
721 memset (&temp, 0, sizeof (temp));
722 temp.type = NULL_TREE;
723 temp.opcode = TREE_CODE (TMR_BASE (ref));
724 temp.op0 = TMR_BASE (ref);
725 temp.off = -1;
726 result->quick_push (temp);
727 return;
730 /* For non-calls, store the information that makes up the address. */
731 tree orig = ref;
732 while (ref)
734 vn_reference_op_s temp;
736 memset (&temp, 0, sizeof (temp));
737 temp.type = TREE_TYPE (ref);
738 temp.opcode = TREE_CODE (ref);
739 temp.off = -1;
741 switch (temp.opcode)
743 case MODIFY_EXPR:
744 temp.op0 = TREE_OPERAND (ref, 1);
745 break;
746 case WITH_SIZE_EXPR:
747 temp.op0 = TREE_OPERAND (ref, 1);
748 temp.off = 0;
749 break;
750 case MEM_REF:
751 /* The base address gets its own vn_reference_op_s structure. */
752 temp.op0 = TREE_OPERAND (ref, 1);
754 offset_int off = mem_ref_offset (ref);
755 if (wi::fits_shwi_p (off))
756 temp.off = off.to_shwi ();
758 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
759 temp.base = MR_DEPENDENCE_BASE (ref);
760 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
761 break;
762 case BIT_FIELD_REF:
763 /* Record bits, position and storage order. */
764 temp.op0 = TREE_OPERAND (ref, 1);
765 temp.op1 = TREE_OPERAND (ref, 2);
766 if (tree_fits_shwi_p (TREE_OPERAND (ref, 2)))
768 HOST_WIDE_INT off = tree_to_shwi (TREE_OPERAND (ref, 2));
769 if (off % BITS_PER_UNIT == 0)
770 temp.off = off / BITS_PER_UNIT;
772 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
773 break;
774 case COMPONENT_REF:
775 /* The field decl is enough to unambiguously specify the field,
776 a matching type is not necessary and a mismatching type
777 is always a spurious difference. */
778 temp.type = NULL_TREE;
779 temp.op0 = TREE_OPERAND (ref, 1);
780 temp.op1 = TREE_OPERAND (ref, 2);
782 tree this_offset = component_ref_field_offset (ref);
783 if (this_offset
784 && TREE_CODE (this_offset) == INTEGER_CST)
786 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
787 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
789 offset_int off
790 = (wi::to_offset (this_offset)
791 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
792 if (wi::fits_shwi_p (off)
793 Prohibit value-numbering zero offset components
794 of addresses the same before the pass folding
795 __builtin_object_size had a chance to run
796 (checking cfun->after_inlining does the
797 trick here). */
798 && (TREE_CODE (orig) != ADDR_EXPR
799 || off != 0
800 || cfun->after_inlining))
801 temp.off = off.to_shwi ();
805 break;
806 case ARRAY_RANGE_REF:
807 case ARRAY_REF:
808 /* Record index as operand. */
809 temp.op0 = TREE_OPERAND (ref, 1);
810 /* Always record lower bounds and element size. */
811 temp.op1 = array_ref_low_bound (ref);
812 temp.op2 = array_ref_element_size (ref);
813 if (TREE_CODE (temp.op0) == INTEGER_CST
814 && TREE_CODE (temp.op1) == INTEGER_CST
815 && TREE_CODE (temp.op2) == INTEGER_CST)
817 offset_int off = ((wi::to_offset (temp.op0)
818 - wi::to_offset (temp.op1))
819 * wi::to_offset (temp.op2));
820 if (wi::fits_shwi_p (off))
821 temp.off = off.to_shwi();
823 break;
824 case VAR_DECL:
825 if (DECL_HARD_REGISTER (ref))
827 temp.op0 = ref;
828 break;
830 /* Fallthru. */
831 case PARM_DECL:
832 case CONST_DECL:
833 case RESULT_DECL:
834 /* Canonicalize decls to MEM[&decl] which is what we end up with
835 when valueizing MEM[ptr] with ptr = &decl. */
836 temp.opcode = MEM_REF;
837 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
838 temp.off = 0;
839 result->safe_push (temp);
840 temp.opcode = ADDR_EXPR;
841 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
842 temp.type = TREE_TYPE (temp.op0);
843 temp.off = -1;
844 break;
845 case STRING_CST:
846 case INTEGER_CST:
847 case COMPLEX_CST:
848 case VECTOR_CST:
849 case REAL_CST:
850 case FIXED_CST:
851 case CONSTRUCTOR:
852 case SSA_NAME:
853 temp.op0 = ref;
854 break;
855 case ADDR_EXPR:
856 if (is_gimple_min_invariant (ref))
858 temp.op0 = ref;
859 break;
861 break;
862 /* These are only interesting for their operands, their
863 existence, and their type. They will never be the last
864 ref in the chain of references (i.e., they require an
865 operand), so we don't have to put anything
866 for op* as it will be handled by the iteration */
867 case REALPART_EXPR:
868 temp.off = 0;
869 break;
870 case VIEW_CONVERT_EXPR:
871 temp.off = 0;
872 temp.reverse = storage_order_barrier_p (ref);
873 break;
874 case IMAGPART_EXPR:
875 /* This is only interesting for its constant offset. */
876 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
877 break;
878 default:
879 gcc_unreachable ();
881 result->safe_push (temp);
883 if (REFERENCE_CLASS_P (ref)
884 || TREE_CODE (ref) == MODIFY_EXPR
885 || TREE_CODE (ref) == WITH_SIZE_EXPR
886 || (TREE_CODE (ref) == ADDR_EXPR
887 && !is_gimple_min_invariant (ref)))
888 ref = TREE_OPERAND (ref, 0);
889 else
890 ref = NULL_TREE;
894 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
895 operands in *OPS, the reference alias set SET and the reference type TYPE.
896 Return true if something useful was produced. */
898 bool
899 ao_ref_init_from_vn_reference (ao_ref *ref,
900 alias_set_type set, tree type,
901 vec<vn_reference_op_s> ops)
903 vn_reference_op_t op;
904 unsigned i;
905 tree base = NULL_TREE;
906 tree *op0_p = &base;
907 offset_int offset = 0;
908 offset_int max_size;
909 offset_int size = -1;
910 tree size_tree = NULL_TREE;
911 alias_set_type base_alias_set = -1;
913 /* First get the final access size from just the outermost expression. */
914 op = &ops[0];
915 if (op->opcode == COMPONENT_REF)
916 size_tree = DECL_SIZE (op->op0);
917 else if (op->opcode == BIT_FIELD_REF)
918 size_tree = op->op0;
919 else
921 machine_mode mode = TYPE_MODE (type);
922 if (mode == BLKmode)
923 size_tree = TYPE_SIZE (type);
924 else
925 size = int (GET_MODE_BITSIZE (mode));
927 if (size_tree != NULL_TREE
928 && TREE_CODE (size_tree) == INTEGER_CST)
929 size = wi::to_offset (size_tree);
931 /* Initially, maxsize is the same as the accessed element size.
932 In the following it will only grow (or become -1). */
933 max_size = size;
935 /* Compute cumulative bit-offset for nested component-refs and array-refs,
936 and find the ultimate containing object. */
937 FOR_EACH_VEC_ELT (ops, i, op)
939 switch (op->opcode)
941 /* These may be in the reference ops, but we cannot do anything
942 sensible with them here. */
943 case ADDR_EXPR:
944 /* Apart from ADDR_EXPR arguments to MEM_REF. */
945 if (base != NULL_TREE
946 && TREE_CODE (base) == MEM_REF
947 && op->op0
948 && DECL_P (TREE_OPERAND (op->op0, 0)))
950 vn_reference_op_t pop = &ops[i-1];
951 base = TREE_OPERAND (op->op0, 0);
952 if (pop->off == -1)
954 max_size = -1;
955 offset = 0;
957 else
958 offset += pop->off * BITS_PER_UNIT;
959 op0_p = NULL;
960 break;
962 /* Fallthru. */
963 case CALL_EXPR:
964 return false;
966 /* Record the base objects. */
967 case MEM_REF:
968 base_alias_set = get_deref_alias_set (op->op0);
969 *op0_p = build2 (MEM_REF, op->type,
970 NULL_TREE, op->op0);
971 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
972 MR_DEPENDENCE_BASE (*op0_p) = op->base;
973 op0_p = &TREE_OPERAND (*op0_p, 0);
974 break;
976 case VAR_DECL:
977 case PARM_DECL:
978 case RESULT_DECL:
979 case SSA_NAME:
980 *op0_p = op->op0;
981 op0_p = NULL;
982 break;
984 /* And now the usual component-reference style ops. */
985 case BIT_FIELD_REF:
986 offset += wi::to_offset (op->op1);
987 break;
989 case COMPONENT_REF:
991 tree field = op->op0;
992 /* We do not have a complete COMPONENT_REF tree here so we
993 cannot use component_ref_field_offset. Do the interesting
994 parts manually. */
995 tree this_offset = DECL_FIELD_OFFSET (field);
997 if (op->op1 || TREE_CODE (this_offset) != INTEGER_CST)
998 max_size = -1;
999 else
1001 offset_int woffset = (wi::to_offset (this_offset)
1002 << LOG2_BITS_PER_UNIT);
1003 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1004 offset += woffset;
1006 break;
1009 case ARRAY_RANGE_REF:
1010 case ARRAY_REF:
1011 /* We recorded the lower bound and the element size. */
1012 if (TREE_CODE (op->op0) != INTEGER_CST
1013 || TREE_CODE (op->op1) != INTEGER_CST
1014 || TREE_CODE (op->op2) != INTEGER_CST)
1015 max_size = -1;
1016 else
1018 offset_int woffset
1019 = wi::sext (wi::to_offset (op->op0) - wi::to_offset (op->op1),
1020 TYPE_PRECISION (TREE_TYPE (op->op0)));
1021 woffset *= wi::to_offset (op->op2);
1022 woffset <<= LOG2_BITS_PER_UNIT;
1023 offset += woffset;
1025 break;
1027 case REALPART_EXPR:
1028 break;
1030 case IMAGPART_EXPR:
1031 offset += size;
1032 break;
1034 case VIEW_CONVERT_EXPR:
1035 break;
1037 case STRING_CST:
1038 case INTEGER_CST:
1039 case COMPLEX_CST:
1040 case VECTOR_CST:
1041 case REAL_CST:
1042 case CONSTRUCTOR:
1043 case CONST_DECL:
1044 return false;
1046 default:
1047 return false;
1051 if (base == NULL_TREE)
1052 return false;
1054 ref->ref = NULL_TREE;
1055 ref->base = base;
1056 ref->ref_alias_set = set;
1057 if (base_alias_set != -1)
1058 ref->base_alias_set = base_alias_set;
1059 else
1060 ref->base_alias_set = get_alias_set (base);
1061 /* We discount volatiles from value-numbering elsewhere. */
1062 ref->volatile_p = false;
1064 if (!wi::fits_shwi_p (size) || wi::neg_p (size))
1066 ref->offset = 0;
1067 ref->size = -1;
1068 ref->max_size = -1;
1069 return true;
1072 ref->size = size.to_shwi ();
1074 if (!wi::fits_shwi_p (offset))
1076 ref->offset = 0;
1077 ref->max_size = -1;
1078 return true;
1081 ref->offset = offset.to_shwi ();
1083 if (!wi::fits_shwi_p (max_size) || wi::neg_p (max_size))
1084 ref->max_size = -1;
1085 else
1086 ref->max_size = max_size.to_shwi ();
1088 return true;
1091 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1092 vn_reference_op_s's. */
1094 static void
1095 copy_reference_ops_from_call (gcall *call,
1096 vec<vn_reference_op_s> *result)
1098 vn_reference_op_s temp;
1099 unsigned i;
1100 tree lhs = gimple_call_lhs (call);
1101 int lr;
1103 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1104 different. By adding the lhs here in the vector, we ensure that the
1105 hashcode is different, guaranteeing a different value number. */
1106 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1108 memset (&temp, 0, sizeof (temp));
1109 temp.opcode = MODIFY_EXPR;
1110 temp.type = TREE_TYPE (lhs);
1111 temp.op0 = lhs;
1112 temp.off = -1;
1113 result->safe_push (temp);
1116 /* Copy the type, opcode, function, static chain and EH region, if any. */
1117 memset (&temp, 0, sizeof (temp));
1118 temp.type = gimple_call_return_type (call);
1119 temp.opcode = CALL_EXPR;
1120 temp.op0 = gimple_call_fn (call);
1121 temp.op1 = gimple_call_chain (call);
1122 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1123 temp.op2 = size_int (lr);
1124 temp.off = -1;
1125 if (gimple_call_with_bounds_p (call))
1126 temp.with_bounds = 1;
1127 result->safe_push (temp);
1129 /* Copy the call arguments. As they can be references as well,
1130 just chain them together. */
1131 for (i = 0; i < gimple_call_num_args (call); ++i)
1133 tree callarg = gimple_call_arg (call, i);
1134 copy_reference_ops_from_ref (callarg, result);
1138 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1139 *I_P to point to the last element of the replacement. */
1140 static bool
1141 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1142 unsigned int *i_p)
1144 unsigned int i = *i_p;
1145 vn_reference_op_t op = &(*ops)[i];
1146 vn_reference_op_t mem_op = &(*ops)[i - 1];
1147 tree addr_base;
1148 HOST_WIDE_INT addr_offset = 0;
1150 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1151 from .foo.bar to the preceding MEM_REF offset and replace the
1152 address with &OBJ. */
1153 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1154 &addr_offset);
1155 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1156 if (addr_base != TREE_OPERAND (op->op0, 0))
1158 offset_int off = offset_int::from (mem_op->op0, SIGNED);
1159 off += addr_offset;
1160 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1161 op->op0 = build_fold_addr_expr (addr_base);
1162 if (tree_fits_shwi_p (mem_op->op0))
1163 mem_op->off = tree_to_shwi (mem_op->op0);
1164 else
1165 mem_op->off = -1;
1166 return true;
1168 return false;
1171 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1172 *I_P to point to the last element of the replacement. */
1173 static bool
1174 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1175 unsigned int *i_p)
1177 unsigned int i = *i_p;
1178 vn_reference_op_t op = &(*ops)[i];
1179 vn_reference_op_t mem_op = &(*ops)[i - 1];
1180 gimple *def_stmt;
1181 enum tree_code code;
1182 offset_int off;
1184 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1185 if (!is_gimple_assign (def_stmt))
1186 return false;
1188 code = gimple_assign_rhs_code (def_stmt);
1189 if (code != ADDR_EXPR
1190 && code != POINTER_PLUS_EXPR)
1191 return false;
1193 off = offset_int::from (mem_op->op0, SIGNED);
1195 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1196 from .foo.bar to the preceding MEM_REF offset and replace the
1197 address with &OBJ. */
1198 if (code == ADDR_EXPR)
1200 tree addr, addr_base;
1201 HOST_WIDE_INT addr_offset;
1203 addr = gimple_assign_rhs1 (def_stmt);
1204 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1205 &addr_offset);
1206 /* If that didn't work because the address isn't invariant propagate
1207 the reference tree from the address operation in case the current
1208 dereference isn't offsetted. */
1209 if (!addr_base
1210 && *i_p == ops->length () - 1
1211 && off == 0
1212 /* This makes us disable this transform for PRE where the
1213 reference ops might be also used for code insertion which
1214 is invalid. */
1215 && default_vn_walk_kind == VN_WALKREWRITE)
1217 auto_vec<vn_reference_op_s, 32> tem;
1218 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1219 ops->pop ();
1220 ops->pop ();
1221 ops->safe_splice (tem);
1222 --*i_p;
1223 return true;
1225 if (!addr_base
1226 || TREE_CODE (addr_base) != MEM_REF)
1227 return false;
1229 off += addr_offset;
1230 off += mem_ref_offset (addr_base);
1231 op->op0 = TREE_OPERAND (addr_base, 0);
1233 else
1235 tree ptr, ptroff;
1236 ptr = gimple_assign_rhs1 (def_stmt);
1237 ptroff = gimple_assign_rhs2 (def_stmt);
1238 if (TREE_CODE (ptr) != SSA_NAME
1239 || TREE_CODE (ptroff) != INTEGER_CST)
1240 return false;
1242 off += wi::to_offset (ptroff);
1243 op->op0 = ptr;
1246 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1247 if (tree_fits_shwi_p (mem_op->op0))
1248 mem_op->off = tree_to_shwi (mem_op->op0);
1249 else
1250 mem_op->off = -1;
1251 if (TREE_CODE (op->op0) == SSA_NAME)
1252 op->op0 = SSA_VAL (op->op0);
1253 if (TREE_CODE (op->op0) != SSA_NAME)
1254 op->opcode = TREE_CODE (op->op0);
1256 /* And recurse. */
1257 if (TREE_CODE (op->op0) == SSA_NAME)
1258 vn_reference_maybe_forwprop_address (ops, i_p);
1259 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1260 vn_reference_fold_indirect (ops, i_p);
1261 return true;
1264 /* Optimize the reference REF to a constant if possible or return
1265 NULL_TREE if not. */
1267 tree
1268 fully_constant_vn_reference_p (vn_reference_t ref)
1270 vec<vn_reference_op_s> operands = ref->operands;
1271 vn_reference_op_t op;
1273 /* Try to simplify the translated expression if it is
1274 a call to a builtin function with at most two arguments. */
1275 op = &operands[0];
1276 if (op->opcode == CALL_EXPR
1277 && TREE_CODE (op->op0) == ADDR_EXPR
1278 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1279 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1280 && operands.length () >= 2
1281 && operands.length () <= 3)
1283 vn_reference_op_t arg0, arg1 = NULL;
1284 bool anyconst = false;
1285 arg0 = &operands[1];
1286 if (operands.length () > 2)
1287 arg1 = &operands[2];
1288 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1289 || (arg0->opcode == ADDR_EXPR
1290 && is_gimple_min_invariant (arg0->op0)))
1291 anyconst = true;
1292 if (arg1
1293 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1294 || (arg1->opcode == ADDR_EXPR
1295 && is_gimple_min_invariant (arg1->op0))))
1296 anyconst = true;
1297 if (anyconst)
1299 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1300 arg1 ? 2 : 1,
1301 arg0->op0,
1302 arg1 ? arg1->op0 : NULL);
1303 if (folded
1304 && TREE_CODE (folded) == NOP_EXPR)
1305 folded = TREE_OPERAND (folded, 0);
1306 if (folded
1307 && is_gimple_min_invariant (folded))
1308 return folded;
1312 /* Simplify reads from constants or constant initializers. */
1313 else if (BITS_PER_UNIT == 8
1314 && is_gimple_reg_type (ref->type)
1315 && (!INTEGRAL_TYPE_P (ref->type)
1316 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
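      /* For example (an illustrative case): with
	   static const int a[2] = { 1, 2 };
	 a load of a[1] reaches this point with a constant base and known
	 constant offsets and is folded to 2 from the initializer returned
	 by ctor_for_folding, either via fold_ctor_reference for decls or
	 by native-encoding a constant base and re-interpreting the
	 bytes.  */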
1318 HOST_WIDE_INT off = 0;
1319 HOST_WIDE_INT size;
1320 if (INTEGRAL_TYPE_P (ref->type))
1321 size = TYPE_PRECISION (ref->type);
1322 else
1323 size = tree_to_shwi (TYPE_SIZE (ref->type));
1324 if (size % BITS_PER_UNIT != 0
1325 || size > MAX_BITSIZE_MODE_ANY_MODE)
1326 return NULL_TREE;
1327 size /= BITS_PER_UNIT;
1328 unsigned i;
1329 for (i = 0; i < operands.length (); ++i)
1331 if (operands[i].off == -1)
1332 return NULL_TREE;
1333 off += operands[i].off;
1334 if (operands[i].opcode == MEM_REF)
1336 ++i;
1337 break;
1340 vn_reference_op_t base = &operands[--i];
1341 tree ctor = error_mark_node;
1342 tree decl = NULL_TREE;
1343 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1344 ctor = base->op0;
1345 else if (base->opcode == MEM_REF
1346 && base[1].opcode == ADDR_EXPR
1347 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1348 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
1350 decl = TREE_OPERAND (base[1].op0, 0);
1351 ctor = ctor_for_folding (decl);
1353 if (ctor == NULL_TREE)
1354 return build_zero_cst (ref->type);
1355 else if (ctor != error_mark_node)
1357 if (decl)
1359 tree res = fold_ctor_reference (ref->type, ctor,
1360 off * BITS_PER_UNIT,
1361 size * BITS_PER_UNIT, decl);
1362 if (res)
1364 STRIP_USELESS_TYPE_CONVERSION (res);
1365 if (is_gimple_min_invariant (res))
1366 return res;
1369 else
1371 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1372 int len = native_encode_expr (ctor, buf, size, off);
1373 if (len > 0)
1374 return native_interpret_expr (ref->type, buf, len);
1379 return NULL_TREE;
1382 /* Return true if OPS contain a storage order barrier. */
1384 static bool
1385 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1387 vn_reference_op_t op;
1388 unsigned i;
1390 FOR_EACH_VEC_ELT (ops, i, op)
1391 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1392 return true;
1394 return false;
1397 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1398 structures into their value numbers. This is done in-place, and
1399 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1400 whether any operands were valueized. */
1402 static vec<vn_reference_op_s>
1403 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1405 vn_reference_op_t vro;
1406 unsigned int i;
1408 *valueized_anything = false;
1410 FOR_EACH_VEC_ELT (orig, i, vro)
1412 if (vro->opcode == SSA_NAME
1413 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1415 tree tem = SSA_VAL (vro->op0);
1416 if (tem != vro->op0)
1418 *valueized_anything = true;
1419 vro->op0 = tem;
1421 /* If it transforms from an SSA_NAME to a constant, update
1422 the opcode. */
1423 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1424 vro->opcode = TREE_CODE (vro->op0);
1426 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1428 tree tem = SSA_VAL (vro->op1);
1429 if (tem != vro->op1)
1431 *valueized_anything = true;
1432 vro->op1 = tem;
1435 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1437 tree tem = SSA_VAL (vro->op2);
1438 if (tem != vro->op2)
1440 *valueized_anything = true;
1441 vro->op2 = tem;
1444 /* If it transforms from an SSA_NAME to an address, fold with
1445 a preceding indirect reference. */
1446 if (i > 0
1447 && vro->op0
1448 && TREE_CODE (vro->op0) == ADDR_EXPR
1449 && orig[i - 1].opcode == MEM_REF)
1451 if (vn_reference_fold_indirect (&orig, &i))
1452 *valueized_anything = true;
1454 else if (i > 0
1455 && vro->opcode == SSA_NAME
1456 && orig[i - 1].opcode == MEM_REF)
1458 if (vn_reference_maybe_forwprop_address (&orig, &i))
1459 *valueized_anything = true;
1461 /* If it transforms a non-constant ARRAY_REF into a constant
1462 one, adjust the constant offset. */
1463 else if (vro->opcode == ARRAY_REF
1464 && vro->off == -1
1465 && TREE_CODE (vro->op0) == INTEGER_CST
1466 && TREE_CODE (vro->op1) == INTEGER_CST
1467 && TREE_CODE (vro->op2) == INTEGER_CST)
1469 offset_int off = ((wi::to_offset (vro->op0)
1470 - wi::to_offset (vro->op1))
1471 * wi::to_offset (vro->op2));
1472 if (wi::fits_shwi_p (off))
1473 vro->off = off.to_shwi ();
1477 return orig;
1480 static vec<vn_reference_op_s>
1481 valueize_refs (vec<vn_reference_op_s> orig)
1483 bool tem;
1484 return valueize_refs_1 (orig, &tem);
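/* Scratch operand vector shared by the valueize_shared_reference_ops_*
   functions below to avoid re-allocating it for every lookup.  */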
1487 static vec<vn_reference_op_s> shared_lookup_references;
1489 /* Create a vector of vn_reference_op_s structures from REF, a
1490 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1491 this function. *VALUEIZED_ANYTHING will specify whether any
1492 operands were valueized. */
1494 static vec<vn_reference_op_s>
1495 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1497 if (!ref)
1498 return vNULL;
1499 shared_lookup_references.truncate (0);
1500 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1501 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1502 valueized_anything);
1503 return shared_lookup_references;
1506 /* Create a vector of vn_reference_op_s structures from CALL, a
1507 call statement. The vector is shared among all callers of
1508 this function. */
1510 static vec<vn_reference_op_s>
1511 valueize_shared_reference_ops_from_call (gcall *call)
1513 if (!call)
1514 return vNULL;
1515 shared_lookup_references.truncate (0);
1516 copy_reference_ops_from_call (call, &shared_lookup_references);
1517 shared_lookup_references = valueize_refs (shared_lookup_references);
1518 return shared_lookup_references;
1521 /* Lookup a SCCVN reference operation VR in the current hash table.
1522 Returns the resulting value number if it exists in the hash table,
1523 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1524 vn_reference_t stored in the hashtable if something is found. */
1526 static tree
1527 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1529 vn_reference_s **slot;
1530 hashval_t hash;
1532 hash = vr->hashcode;
1533 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1534 if (!slot && current_info == optimistic_info)
1535 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1536 if (slot)
1538 if (vnresult)
1539 *vnresult = (vn_reference_t)*slot;
1540 return ((vn_reference_t)*slot)->result;
1543 return NULL_TREE;
1546 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1547 with the current VUSE and performs the expression lookup. */
1549 static void *
1550 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1551 unsigned int cnt, void *vr_)
1553 vn_reference_t vr = (vn_reference_t)vr_;
1554 vn_reference_s **slot;
1555 hashval_t hash;
1557 /* This bounds the stmt walks we perform on reference lookups
1558 to O(1) instead of O(N) where N is the number of dominating
1559 stores. */
1560 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1561 return (void *)-1;
1563 if (last_vuse_ptr)
1564 *last_vuse_ptr = vuse;
1566 /* Fixup vuse and hash. */
1567 if (vr->vuse)
1568 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1569 vr->vuse = vuse_ssa_val (vuse);
1570 if (vr->vuse)
1571 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1573 hash = vr->hashcode;
1574 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1575 if (!slot && current_info == optimistic_info)
1576 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1577 if (slot)
1578 return *slot;
1580 return NULL;
1583 /* Lookup an existing or insert a new vn_reference entry into the
1584 value table for the VUSE, SET, TYPE, OPERANDS reference which
1585 has the value VALUE which is either a constant or an SSA name. */
1587 static vn_reference_t
1588 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1589 alias_set_type set,
1590 tree type,
1591 vec<vn_reference_op_s,
1592 va_heap> operands,
1593 tree value)
1595 vn_reference_s vr1;
1596 vn_reference_t result;
1597 unsigned value_id;
1598 vr1.vuse = vuse;
1599 vr1.operands = operands;
1600 vr1.type = type;
1601 vr1.set = set;
1602 vr1.hashcode = vn_reference_compute_hash (&vr1);
1603 if (vn_reference_lookup_1 (&vr1, &result))
1604 return result;
1605 if (TREE_CODE (value) == SSA_NAME)
1606 value_id = VN_INFO (value)->value_id;
1607 else
1608 value_id = get_or_alloc_constant_value_id (value);
1609 return vn_reference_insert_pieces (vuse, set, type,
1610 operands.copy (), value, value_id);
1613 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *stmt, tree result);
1615 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
1617 static tree
1618 vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops)
1620 if (!rcode.is_tree_code ())
1621 return NULL_TREE;
1622 vn_nary_op_t vnresult = NULL;
1623 return vn_nary_op_lookup_pieces (TREE_CODE_LENGTH ((tree_code) rcode),
1624 (tree_code) rcode, type, ops, &vnresult);
1627 /* Return a value-number for RCODE OPS... either by looking up an existing
1628 value-number for the simplified result or by inserting the operation. */
1630 static tree
1631 vn_nary_build_or_lookup (code_helper rcode, tree type, tree *ops)
1633 tree result = NULL_TREE;
1634 /* We will be creating a value number for
1635 RCODE (OPS...).
1636 So first simplify and lookup this expression to see if it
1637 is already available. */
1638 mprts_hook = vn_lookup_simplify_result;
1639 bool res = false;
1640 switch (TREE_CODE_LENGTH ((tree_code) rcode))
1642 case 1:
1643 res = gimple_resimplify1 (NULL, &rcode, type, ops, vn_valueize);
1644 break;
1645 case 2:
1646 res = gimple_resimplify2 (NULL, &rcode, type, ops, vn_valueize);
1647 break;
1648 case 3:
1649 res = gimple_resimplify3 (NULL, &rcode, type, ops, vn_valueize);
1650 break;
1652 mprts_hook = NULL;
1653 gimple *new_stmt = NULL;
1654 if (res
1655 && gimple_simplified_result_is_gimple_val (rcode, ops))
1656 /* The expression is already available. */
1657 result = ops[0];
1658 else
1660 tree val = vn_lookup_simplify_result (rcode, type, ops);
1661 if (!val)
1663 gimple_seq stmts = NULL;
1664 result = maybe_push_res_to_seq (rcode, type, ops, &stmts);
1665 if (result)
1667 gcc_assert (gimple_seq_singleton_p (stmts));
1668 new_stmt = gimple_seq_first_stmt (stmts);
1671 else
1672 /* The expression is already available. */
1673 result = val;
1675 if (new_stmt)
1677 /* The expression is not yet available, value-number lhs to
1678 the new SSA_NAME we created. */
1679 /* Initialize value-number information properly. */
1680 VN_INFO_GET (result)->valnum = result;
1681 VN_INFO (result)->value_id = get_next_value_id ();
1682 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1683 new_stmt);
1684 VN_INFO (result)->needs_insertion = true;
1685 /* ??? PRE phi-translation inserts NARYs without corresponding
1686 SSA name result. Re-use those but set their result according
1687 to the stmt we just built. */
1688 vn_nary_op_t nary = NULL;
1689 vn_nary_op_lookup_stmt (new_stmt, &nary);
1690 if (nary)
1692 gcc_assert (nary->result == NULL_TREE);
1693 nary->result = gimple_assign_lhs (new_stmt);
1695 /* As all "inserted" statements are singleton SCCs, insert
1696 to the valid table. This is strictly needed to
1697 avoid re-generating new value SSA_NAMEs for the same
1698 expression during SCC iteration over and over (the
1699 optimistic table gets cleared after each iteration).
1700 We do not need to insert into the optimistic table, as
1701 lookups there will fall back to the valid table. */
1702 else if (current_info == optimistic_info)
1704 current_info = valid_info;
1705 vn_nary_op_insert_stmt (new_stmt, result);
1706 current_info = optimistic_info;
1708 else
1709 vn_nary_op_insert_stmt (new_stmt, result);
1710 if (dump_file && (dump_flags & TDF_DETAILS))
1712 fprintf (dump_file, "Inserting name ");
1713 print_generic_expr (dump_file, result, 0);
1714 fprintf (dump_file, " for expression ");
1715 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1716 fprintf (dump_file, "\n");
1719 return result;
1722 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1723 from the statement defining VUSE and if not successful tries to
1724 translate *REFP and VR_ through an aggregate copy at the definition
1725 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1726 of *REF and *VR. If only disambiguation was performed then
1727 *DISAMBIGUATE_ONLY is set to true. */
1729 static void *
1730 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1731 bool *disambiguate_only)
1733 vn_reference_t vr = (vn_reference_t)vr_;
1734 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1735 tree base = ao_ref_base (ref);
1736 HOST_WIDE_INT offset, maxsize;
1737 static vec<vn_reference_op_s>
1738 lhs_ops = vNULL;
1739 ao_ref lhs_ref;
1740 bool lhs_ref_ok = false;
1742 /* If the reference is based on a parameter that was determined as
1743 pointing to readonly memory it doesn't change. */
1744 if (TREE_CODE (base) == MEM_REF
1745 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1746 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1747 && bitmap_bit_p (const_parms,
1748 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1750 *disambiguate_only = true;
1751 return NULL;
1754 /* First try to disambiguate after value-replacing in the definitions LHS. */
1755 if (is_gimple_assign (def_stmt))
1757 tree lhs = gimple_assign_lhs (def_stmt);
1758 bool valueized_anything = false;
1759 /* Avoid re-allocation overhead. */
1760 lhs_ops.truncate (0);
1761 copy_reference_ops_from_ref (lhs, &lhs_ops);
1762 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1763 if (valueized_anything)
1765 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1766 get_alias_set (lhs),
1767 TREE_TYPE (lhs), lhs_ops);
1768 if (lhs_ref_ok
1769 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1771 *disambiguate_only = true;
1772 return NULL;
1775 else
1777 ao_ref_init (&lhs_ref, lhs);
1778 lhs_ref_ok = true;
1781 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1782 && gimple_call_num_args (def_stmt) <= 4)
1784 /* For builtin calls, valueize their arguments and call the
1785 alias oracle again. Valueization may improve points-to
1786 info of pointers and constify size and position arguments.
1787 Originally this was motivated by PR61034 which has
1788 conditional calls to free falsely clobbering ref because
1789 of imprecise points-to info of the argument. */
1790 tree oldargs[4];
1791 bool valueized_anything = false;
1792 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1794 oldargs[i] = gimple_call_arg (def_stmt, i);
1795 if (TREE_CODE (oldargs[i]) == SSA_NAME
1796 && VN_INFO (oldargs[i])->valnum != oldargs[i])
1798 gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
1799 valueized_anything = true;
1802 if (valueized_anything)
1804 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1805 ref);
1806 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1807 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1808 if (!res)
1810 *disambiguate_only = true;
1811 return NULL;
1816 if (*disambiguate_only)
1817 return (void *)-1;
1819 offset = ref->offset;
1820 maxsize = ref->max_size;
1822 /* If we cannot constrain the size of the reference we cannot
1823 test if anything kills it. */
1824 if (maxsize == -1)
1825 return (void *)-1;
1827 /* We can't deduce anything useful from clobbers. */
1828 if (gimple_clobber_p (def_stmt))
1829 return (void *)-1;
1831 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1832 from that definition.
1833 1) Memset. */
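  /* For instance (an illustrative case):  memset (&a, 0, sizeof (a));
     followed by a load of a field of A that lies entirely within the
     zeroed region allows the load to be value-numbered to zero by the
     memset case below.  */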
1834 if (is_gimple_reg_type (vr->type)
1835 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1836 && integer_zerop (gimple_call_arg (def_stmt, 1))
1837 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1838 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1840 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1841 tree base2;
1842 HOST_WIDE_INT offset2, size2, maxsize2;
1843 bool reverse;
1844 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1845 &reverse);
1846 size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
1847 if ((unsigned HOST_WIDE_INT)size2 / 8
1848 == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
1849 && maxsize2 != -1
1850 && operand_equal_p (base, base2, 0)
1851 && offset2 <= offset
1852 && offset2 + size2 >= offset + maxsize)
1854 tree val = build_zero_cst (vr->type);
1855 return vn_reference_lookup_or_insert_for_pieces
1856 (vuse, vr->set, vr->type, vr->operands, val);
1860 /* 2) Assignment from an empty CONSTRUCTOR. */
1861 else if (is_gimple_reg_type (vr->type)
1862 && gimple_assign_single_p (def_stmt)
1863 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1864 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1866 tree base2;
1867 HOST_WIDE_INT offset2, size2, maxsize2;
1868 bool reverse;
1869 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1870 &offset2, &size2, &maxsize2, &reverse);
1871 if (maxsize2 != -1
1872 && operand_equal_p (base, base2, 0)
1873 && offset2 <= offset
1874 && offset2 + size2 >= offset + maxsize)
1876 tree val = build_zero_cst (vr->type);
1877 return vn_reference_lookup_or_insert_for_pieces
1878 (vuse, vr->set, vr->type, vr->operands, val);
1882 /* 3) Assignment from a constant. We can use folds native encode/interpret
1883 routines to extract the assigned bits. */
1884 else if (ref->size == maxsize
1885 && is_gimple_reg_type (vr->type)
1886 && !contains_storage_order_barrier_p (vr->operands)
1887 && gimple_assign_single_p (def_stmt)
1888 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1889 && maxsize % BITS_PER_UNIT == 0
1890 && offset % BITS_PER_UNIT == 0
1891 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
1892 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
1893 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
1895 tree base2;
1896 HOST_WIDE_INT offset2, size2, maxsize2;
1897 bool reverse;
1898 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1899 &offset2, &size2, &maxsize2, &reverse);
1900 if (!reverse
1901 && maxsize2 != -1
1902 && maxsize2 == size2
1903 && size2 % BITS_PER_UNIT == 0
1904 && offset2 % BITS_PER_UNIT == 0
1905 && operand_equal_p (base, base2, 0)
1906 && offset2 <= offset
1907 && offset2 + size2 >= offset + maxsize)
1909 /* We support up to 512-bit values (for V8DFmode). */
1910 unsigned char buffer[64];
1911 int len;
1913 tree rhs = gimple_assign_rhs1 (def_stmt);
1914 if (TREE_CODE (rhs) == SSA_NAME)
1915 rhs = SSA_VAL (rhs);
1916 len = native_encode_expr (rhs,
1917 buffer, sizeof (buffer));
1918 if (len > 0)
1920 tree type = vr->type;
1921 /* Make sure to interpret in a type that has a range
1922 covering the whole access size. */
1923 if (INTEGRAL_TYPE_P (vr->type)
1924 && ref->size != TYPE_PRECISION (vr->type))
1925 type = build_nonstandard_integer_type (ref->size,
1926 TYPE_UNSIGNED (type));
1927 tree val = native_interpret_expr (type,
1928 buffer
1929 + ((offset - offset2)
1930 / BITS_PER_UNIT),
1931 ref->size / BITS_PER_UNIT);
1932 /* If we chop off bits because the type's precision doesn't
1933 match the memory access size this is ok when optimizing
1934 reads but not when called from the DSE code during
1935 elimination. */
1936 if (val
1937 && type != vr->type)
1939 if (! int_fits_type_p (val, vr->type))
1940 val = NULL_TREE;
1941 else
1942 val = fold_convert (vr->type, val);
1945 if (val)
1946 return vn_reference_lookup_or_insert_for_pieces
1947 (vuse, vr->set, vr->type, vr->operands, val);
1952 /* 4) Assignment from an SSA name which definition we may be able
1953 to access pieces from. */
1954 else if (ref->size == maxsize
1955 && is_gimple_reg_type (vr->type)
1956 && !contains_storage_order_barrier_p (vr->operands)
1957 && gimple_assign_single_p (def_stmt)
1958 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1960 tree base2;
1961 HOST_WIDE_INT offset2, size2, maxsize2;
1962 bool reverse;
1963 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1964 &offset2, &size2, &maxsize2,
1965 &reverse);
1966 if (!reverse
1967 && maxsize2 != -1
1968 && maxsize2 == size2
1969 && operand_equal_p (base, base2, 0)
1970 && offset2 <= offset
1971 && offset2 + size2 >= offset + maxsize
1972 /* ??? We can't handle bitfield precision extracts without
1973 either using an alternate type for the BIT_FIELD_REF and
1974 then doing a conversion or possibly adjusting the offset
1975 according to endianness. */
1976 && (! INTEGRAL_TYPE_P (vr->type)
1977 || ref->size == TYPE_PRECISION (vr->type))
1978 && ref->size % BITS_PER_UNIT == 0)
1980 code_helper rcode = BIT_FIELD_REF;
1981 tree ops[3];
1982 ops[0] = SSA_VAL (gimple_assign_rhs1 (def_stmt));
1983 ops[1] = bitsize_int (ref->size);
1984 ops[2] = bitsize_int (offset - offset2);
1985 tree val = vn_nary_build_or_lookup (rcode, vr->type, ops);
1986 if (val)
1988 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
1989 (vuse, vr->set, vr->type, vr->operands, val);
1990 return res;
1995 /* 5) For aggregate copies translate the reference through them if
1996 the copy kills ref. */
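/* For example, given the aggregate copy and load
     *p = *q;
     _1 = p->f;
   the reference p->f is rewritten in terms of the copy source as q->f
   and the walk continues, looking for a dominating definition of the
   rewritten reference.  */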
1997 else if (vn_walk_kind == VN_WALKREWRITE
1998 && gimple_assign_single_p (def_stmt)
1999 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2000 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2001 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2003 tree base2;
2004 HOST_WIDE_INT maxsize2;
2005 int i, j, k;
2006 auto_vec<vn_reference_op_s> rhs;
2007 vn_reference_op_t vro;
2008 ao_ref r;
2010 if (!lhs_ref_ok)
2011 return (void *)-1;
2013 /* See if the assignment kills REF. */
2014 base2 = ao_ref_base (&lhs_ref);
2015 maxsize2 = lhs_ref.max_size;
2016 if (maxsize2 == -1
2017 || (base != base2
2018 && (TREE_CODE (base) != MEM_REF
2019 || TREE_CODE (base2) != MEM_REF
2020 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2021 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2022 TREE_OPERAND (base2, 1))))
2023 || !stmt_kills_ref_p (def_stmt, ref))
2024 return (void *)-1;
2026 /* Find the common base of ref and the lhs. lhs_ops already
2027 contains valueized operands for the lhs. */
2028 i = vr->operands.length () - 1;
2029 j = lhs_ops.length () - 1;
2030 while (j >= 0 && i >= 0
2031 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2033 i--;
2034 j--;
2037 /* ??? The innermost op should always be a MEM_REF and we already
2038 checked that the assignment to the lhs kills vr. Thus for
2039 aggregate copies using char[] types the vn_reference_op_eq
2040 may fail when comparing types for compatibility. But we really
2041 don't care here - further lookups with the rewritten operands
2042 will simply fail if we messed up types too badly. */
2043 HOST_WIDE_INT extra_off = 0;
2044 if (j == 0 && i >= 0
2045 && lhs_ops[0].opcode == MEM_REF
2046 && lhs_ops[0].off != -1)
2048 if (lhs_ops[0].off == vr->operands[i].off)
2049 i--, j--;
2050 else if (vr->operands[i].opcode == MEM_REF
2051 && vr->operands[i].off != -1)
2053 extra_off = vr->operands[i].off - lhs_ops[0].off;
2054 i--, j--;
2058 /* i now points to the first additional op.
2059 ??? LHS may not be completely contained in VR, one or more
2060 VIEW_CONVERT_EXPRs could be in its way. We could at least
2061 try handling outermost VIEW_CONVERT_EXPRs. */
2062 if (j != -1)
2063 return (void *)-1;
2065 /* Punt if the additional ops contain a storage order barrier. */
2066 for (k = i; k >= 0; k--)
2068 vro = &vr->operands[k];
2069 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2070 return (void *)-1;
2073 /* Now re-write REF to be based on the rhs of the assignment. */
2074 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2076 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2077 if (extra_off != 0)
2079 if (rhs.length () < 2
2080 || rhs[0].opcode != MEM_REF
2081 || rhs[0].off == -1)
2082 return (void *)-1;
2083 rhs[0].off += extra_off;
2084 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
2085 build_int_cst (TREE_TYPE (rhs[0].op0),
2086 extra_off));
2089 /* We need to pre-pend vr->operands[0..i] to rhs. */
2090 vec<vn_reference_op_s> old = vr->operands;
2091 if (i + 1 + rhs.length () > vr->operands.length ())
2093 vr->operands.safe_grow (i + 1 + rhs.length ());
2094 if (old == shared_lookup_references)
2095 shared_lookup_references = vr->operands;
2097 else
2098 vr->operands.truncate (i + 1 + rhs.length ());
2099 FOR_EACH_VEC_ELT (rhs, j, vro)
2100 vr->operands[i + 1 + j] = *vro;
2101 vr->operands = valueize_refs (vr->operands);
2102 if (old == shared_lookup_references)
2103 shared_lookup_references = vr->operands;
2104 vr->hashcode = vn_reference_compute_hash (vr);
2106 /* Try folding the new reference to a constant. */
2107 tree val = fully_constant_vn_reference_p (vr);
2108 if (val)
2109 return vn_reference_lookup_or_insert_for_pieces
2110 (vuse, vr->set, vr->type, vr->operands, val);
2112 /* Adjust *ref from the new operands. */
2113 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2114 return (void *)-1;
2115 /* This can happen with bitfields. */
2116 if (ref->size != r.size)
2117 return (void *)-1;
2118 *ref = r;
2120 /* Do not update last seen VUSE after translating. */
2121 last_vuse_ptr = NULL;
2123 /* Keep looking for the adjusted *REF / VR pair. */
2124 return NULL;
2127 /* 6) For memcpy copies translate the reference through them if
2128 the copy kills ref. */
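/* For example, given
     memcpy (&a, &b, sizeof (a));
     _1 = a.f;
   the read of a.f is rewritten as a MEM_REF based on &b at the
   corresponding offset and the walk continues from there.  */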
2129 else if (vn_walk_kind == VN_WALKREWRITE
2130 && is_gimple_reg_type (vr->type)
2131 /* ??? Handle BCOPY as well. */
2132 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2133 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2134 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2135 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2136 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2137 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2138 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2139 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
2141 tree lhs, rhs;
2142 ao_ref r;
2143 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
2144 vn_reference_op_s op;
2145 HOST_WIDE_INT at;
2147 /* Only handle non-variable, addressable refs. */
2148 if (ref->size != maxsize
2149 || offset % BITS_PER_UNIT != 0
2150 || ref->size % BITS_PER_UNIT != 0)
2151 return (void *)-1;
2153 /* Extract a pointer base and an offset for the destination. */
2154 lhs = gimple_call_arg (def_stmt, 0);
2155 lhs_offset = 0;
2156 if (TREE_CODE (lhs) == SSA_NAME)
2158 lhs = SSA_VAL (lhs);
2159 if (TREE_CODE (lhs) == SSA_NAME)
2161 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2162 if (gimple_assign_single_p (def_stmt)
2163 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2164 lhs = gimple_assign_rhs1 (def_stmt);
2167 if (TREE_CODE (lhs) == ADDR_EXPR)
2169 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2170 &lhs_offset);
2171 if (!tem)
2172 return (void *)-1;
2173 if (TREE_CODE (tem) == MEM_REF
2174 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2176 lhs = TREE_OPERAND (tem, 0);
2177 if (TREE_CODE (lhs) == SSA_NAME)
2178 lhs = SSA_VAL (lhs);
2179 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2181 else if (DECL_P (tem))
2182 lhs = build_fold_addr_expr (tem);
2183 else
2184 return (void *)-1;
2186 if (TREE_CODE (lhs) != SSA_NAME
2187 && TREE_CODE (lhs) != ADDR_EXPR)
2188 return (void *)-1;
2190 /* Extract a pointer base and an offset for the source. */
2191 rhs = gimple_call_arg (def_stmt, 1);
2192 rhs_offset = 0;
2193 if (TREE_CODE (rhs) == SSA_NAME)
2194 rhs = SSA_VAL (rhs);
2195 if (TREE_CODE (rhs) == ADDR_EXPR)
2197 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2198 &rhs_offset);
2199 if (!tem)
2200 return (void *)-1;
2201 if (TREE_CODE (tem) == MEM_REF
2202 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2204 rhs = TREE_OPERAND (tem, 0);
2205 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2207 else if (DECL_P (tem))
2208 rhs = build_fold_addr_expr (tem);
2209 else
2210 return (void *)-1;
2212 if (TREE_CODE (rhs) != SSA_NAME
2213 && TREE_CODE (rhs) != ADDR_EXPR)
2214 return (void *)-1;
2216 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2218 /* The bases of the destination and the reference have to agree. */
2219 if ((TREE_CODE (base) != MEM_REF
2220 && !DECL_P (base))
2221 || (TREE_CODE (base) == MEM_REF
2222 && (TREE_OPERAND (base, 0) != lhs
2223 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
2224 || (DECL_P (base)
2225 && (TREE_CODE (lhs) != ADDR_EXPR
2226 || TREE_OPERAND (lhs, 0) != base)))
2227 return (void *)-1;
2229 at = offset / BITS_PER_UNIT;
2230 if (TREE_CODE (base) == MEM_REF)
2231 at += tree_to_uhwi (TREE_OPERAND (base, 1));
2232 /* If the access is completely outside of the memcpy destination
2233 area there is no aliasing. */
2234 if (lhs_offset >= at + maxsize / BITS_PER_UNIT
2235 || lhs_offset + copy_size <= at)
2236 return NULL;
2237 /* And the access has to be contained within the memcpy destination. */
2238 if (lhs_offset > at
2239 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2240 return (void *)-1;
2242 /* Make room for 2 operands in the new reference. */
2243 if (vr->operands.length () < 2)
2245 vec<vn_reference_op_s> old = vr->operands;
2246 vr->operands.safe_grow_cleared (2);
2247 if (old == shared_lookup_references
2248 && vr->operands != old)
2249 shared_lookup_references = vr->operands;
2251 else
2252 vr->operands.truncate (2);
2254 /* The looked-through reference is a simple MEM_REF. */
2255 memset (&op, 0, sizeof (op));
2256 op.type = vr->type;
2257 op.opcode = MEM_REF;
2258 op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
2259 op.off = at - lhs_offset + rhs_offset;
2260 vr->operands[0] = op;
2261 op.type = TREE_TYPE (rhs);
2262 op.opcode = TREE_CODE (rhs);
2263 op.op0 = rhs;
2264 op.off = -1;
2265 vr->operands[1] = op;
2266 vr->hashcode = vn_reference_compute_hash (vr);
2268 /* Try folding the new reference to a constant. */
2269 tree val = fully_constant_vn_reference_p (vr);
2270 if (val)
2271 return vn_reference_lookup_or_insert_for_pieces
2272 (vuse, vr->set, vr->type, vr->operands, val);
2274 /* Adjust *ref from the new operands. */
2275 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2276 return (void *)-1;
2277 /* This can happen with bitfields. */
2278 if (ref->size != r.size)
2279 return (void *)-1;
2280 *ref = r;
2282 /* Do not update last seen VUSE after translating. */
2283 last_vuse_ptr = NULL;
2285 /* Keep looking for the adjusted *REF / VR pair. */
2286 return NULL;
2289 /* Bail out and stop walking. */
2290 return (void *)-1;
2293 /* Lookup a reference operation by its parts, in the current hash table.
2294 Returns the resulting value number if it exists in the hash table,
2295 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2296 vn_reference_t stored in the hashtable if something is found. */
2298 tree
2299 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2300 vec<vn_reference_op_s> operands,
2301 vn_reference_t *vnresult, vn_lookup_kind kind)
2303 struct vn_reference_s vr1;
2304 vn_reference_t tmp;
2305 tree cst;
2307 if (!vnresult)
2308 vnresult = &tmp;
2309 *vnresult = NULL;
2311 vr1.vuse = vuse_ssa_val (vuse);
2312 shared_lookup_references.truncate (0);
2313 shared_lookup_references.safe_grow (operands.length ());
2314 memcpy (shared_lookup_references.address (),
2315 operands.address (),
2316 sizeof (vn_reference_op_s)
2317 * operands.length ());
2318 vr1.operands = operands = shared_lookup_references
2319 = valueize_refs (shared_lookup_references);
2320 vr1.type = type;
2321 vr1.set = set;
2322 vr1.hashcode = vn_reference_compute_hash (&vr1);
2323 if ((cst = fully_constant_vn_reference_p (&vr1)))
2324 return cst;
2326 vn_reference_lookup_1 (&vr1, vnresult);
2327 if (!*vnresult
2328 && kind != VN_NOWALK
2329 && vr1.vuse)
2331 ao_ref r;
2332 vn_walk_kind = kind;
2333 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2334 *vnresult =
2335 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2336 vn_reference_lookup_2,
2337 vn_reference_lookup_3,
2338 vuse_ssa_val, &vr1);
2339 gcc_checking_assert (vr1.operands == shared_lookup_references);
2342 if (*vnresult)
2343 return (*vnresult)->result;
2345 return NULL_TREE;
2348 /* Lookup OP in the current hash table, and return the resulting value
2349 number if it exists in the hash table. Return NULL_TREE if it does
2350 not exist in the hash table or if the result field of the structure
2351 was NULL. VNRESULT will be filled in with the vn_reference_t
2352 stored in the hashtable if one exists. When TBAA_P is false assume
2353 we are looking up a store and treat it as having alias-set zero. */
2355 tree
2356 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2357 vn_reference_t *vnresult, bool tbaa_p)
2359 vec<vn_reference_op_s> operands;
2360 struct vn_reference_s vr1;
2361 tree cst;
2362 bool valuezied_anything;
2364 if (vnresult)
2365 *vnresult = NULL;
2367 vr1.vuse = vuse_ssa_val (vuse);
2368 vr1.operands = operands
2369 = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
2370 vr1.type = TREE_TYPE (op);
2371 vr1.set = tbaa_p ? get_alias_set (op) : 0;
2372 vr1.hashcode = vn_reference_compute_hash (&vr1);
2373 if ((cst = fully_constant_vn_reference_p (&vr1)))
2374 return cst;
2376 if (kind != VN_NOWALK
2377 && vr1.vuse)
2379 vn_reference_t wvnresult;
2380 ao_ref r;
2381 /* Make sure to use a valueized reference if we valueized anything.
2382 Otherwise preserve the full reference for advanced TBAA. */
2383 if (!valuezied_anything
2384 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2385 vr1.operands))
2386 ao_ref_init (&r, op);
2387 if (! tbaa_p)
2388 r.ref_alias_set = r.base_alias_set = 0;
2389 vn_walk_kind = kind;
2390 wvnresult =
2391 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2392 vn_reference_lookup_2,
2393 vn_reference_lookup_3,
2394 vuse_ssa_val, &vr1);
2395 gcc_checking_assert (vr1.operands == shared_lookup_references);
2396 if (wvnresult)
2398 if (vnresult)
2399 *vnresult = wvnresult;
2400 return wvnresult->result;
2403 return NULL_TREE;
2406 return vn_reference_lookup_1 (&vr1, vnresult);
2409 /* Lookup CALL in the current hash table and return the entry in
2410 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2412 void
2413 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2414 vn_reference_t vr)
2416 if (vnresult)
2417 *vnresult = NULL;
2419 tree vuse = gimple_vuse (call);
2421 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2422 vr->operands = valueize_shared_reference_ops_from_call (call);
2423 vr->type = gimple_expr_type (call);
2424 vr->set = 0;
2425 vr->hashcode = vn_reference_compute_hash (vr);
2426 vn_reference_lookup_1 (vr, vnresult);
2429 /* Insert OP into the current hash table with a value number of
2430 RESULT, and return the resulting reference structure we created. */
2432 static vn_reference_t
2433 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2435 vn_reference_s **slot;
2436 vn_reference_t vr1;
2437 bool tem;
2439 vr1 = current_info->references_pool->allocate ();
2440 if (TREE_CODE (result) == SSA_NAME)
2441 vr1->value_id = VN_INFO (result)->value_id;
2442 else
2443 vr1->value_id = get_or_alloc_constant_value_id (result);
2444 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2445 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2446 vr1->type = TREE_TYPE (op);
2447 vr1->set = get_alias_set (op);
2448 vr1->hashcode = vn_reference_compute_hash (vr1);
2449 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2450 vr1->result_vdef = vdef;
2452 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2453 INSERT);
2455 /* Because we look up stores using vuses, and value number failures
2456 using the vdefs (see visit_reference_op_store for how and why),
2457 it's possible that on failure we may try to insert an already
2458 inserted store. This is not wrong, there is no ssa name for a
2459 store that we could use as a differentiator anyway. Thus, unlike
2460 the other lookup functions, you cannot gcc_assert (!*slot)
2461 here. */
2463 /* But free the old slot in case of a collision. */
2464 if (*slot)
2465 free_reference (*slot);
2467 *slot = vr1;
2468 return vr1;
2471 /* Insert a reference by its pieces into the current hash table with
2472 a value number of RESULT. Return the resulting reference
2473 structure we created. */
2475 vn_reference_t
2476 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2477 vec<vn_reference_op_s> operands,
2478 tree result, unsigned int value_id)
2481 vn_reference_s **slot;
2482 vn_reference_t vr1;
2484 vr1 = current_info->references_pool->allocate ();
2485 vr1->value_id = value_id;
2486 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2487 vr1->operands = valueize_refs (operands);
2488 vr1->type = type;
2489 vr1->set = set;
2490 vr1->hashcode = vn_reference_compute_hash (vr1);
2491 if (result && TREE_CODE (result) == SSA_NAME)
2492 result = SSA_VAL (result);
2493 vr1->result = result;
2495 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2496 INSERT);
2498 /* At this point we should have all the things inserted that we have
2499 seen before, and we should never try inserting something that
2500 already exists. */
2501 gcc_assert (!*slot);
2502 if (*slot)
2503 free_reference (*slot);
2505 *slot = vr1;
2506 return vr1;
2509 /* Compute and return the hash value for nary operation VNO1. */
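/* Commutative operands and swappable comparisons are canonicalized
   first (e.g. b + a hashes like a + b, and a > b like b < a) so that
   equivalent expressions receive equal hash values.  */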
2511 static hashval_t
2512 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2514 inchash::hash hstate;
2515 unsigned i;
2517 for (i = 0; i < vno1->length; ++i)
2518 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2519 vno1->op[i] = SSA_VAL (vno1->op[i]);
2521 if (((vno1->length == 2
2522 && commutative_tree_code (vno1->opcode))
2523 || (vno1->length == 3
2524 && commutative_ternary_tree_code (vno1->opcode)))
2525 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2526 std::swap (vno1->op[0], vno1->op[1]);
2527 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2528 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2530 std::swap (vno1->op[0], vno1->op[1]);
2531 vno1->opcode = swap_tree_comparison (vno1->opcode);
2534 hstate.add_int (vno1->opcode);
2535 for (i = 0; i < vno1->length; ++i)
2536 inchash::add_expr (vno1->op[i], hstate);
2538 return hstate.end ();
2541 /* Compare nary operations VNO1 and VNO2 and return true if they are
2542 equivalent. */
2544 bool
2545 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2547 unsigned i;
2549 if (vno1->hashcode != vno2->hashcode)
2550 return false;
2552 if (vno1->length != vno2->length)
2553 return false;
2555 if (vno1->opcode != vno2->opcode
2556 || !types_compatible_p (vno1->type, vno2->type))
2557 return false;
2559 for (i = 0; i < vno1->length; ++i)
2560 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2561 return false;
2563 return true;
2566 /* Initialize VNO from the pieces provided. */
2568 static void
2569 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2570 enum tree_code code, tree type, tree *ops)
2572 vno->opcode = code;
2573 vno->length = length;
2574 vno->type = type;
2575 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2578 /* Initialize VNO from OP. */
2580 static void
2581 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2583 unsigned i;
2585 vno->opcode = TREE_CODE (op);
2586 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2587 vno->type = TREE_TYPE (op);
2588 for (i = 0; i < vno->length; ++i)
2589 vno->op[i] = TREE_OPERAND (op, i);
2592 /* Return the number of operands for a vn_nary ops structure from STMT. */
2594 static unsigned int
2595 vn_nary_length_from_stmt (gimple *stmt)
2597 switch (gimple_assign_rhs_code (stmt))
2599 case REALPART_EXPR:
2600 case IMAGPART_EXPR:
2601 case VIEW_CONVERT_EXPR:
2602 return 1;
2604 case BIT_FIELD_REF:
2605 return 3;
2607 case CONSTRUCTOR:
2608 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2610 default:
2611 return gimple_num_ops (stmt) - 1;
2615 /* Initialize VNO from STMT. */
2617 static void
2618 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2620 unsigned i;
2622 vno->opcode = gimple_assign_rhs_code (stmt);
2623 vno->type = gimple_expr_type (stmt);
2624 switch (vno->opcode)
2626 case REALPART_EXPR:
2627 case IMAGPART_EXPR:
2628 case VIEW_CONVERT_EXPR:
2629 vno->length = 1;
2630 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2631 break;
2633 case BIT_FIELD_REF:
2634 vno->length = 3;
2635 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2636 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2637 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2638 break;
2640 case CONSTRUCTOR:
2641 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2642 for (i = 0; i < vno->length; ++i)
2643 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2644 break;
2646 default:
2647 gcc_checking_assert (!gimple_assign_single_p (stmt));
2648 vno->length = gimple_num_ops (stmt) - 1;
2649 for (i = 0; i < vno->length; ++i)
2650 vno->op[i] = gimple_op (stmt, i + 1);
2654 /* Compute the hashcode for VNO and look for it in the hash table;
2655 return the resulting value number if it exists in the hash table.
2656 Return NULL_TREE if it does not exist in the hash table or if the
2657 result field of the operation is NULL. VNRESULT will contain the
2658 vn_nary_op_t from the hashtable if it exists. */
2660 static tree
2661 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2663 vn_nary_op_s **slot;
2665 if (vnresult)
2666 *vnresult = NULL;
2668 vno->hashcode = vn_nary_op_compute_hash (vno);
2669 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2670 NO_INSERT);
2671 if (!slot && current_info == optimistic_info)
2672 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2673 NO_INSERT);
2674 if (!slot)
2675 return NULL_TREE;
2676 if (vnresult)
2677 *vnresult = *slot;
2678 return (*slot)->result;
2681 /* Lookup an n-ary operation by its pieces and return the resulting value
2682 number if it exists in the hash table. Return NULL_TREE if it does
2683 not exist in the hash table or if the result field of the operation
2684 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2685 if it exists. */
2687 tree
2688 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2689 tree type, tree *ops, vn_nary_op_t *vnresult)
2691 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2692 sizeof_vn_nary_op (length));
2693 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2694 return vn_nary_op_lookup_1 (vno1, vnresult);
2697 /* Lookup OP in the current hash table, and return the resulting value
2698 number if it exists in the hash table. Return NULL_TREE if it does
2699 not exist in the hash table or if the result field of the operation
2700 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2701 if it exists. */
2703 tree
2704 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2706 vn_nary_op_t vno1
2707 = XALLOCAVAR (struct vn_nary_op_s,
2708 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2709 init_vn_nary_op_from_op (vno1, op);
2710 return vn_nary_op_lookup_1 (vno1, vnresult);
2713 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2714 value number if it exists in the hash table. Return NULL_TREE if
2715 it does not exist in the hash table. VNRESULT will contain the
2716 vn_nary_op_t from the hashtable if it exists. */
2718 tree
2719 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2721 vn_nary_op_t vno1
2722 = XALLOCAVAR (struct vn_nary_op_s,
2723 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2724 init_vn_nary_op_from_stmt (vno1, stmt);
2725 return vn_nary_op_lookup_1 (vno1, vnresult);
2728 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2730 static vn_nary_op_t
2731 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2733 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2736 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2737 obstack. */
2739 static vn_nary_op_t
2740 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2742 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2743 &current_info->nary_obstack);
2745 vno1->value_id = value_id;
2746 vno1->length = length;
2747 vno1->result = result;
2749 return vno1;
2752 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2753 VNO->HASHCODE first. */
2755 static vn_nary_op_t
2756 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2757 bool compute_hash)
2759 vn_nary_op_s **slot;
2761 if (compute_hash)
2762 vno->hashcode = vn_nary_op_compute_hash (vno);
2764 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2765 gcc_assert (!*slot);
2767 *slot = vno;
2768 return vno;
2771 /* Insert an n-ary operation into the current hash table using its
2772 pieces. Return the vn_nary_op_t structure we created and put in
2773 the hashtable. */
2775 vn_nary_op_t
2776 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2777 tree type, tree *ops,
2778 tree result, unsigned int value_id)
2780 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2781 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2782 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2785 /* Insert OP into the current hash table with a value number of
2786 RESULT. Return the vn_nary_op_t structure we created and put in
2787 the hashtable. */
2789 vn_nary_op_t
2790 vn_nary_op_insert (tree op, tree result)
2792 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2793 vn_nary_op_t vno1;
2795 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2796 init_vn_nary_op_from_op (vno1, op);
2797 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2800 /* Insert the rhs of STMT into the current hash table with a value number of
2801 RESULT. */
2803 static vn_nary_op_t
2804 vn_nary_op_insert_stmt (gimple *stmt, tree result)
2806 vn_nary_op_t vno1
2807 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2808 result, VN_INFO (result)->value_id);
2809 init_vn_nary_op_from_stmt (vno1, stmt);
2810 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2813 /* Compute a hashcode for PHI operation VP1 and return it. */
2815 static inline hashval_t
2816 vn_phi_compute_hash (vn_phi_t vp1)
2818 inchash::hash hstate (vp1->phiargs.length () > 2
2819 ? vp1->block->index : vp1->phiargs.length ());
2820 tree phi1op;
2821 tree type;
2822 edge e;
2823 edge_iterator ei;
2825 /* If all PHI arguments are constants we need to distinguish
2826 the PHI node via its type. */
2827 type = vp1->type;
2828 hstate.merge_hash (vn_hash_type (type));
2830 FOR_EACH_EDGE (e, ei, vp1->block->preds)
2832 /* Don't hash backedge values; they need to be handled as VN_TOP
2833 for optimistic value-numbering. */
2834 if (e->flags & EDGE_DFS_BACK)
2835 continue;
2837 phi1op = vp1->phiargs[e->dest_idx];
2838 if (phi1op == VN_TOP)
2839 continue;
2840 inchash::add_expr (phi1op, hstate);
2843 return hstate.end ();
2847 /* Return true if COND1 and COND2 represent the same condition, set
2848 *INVERTED_P if one needs to be inverted to make it the same as
2849 the other. */
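/* For example, a < b and b > a are the same condition, while for
   integer operands a < b and a >= b are the same with *INVERTED_P set.
   The operands are valueized before comparison, so equal value numbers
   suffice.  */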
2851 static bool
2852 cond_stmts_equal_p (gcond *cond1, gcond *cond2, bool *inverted_p)
2854 enum tree_code code1 = gimple_cond_code (cond1);
2855 enum tree_code code2 = gimple_cond_code (cond2);
2856 tree lhs1 = gimple_cond_lhs (cond1);
2857 tree lhs2 = gimple_cond_lhs (cond2);
2858 tree rhs1 = gimple_cond_rhs (cond1);
2859 tree rhs2 = gimple_cond_rhs (cond2);
2861 *inverted_p = false;
2862 if (code1 == code2)
2864 else if (code1 == swap_tree_comparison (code2))
2865 std::swap (lhs2, rhs2);
2866 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
2867 *inverted_p = true;
2868 else if (code1 == invert_tree_comparison
2869 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
2871 std::swap (lhs2, rhs2);
2872 *inverted_p = true;
2874 else
2875 return false;
2877 lhs1 = vn_valueize (lhs1);
2878 rhs1 = vn_valueize (rhs1);
2879 lhs2 = vn_valueize (lhs2);
2880 rhs2 = vn_valueize (rhs2);
2881 return ((expressions_equal_p (lhs1, lhs2)
2882 && expressions_equal_p (rhs1, rhs2))
2883 || (commutative_tree_code (code1)
2884 && expressions_equal_p (lhs1, rhs2)
2885 && expressions_equal_p (rhs1, lhs2)));
2888 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
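/* For two-argument PHIs this also matches PHIs in different blocks when
   both blocks are controlled by the same (possibly inverted) condition
   and the arguments on the corresponding true and false edges are
   equal.  */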
2890 static int
2891 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2893 if (vp1->hashcode != vp2->hashcode)
2894 return false;
2896 if (vp1->block != vp2->block)
2898 if (vp1->phiargs.length () != vp2->phiargs.length ())
2899 return false;
2901 switch (vp1->phiargs.length ())
2903 case 1:
2904 /* Single-arg PHIs are just copies. */
2905 break;
2907 case 2:
2909 /* Rule out backedges into the PHI. */
2910 if (vp1->block->loop_father->header == vp1->block
2911 || vp2->block->loop_father->header == vp2->block)
2912 return false;
2914 /* If the PHI nodes do not have compatible types
2915 they are not the same. */
2916 if (!types_compatible_p (vp1->type, vp2->type))
2917 return false;
2919 basic_block idom1
2920 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
2921 basic_block idom2
2922 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
2923 /* If the immediate dominators end in switch stmts, multiple
2924 values may end up in the same PHI arg via intermediate
2925 CFG merges. */
2926 if (EDGE_COUNT (idom1->succs) != 2
2927 || EDGE_COUNT (idom2->succs) != 2)
2928 return false;
2930 /* Verify the controlling stmt is the same. */
2931 gimple *last1 = last_stmt (idom1);
2932 gimple *last2 = last_stmt (idom2);
2933 if (gimple_code (last1) != GIMPLE_COND
2934 || gimple_code (last2) != GIMPLE_COND)
2935 return false;
2936 bool inverted_p;
2937 if (! cond_stmts_equal_p (as_a <gcond *> (last1),
2938 as_a <gcond *> (last2), &inverted_p))
2939 return false;
2941 /* Get at true/false controlled edges into the PHI. */
2942 edge te1, te2, fe1, fe2;
2943 if (! extract_true_false_controlled_edges (idom1, vp1->block,
2944 &te1, &fe1)
2945 || ! extract_true_false_controlled_edges (idom2, vp2->block,
2946 &te2, &fe2))
2947 return false;
2949 /* Swap edges if the second condition is the inverted of the
2950 first. */
2951 if (inverted_p)
2952 std::swap (te2, fe2);
2954 /* ??? Handle VN_TOP specially. */
2955 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
2956 vp2->phiargs[te2->dest_idx])
2957 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
2958 vp2->phiargs[fe2->dest_idx]))
2959 return false;
2961 return true;
2964 default:
2965 return false;
2969 /* If the PHI nodes do not have compatible types
2970 they are not the same. */
2971 if (!types_compatible_p (vp1->type, vp2->type))
2972 return false;
2974 /* Any phi in the same block will have its arguments in the
2975 same edge order, because of how we store phi nodes. */
2976 int i;
2977 tree phi1op;
2978 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2980 tree phi2op = vp2->phiargs[i];
2981 if (phi1op == VN_TOP || phi2op == VN_TOP)
2982 continue;
2983 if (!expressions_equal_p (phi1op, phi2op))
2984 return false;
2987 return true;
2990 static vec<tree> shared_lookup_phiargs;
2992 /* Lookup PHI in the current hash table, and return the resulting
2993 value number if it exists in the hash table. Return NULL_TREE if
2994 it does not exist in the hash table. */
2996 static tree
2997 vn_phi_lookup (gimple *phi)
2999 vn_phi_s **slot;
3000 struct vn_phi_s vp1;
3001 edge e;
3002 edge_iterator ei;
3004 shared_lookup_phiargs.truncate (0);
3005 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
3007 /* Canonicalize the SSA_NAME's to their value number. */
3008 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3010 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3011 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3012 shared_lookup_phiargs[e->dest_idx] = def;
3014 vp1.type = TREE_TYPE (gimple_phi_result (phi));
3015 vp1.phiargs = shared_lookup_phiargs;
3016 vp1.block = gimple_bb (phi);
3017 vp1.hashcode = vn_phi_compute_hash (&vp1);
3018 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3019 NO_INSERT);
3020 if (!slot && current_info == optimistic_info)
3021 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3022 NO_INSERT);
3023 if (!slot)
3024 return NULL_TREE;
3025 return (*slot)->result;
3028 /* Insert PHI into the current hash table with a value number of
3029 RESULT. */
3031 static vn_phi_t
3032 vn_phi_insert (gimple *phi, tree result)
3034 vn_phi_s **slot;
3035 vn_phi_t vp1 = current_info->phis_pool->allocate ();
3036 vec<tree> args = vNULL;
3037 edge e;
3038 edge_iterator ei;
3040 args.safe_grow (gimple_phi_num_args (phi));
3042 /* Canonicalize the SSA_NAME's to their value number. */
3043 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3045 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3046 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3047 args[e->dest_idx] = def;
3049 vp1->value_id = VN_INFO (result)->value_id;
3050 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3051 vp1->phiargs = args;
3052 vp1->block = gimple_bb (phi);
3053 vp1->result = result;
3054 vp1->hashcode = vn_phi_compute_hash (vp1);
3056 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3058 /* Because we iterate over phi operations more than once, it's
3059 possible the slot might already exist here, hence no assert. */
3060 *slot = vp1;
3061 return vp1;
3065 /* Print set of components in strongly connected component SCC to OUT. */
3067 static void
3068 print_scc (FILE *out, vec<tree> scc)
3070 tree var;
3071 unsigned int i;
3073 fprintf (out, "SCC consists of:");
3074 FOR_EACH_VEC_ELT (scc, i, var)
3076 fprintf (out, " ");
3077 print_generic_expr (out, var, 0);
3079 fprintf (out, "\n");
3082 /* Return true if BB1 is dominated by BB2 taking into account edges
3083 that are not executable. */
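/* Besides the plain dominance query this follows the single executable
   predecessor edge of BB1 and the single executable successor edge of
   BB2, when they exist, and re-checks dominance there, so paths over
   not-executable edges do not spoil the result.  */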
3085 static bool
3086 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3088 edge_iterator ei;
3089 edge e;
3091 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3092 return true;
3094 /* Before iterating we'd like to know if there exists an
3095 (executable) path from bb2 to bb1 at all; if not we can
3096 directly return false. For now simply iterate once. */
3098 /* Iterate to the single executable bb1 predecessor. */
3099 if (EDGE_COUNT (bb1->preds) > 1)
3101 edge prede = NULL;
3102 FOR_EACH_EDGE (e, ei, bb1->preds)
3103 if (e->flags & EDGE_EXECUTABLE)
3105 if (prede)
3107 prede = NULL;
3108 break;
3110 prede = e;
3112 if (prede)
3114 bb1 = prede->src;
3116 /* Re-do the dominance check with changed bb1. */
3117 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3118 return true;
3122 /* Iterate to the single executable bb2 successor. */
3123 edge succe = NULL;
3124 FOR_EACH_EDGE (e, ei, bb2->succs)
3125 if (e->flags & EDGE_EXECUTABLE)
3127 if (succe)
3129 succe = NULL;
3130 break;
3132 succe = e;
3134 if (succe)
3136 /* Verify the reached block is only reached through succe.
3137 If there is only one edge we can spare us the dominator
3138 check and iterate directly. */
3139 if (EDGE_COUNT (succe->dest->preds) > 1)
3141 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3142 if (e != succe
3143 && (e->flags & EDGE_EXECUTABLE))
3145 succe = NULL;
3146 break;
3149 if (succe)
3151 bb2 = succe->dest;
3153 /* Re-do the dominance check with changed bb2. */
3154 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3155 return true;
3159 /* We could now iterate updating bb1 / bb2. */
3160 return false;
3163 /* Set the value number of FROM to TO, return true if it has changed
3164 as a result. */
3166 static inline bool
3167 set_ssa_val_to (tree from, tree to)
3169 tree currval = SSA_VAL (from);
3170 HOST_WIDE_INT toff, coff;
3172 /* The only thing we allow as value numbers are ssa_names
3173 and invariants. So assert that here. We don't allow VN_TOP
3174 as visiting a stmt should produce a value-number other than
3175 that.
3176 ??? Still VN_TOP can happen for unreachable code, so force
3177 it to varying in that case. Not all code is prepared to
3178 get VN_TOP on valueization. */
3179 if (to == VN_TOP)
3181 if (dump_file && (dump_flags & TDF_DETAILS))
3182 fprintf (dump_file, "Forcing value number to varying on "
3183 "receiving VN_TOP\n");
3184 to = from;
3187 gcc_assert (to != NULL_TREE
3188 && ((TREE_CODE (to) == SSA_NAME
3189 && (to == from || SSA_VAL (to) == to))
3190 || is_gimple_min_invariant (to)));
3192 if (from != to)
3194 if (currval == from)
3196 if (dump_file && (dump_flags & TDF_DETAILS))
3198 fprintf (dump_file, "Not changing value number of ");
3199 print_generic_expr (dump_file, from, 0);
3200 fprintf (dump_file, " from VARYING to ");
3201 print_generic_expr (dump_file, to, 0);
3202 fprintf (dump_file, "\n");
3204 return false;
3206 else if (TREE_CODE (to) == SSA_NAME
3207 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3208 to = from;
3211 if (dump_file && (dump_flags & TDF_DETAILS))
3213 fprintf (dump_file, "Setting value number of ");
3214 print_generic_expr (dump_file, from, 0);
3215 fprintf (dump_file, " to ");
3216 print_generic_expr (dump_file, to, 0);
3219 if (currval != to
3220 && !operand_equal_p (currval, to, 0)
3221 /* ??? For addresses involving volatile objects or types operand_equal_p
3222 does not reliably detect ADDR_EXPRs as equal. We know we are only
3223 getting invariant gimple addresses here, so can use
3224 get_addr_base_and_unit_offset to do this comparison. */
3225 && !(TREE_CODE (currval) == ADDR_EXPR
3226 && TREE_CODE (to) == ADDR_EXPR
3227 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3228 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3229 && coff == toff))
3231 /* If we equate two SSA names we have to make the side-band info
3232 of the leader conservative (and remember whatever original value
3233 was present). */
3234 if (TREE_CODE (to) == SSA_NAME)
3236 if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3237 && SSA_NAME_RANGE_INFO (to))
3239 if (SSA_NAME_IS_DEFAULT_DEF (to)
3240 || dominated_by_p_w_unex
3241 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3242 gimple_bb (SSA_NAME_DEF_STMT (to))))
3243 /* Keep the info from the dominator. */
3245 else if (SSA_NAME_IS_DEFAULT_DEF (from)
3246 || dominated_by_p_w_unex
3247 (gimple_bb (SSA_NAME_DEF_STMT (to)),
3248 gimple_bb (SSA_NAME_DEF_STMT (from))))
3250 /* Save old info. */
3251 if (! VN_INFO (to)->info.range_info)
3253 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3254 VN_INFO (to)->range_info_anti_range_p
3255 = SSA_NAME_ANTI_RANGE_P (to);
3257 /* Use that from the dominator. */
3258 SSA_NAME_RANGE_INFO (to) = SSA_NAME_RANGE_INFO (from);
3259 SSA_NAME_ANTI_RANGE_P (to) = SSA_NAME_ANTI_RANGE_P (from);
3261 else
3263 /* Save old info. */
3264 if (! VN_INFO (to)->info.range_info)
3266 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3267 VN_INFO (to)->range_info_anti_range_p
3268 = SSA_NAME_ANTI_RANGE_P (to);
3270 /* Rather than allocating memory and unioning the info
3271 just clear it. */
3272 SSA_NAME_RANGE_INFO (to) = NULL;
3275 else if (POINTER_TYPE_P (TREE_TYPE (to))
3276 && SSA_NAME_PTR_INFO (to))
3278 if (SSA_NAME_IS_DEFAULT_DEF (to)
3279 || dominated_by_p_w_unex
3280 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3281 gimple_bb (SSA_NAME_DEF_STMT (to))))
3282 /* Keep the info from the dominator. */
3284 else if (SSA_NAME_IS_DEFAULT_DEF (from)
3285 || dominated_by_p_w_unex
3286 (gimple_bb (SSA_NAME_DEF_STMT (to)),
3287 gimple_bb (SSA_NAME_DEF_STMT (from))))
3289 /* Save old info. */
3290 if (! VN_INFO (to)->info.ptr_info)
3291 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3292 /* Use that from the dominator. */
3293 SSA_NAME_PTR_INFO (to) = SSA_NAME_PTR_INFO (from);
3295 else if (! SSA_NAME_PTR_INFO (from)
3296 /* Handle the case of trivially equivalent info. */
3297 || memcmp (SSA_NAME_PTR_INFO (to),
3298 SSA_NAME_PTR_INFO (from),
3299 sizeof (ptr_info_def)) != 0)
3301 /* Save old info. */
3302 if (! VN_INFO (to)->info.ptr_info)
3303 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3304 /* Rather than allocating memory and unioning the info
3305 just clear it. */
3306 SSA_NAME_PTR_INFO (to) = NULL;
3311 VN_INFO (from)->valnum = to;
3312 if (dump_file && (dump_flags & TDF_DETAILS))
3313 fprintf (dump_file, " (changed)\n");
3314 return true;
3316 if (dump_file && (dump_flags & TDF_DETAILS))
3317 fprintf (dump_file, "\n");
3318 return false;
3321 /* Mark as processed all the definitions in the defining stmt of USE, or
3322 the USE itself. */
3324 static void
3325 mark_use_processed (tree use)
3327 ssa_op_iter iter;
3328 def_operand_p defp;
3329 gimple *stmt = SSA_NAME_DEF_STMT (use);
3331 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3333 VN_INFO (use)->use_processed = true;
3334 return;
3337 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3339 tree def = DEF_FROM_PTR (defp);
3341 VN_INFO (def)->use_processed = true;
3345 /* Set all definitions in STMT to value number to themselves.
3346 Return true if a value number changed. */
3348 static bool
3349 defs_to_varying (gimple *stmt)
3351 bool changed = false;
3352 ssa_op_iter iter;
3353 def_operand_p defp;
3355 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3357 tree def = DEF_FROM_PTR (defp);
3358 changed |= set_ssa_val_to (def, def);
3360 return changed;
3363 /* Visit a copy between LHS and RHS, return true if the value number
3364 changed. */
3366 static bool
3367 visit_copy (tree lhs, tree rhs)
3369 /* Valueize. */
3370 rhs = SSA_VAL (rhs);
3372 return set_ssa_val_to (lhs, rhs);
3375 /* Visit a nary operator RHS, value number it, and return true if the
3376 value number of LHS has changed as a result. */
3378 static bool
3379 visit_nary_op (tree lhs, gimple *stmt)
3381 bool changed = false;
3382 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3384 if (result)
3385 changed = set_ssa_val_to (lhs, result);
3386 else
3388 changed = set_ssa_val_to (lhs, lhs);
3389 vn_nary_op_insert_stmt (stmt, lhs);
3392 return changed;
3395 /* Visit a call STMT storing into LHS. Return true if the value number
3396 of the LHS has changed as a result. */
3398 static bool
3399 visit_reference_op_call (tree lhs, gcall *stmt)
3401 bool changed = false;
3402 struct vn_reference_s vr1;
3403 vn_reference_t vnresult = NULL;
3404 tree vdef = gimple_vdef (stmt);
3406 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3407 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3408 lhs = NULL_TREE;
3410 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3411 if (vnresult)
3413 if (vnresult->result_vdef && vdef)
3414 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3416 if (!vnresult->result && lhs)
3417 vnresult->result = lhs;
3419 if (vnresult->result && lhs)
3420 changed |= set_ssa_val_to (lhs, vnresult->result);
3422 else
3424 vn_reference_t vr2;
3425 vn_reference_s **slot;
3426 if (vdef)
3427 changed |= set_ssa_val_to (vdef, vdef);
3428 if (lhs)
3429 changed |= set_ssa_val_to (lhs, lhs);
3430 vr2 = current_info->references_pool->allocate ();
3431 vr2->vuse = vr1.vuse;
3432 /* As we are not walking the virtual operand chain we know the
3433 shared_lookup_references are still original so we can re-use
3434 them here. */
3435 vr2->operands = vr1.operands.copy ();
3436 vr2->type = vr1.type;
3437 vr2->set = vr1.set;
3438 vr2->hashcode = vr1.hashcode;
3439 vr2->result = lhs;
3440 vr2->result_vdef = vdef;
3441 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3442 INSERT);
3443 gcc_assert (!*slot);
3444 *slot = vr2;
3447 return changed;
3450 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3451 and return true if the value number of the LHS has changed as a result. */
3453 static bool
3454 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3456 bool changed = false;
3457 tree last_vuse;
3458 tree result;
3460 last_vuse = gimple_vuse (stmt);
3461 last_vuse_ptr = &last_vuse;
3462 result = vn_reference_lookup (op, gimple_vuse (stmt),
3463 default_vn_walk_kind, NULL, true);
3464 last_vuse_ptr = NULL;
3466 /* We handle type-punning through unions by value-numbering based
3467 on offset and size of the access. Be prepared to handle a
3468 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
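/* For example, with
     union { int i; float f; } u;
     u.i = _2;
     _3 = u.f;
   the load of u.f is value-numbered to VIEW_CONVERT_EXPR <float> of the
   value of _2 instead of being rejected because of the type mismatch.  */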
3469 if (result
3470 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3472 /* We will be setting the value number of lhs to the value number
3473 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3474 So first simplify and lookup this expression to see if it
3475 is already available. */
3476 code_helper rcode = VIEW_CONVERT_EXPR;
3477 tree ops[3] = { result };
3478 result = vn_nary_build_or_lookup (rcode, TREE_TYPE (op), ops);
3481 if (result)
3482 changed = set_ssa_val_to (lhs, result);
3483 else
3485 changed = set_ssa_val_to (lhs, lhs);
3486 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3489 return changed;
3493 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3494 and return true if the value number of the LHS has changed as a result. */
3496 static bool
3497 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3499 bool changed = false;
3500 vn_reference_t vnresult = NULL;
3501 tree result, assign;
3502 bool resultsame = false;
3503 tree vuse = gimple_vuse (stmt);
3504 tree vdef = gimple_vdef (stmt);
3506 if (TREE_CODE (op) == SSA_NAME)
3507 op = SSA_VAL (op);
3509 /* First we want to look up using the *vuses* from the store and see
3510 if the last store to this location with the same address
3511 had the same value.
3513 The vuses represent the memory state before the store. If the
3514 memory state, address, and value of the store is the same as the
3515 last store to this location, then this store will produce the
3516 same memory state as that store.
3518 In this case the vdef versions for this store are value numbered to those
3519 vuse versions, since they represent the same memory state after
3520 this store.
3522 Otherwise, the vdefs for the store are used when inserting into
3523 the table, since the store generates a new memory state. */
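/* For example, in
     *p = _2;   with VDEF .MEM_3
     ...
     *p = _2;   with VUSE .MEM_3 and VDEF .MEM_4
   the second store does not change the memory state, so .MEM_4 is
   value-numbered to .MEM_3 rather than to itself.  */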
3525 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL, false);
3527 if (result)
3529 if (TREE_CODE (result) == SSA_NAME)
3530 result = SSA_VAL (result);
3531 resultsame = expressions_equal_p (result, op);
3534 if ((!result || !resultsame)
3535 /* Only perform the following when being called from PRE
3536 which embeds tail merging. */
3537 && default_vn_walk_kind == VN_WALK)
3539 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3540 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3541 if (vnresult)
3543 VN_INFO (vdef)->use_processed = true;
3544 return set_ssa_val_to (vdef, vnresult->result_vdef);
3548 if (!result || !resultsame)
3550 if (dump_file && (dump_flags & TDF_DETAILS))
3552 fprintf (dump_file, "No store match\n");
3553 fprintf (dump_file, "Value numbering store ");
3554 print_generic_expr (dump_file, lhs, 0);
3555 fprintf (dump_file, " to ");
3556 print_generic_expr (dump_file, op, 0);
3557 fprintf (dump_file, "\n");
3559 /* Have to set value numbers before insert, since insert is
3560 going to valueize the references in-place. */
3561 if (vdef)
3563 changed |= set_ssa_val_to (vdef, vdef);
3566 /* Do not insert structure copies into the tables. */
3567 if (is_gimple_min_invariant (op)
3568 || is_gimple_reg (op))
3569 vn_reference_insert (lhs, op, vdef, NULL);
3571 /* Only perform the following when being called from PRE
3572 which embeds tail merging. */
3573 if (default_vn_walk_kind == VN_WALK)
3575 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3576 vn_reference_insert (assign, lhs, vuse, vdef);
3579 else
3581 /* We had a match, so value number the vdef to have the value
3582 number of the vuse it came from. */
3584 if (dump_file && (dump_flags & TDF_DETAILS))
3585 fprintf (dump_file, "Store matched earlier value, "
3586 "value numbering store vdefs to matching vuses.\n");
3588 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3591 return changed;
3594 /* Visit and value number PHI, return true if the value number
3595 changed. */
3597 static bool
3598 visit_phi (gimple *phi)
3600 bool changed = false;
3601 tree result;
3602 tree sameval = VN_TOP;
3603 bool allsame = true;
3604 unsigned n_executable = 0;
3606 /* TODO: We could check for this in init_sccvn, and replace this
3607 with a gcc_assert. */
3608 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3609 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3611 /* See if all non-TOP arguments have the same value. TOP is
3612 equivalent to everything, so we can ignore it. */
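/* For example, PHI <x_1(2), VN_TOP(3), x_1(4)> value-numbers to x_1
   when the listed edges are executable, since the undefined argument
   is compatible with any value.  */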
3613 edge_iterator ei;
3614 edge e;
3615 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3616 if (e->flags & EDGE_EXECUTABLE)
3618 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3620 ++n_executable;
3621 if (TREE_CODE (def) == SSA_NAME)
3622 def = SSA_VAL (def);
3623 if (def == VN_TOP)
3624 continue;
3625 if (sameval == VN_TOP)
3626 sameval = def;
3627 else if (!expressions_equal_p (def, sameval))
3629 allsame = false;
3630 break;
3634 /* If none of the edges was executable or all incoming values are
3635 undefined, keep the value-number at VN_TOP. If only a single edge
3636 is executable, use its value. */
3637 if (sameval == VN_TOP
3638 || n_executable == 1)
3639 return set_ssa_val_to (PHI_RESULT (phi), sameval);
3641 /* First see if it is equivalent to a phi node in this block. We prefer
3642 this as it allows IV elimination - see PRs 66502 and 67167. */
3643 result = vn_phi_lookup (phi);
3644 if (result)
3645 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3646 /* Otherwise all value numbered to the same value, the phi node has that
3647 value. */
3648 else if (allsame)
3649 changed = set_ssa_val_to (PHI_RESULT (phi), sameval);
3650 else
3652 vn_phi_insert (phi, PHI_RESULT (phi));
3653 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3656 return changed;
3659 /* Try to simplify RHS using equivalences and constant folding. */
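/* For example, when a_1 is currently value-numbered to the constant 4,
   the statement x_2 = a_1 + 1 folds to 5 by consulting the lattice
   through vn_valueize.  */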
3661 static tree
3662 try_to_simplify (gassign *stmt)
3664 enum tree_code code = gimple_assign_rhs_code (stmt);
3665 tree tem;
3667 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
3668 in this case, there is no point in doing extra work. */
3669 if (code == SSA_NAME)
3670 return NULL_TREE;
3672 /* First try constant folding based on our current lattice. */
3673 mprts_hook = vn_lookup_simplify_result;
3674 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3675 mprts_hook = NULL;
3676 if (tem
3677 && (TREE_CODE (tem) == SSA_NAME
3678 || is_gimple_min_invariant (tem)))
3679 return tem;
3681 return NULL_TREE;
3684 /* Visit and value number USE, return true if the value number
3685 changed. */
3687 static bool
3688 visit_use (tree use)
3690 bool changed = false;
3691 gimple *stmt = SSA_NAME_DEF_STMT (use);
3693 mark_use_processed (use);
3695 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3696 if (dump_file && (dump_flags & TDF_DETAILS)
3697 && !SSA_NAME_IS_DEFAULT_DEF (use))
3699 fprintf (dump_file, "Value numbering ");
3700 print_generic_expr (dump_file, use, 0);
3701 fprintf (dump_file, " stmt = ");
3702 print_gimple_stmt (dump_file, stmt, 0, 0);
3705 /* Handle uninitialized uses. */
3706 if (SSA_NAME_IS_DEFAULT_DEF (use))
3707 changed = set_ssa_val_to (use, use);
3708 else if (gimple_code (stmt) == GIMPLE_PHI)
3709 changed = visit_phi (stmt);
3710 else if (gimple_has_volatile_ops (stmt))
3711 changed = defs_to_varying (stmt);
3712 else if (gassign *ass = dyn_cast <gassign *> (stmt))
3714 enum tree_code code = gimple_assign_rhs_code (ass);
3715 tree lhs = gimple_assign_lhs (ass);
3716 tree rhs1 = gimple_assign_rhs1 (ass);
3717 tree simplified;
3719 /* Shortcut for copies. Simplifying copies is pointless,
3720 since we copy the expression and value they represent. */
3721 if (code == SSA_NAME
3722 && TREE_CODE (lhs) == SSA_NAME)
3724 changed = visit_copy (lhs, rhs1);
3725 goto done;
3727 simplified = try_to_simplify (ass);
3728 if (simplified)
3730 if (dump_file && (dump_flags & TDF_DETAILS))
3732 fprintf (dump_file, "RHS ");
3733 print_gimple_expr (dump_file, ass, 0, 0);
3734 fprintf (dump_file, " simplified to ");
3735 print_generic_expr (dump_file, simplified, 0);
3736 fprintf (dump_file, "\n");
3739 /* Setting value numbers to constants will occasionally
3740 screw up phi congruence because constants are not
3741 uniquely associated with a single ssa name that can be
3742 looked up. */
3743 if (simplified
3744 && is_gimple_min_invariant (simplified)
3745 && TREE_CODE (lhs) == SSA_NAME)
3747 changed = set_ssa_val_to (lhs, simplified);
3748 goto done;
3750 else if (simplified
3751 && TREE_CODE (simplified) == SSA_NAME
3752 && TREE_CODE (lhs) == SSA_NAME)
3754 changed = visit_copy (lhs, simplified);
3755 goto done;
3758 if ((TREE_CODE (lhs) == SSA_NAME
3759 /* We can substitute SSA_NAMEs that are live over
3760 abnormal edges with their constant value. */
3761 && !(gimple_assign_copy_p (ass)
3762 && is_gimple_min_invariant (rhs1))
3763 && !(simplified
3764 && is_gimple_min_invariant (simplified))
3765 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3766 /* Stores or copies from SSA_NAMEs that are live over
3767 abnormal edges are a problem. */
3768 || (code == SSA_NAME
3769 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3770 changed = defs_to_varying (ass);
3771 else if (REFERENCE_CLASS_P (lhs)
3772 || DECL_P (lhs))
3773 changed = visit_reference_op_store (lhs, rhs1, ass);
3774 else if (TREE_CODE (lhs) == SSA_NAME)
3776 if ((gimple_assign_copy_p (ass)
3777 && is_gimple_min_invariant (rhs1))
3778 || (simplified
3779 && is_gimple_min_invariant (simplified)))
3781 if (simplified)
3782 changed = set_ssa_val_to (lhs, simplified);
3783 else
3784 changed = set_ssa_val_to (lhs, rhs1);
3786 else
3788 /* Visit the original statement. */
3789 switch (vn_get_stmt_kind (ass))
3791 case VN_NARY:
3792 changed = visit_nary_op (lhs, ass);
3793 break;
3794 case VN_REFERENCE:
3795 changed = visit_reference_op_load (lhs, rhs1, ass);
3796 break;
3797 default:
3798 changed = defs_to_varying (ass);
3799 break;
3803 else
3804 changed = defs_to_varying (ass);
3806 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
3808 tree lhs = gimple_call_lhs (call_stmt);
3809 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3811 /* Try constant folding based on our current lattice. */
3812 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
3813 vn_valueize);
3814 if (simplified)
3816 if (dump_file && (dump_flags & TDF_DETAILS))
3818 fprintf (dump_file, "call ");
3819 print_gimple_expr (dump_file, call_stmt, 0, 0);
3820 fprintf (dump_file, " simplified to ");
3821 print_generic_expr (dump_file, simplified, 0);
3822 fprintf (dump_file, "\n");
3825 /* Setting value numbers to constants will occasionally
3826 screw up phi congruence because constants are not
3827 uniquely associated with a single ssa name that can be
3828 looked up. */
3829 if (simplified
3830 && is_gimple_min_invariant (simplified))
3832 changed = set_ssa_val_to (lhs, simplified);
3833 if (gimple_vdef (call_stmt))
3834 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
3835 SSA_VAL (gimple_vuse (call_stmt)));
3836 goto done;
3838 else if (simplified
3839 && TREE_CODE (simplified) == SSA_NAME)
3841 changed = visit_copy (lhs, simplified);
3842 if (gimple_vdef (call_stmt))
3843 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
3844 SSA_VAL (gimple_vuse (call_stmt)));
3845 goto done;
3847 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3849 changed = defs_to_varying (call_stmt);
3850 goto done;
3854 if (!gimple_call_internal_p (call_stmt)
3855 && (/* Calls to the same function with the same vuse
3856 and the same operands do not necessarily return the same
3857 value, unless they're pure or const. */
3858 gimple_call_flags (call_stmt) & (ECF_PURE | ECF_CONST)
3859 /* If calls have a vdef, subsequent calls won't have
3860 the same incoming vuse. So, if 2 calls with vdef have the
3861 same vuse, we know they're not subsequent.
3862 We can value number 2 calls to the same function with the
2863 same vuse and the same operands, which are not subsequent,
3864 the same, because there is no code in the program that can
3865 compare the 2 values... */
3866 || (gimple_vdef (call_stmt)
3867 /* ... unless the call returns a pointer which does
3868 not alias with anything else. In which case the
2869 information that the values are distinct is encoded
3870 in the IL. */
3871 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
3872 /* Only perform the following when being called from PRE
3873 which embeds tail merging. */
3874 && default_vn_walk_kind == VN_WALK)))
3875 changed = visit_reference_op_call (lhs, call_stmt);
3876 else
3877 changed = defs_to_varying (call_stmt);
3879 else
3880 changed = defs_to_varying (stmt);
3881 done:
3882 return changed;
3885 /* Compare two operands by reverse postorder index */
3887 static int
3888 compare_ops (const void *pa, const void *pb)
3890 const tree opa = *((const tree *)pa);
3891 const tree opb = *((const tree *)pb);
3892 gimple *opstmta = SSA_NAME_DEF_STMT (opa);
3893 gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
3894 basic_block bba;
3895 basic_block bbb;
3897 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3898 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3899 else if (gimple_nop_p (opstmta))
3900 return -1;
3901 else if (gimple_nop_p (opstmtb))
3902 return 1;
3904 bba = gimple_bb (opstmta);
3905 bbb = gimple_bb (opstmtb);
3907 if (!bba && !bbb)
3908 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3909 else if (!bba)
3910 return -1;
3911 else if (!bbb)
3912 return 1;
3914 if (bba == bbb)
3916 if (gimple_code (opstmta) == GIMPLE_PHI
3917 && gimple_code (opstmtb) == GIMPLE_PHI)
3918 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3919 else if (gimple_code (opstmta) == GIMPLE_PHI)
3920 return -1;
3921 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3922 return 1;
3923 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3924 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3925 else
3926 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3928 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3931 /* Sort an array containing members of a strongly connected component
3932 SCC so that the members are ordered by RPO number.
3933 This means that when the sort is complete, iterating through the
3934 array will give you the members in RPO order. */
3936 static void
3937 sort_scc (vec<tree> scc)
3939 scc.qsort (compare_ops);
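/* compare_ops above is just a qsort comparator: the primary key is the RPO
   number of the defining block, with deterministic tie-breaks (PHIs first,
   then statement UIDs, then SSA versions).  Below is a minimal,
   self-contained sketch of the same pattern on an invented struct, for
   illustration only; none of these names exist in SCCVN.  */

#include <stdlib.h>

struct demo_op
{
  int rpo;	/* Stand-in for rpo_numbers[gimple_bb (def)->index].  */
  int version;	/* Stand-in for SSA_NAME_VERSION, used as a tie-break.  */
};

/* Order by RPO number first; fall back to the version so the result is
   deterministic when two entries live in the same block.  */
static int
demo_compare_ops (const void *pa, const void *pb)
{
  const struct demo_op *a = (const struct demo_op *) pa;
  const struct demo_op *b = (const struct demo_op *) pb;
  if (a->rpo != b->rpo)
    return a->rpo - b->rpo;
  return a->version - b->version;
}

/* Sorting with the comparator mirrors what sort_scc does for an SCC.  */
static void
demo_sort_ops (struct demo_op *ops, size_t n)
{
  qsort (ops, n, sizeof (*ops), demo_compare_ops);
}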
3942 /* Insert the no longer used nary ONARY into the hash INFO. */
3944 static void
3945 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3947 size_t size = sizeof_vn_nary_op (onary->length);
3948 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3949 &info->nary_obstack);
3950 memcpy (nary, onary, size);
3951 vn_nary_op_insert_into (nary, info->nary, false);
3954 /* Insert the no longer used phi OPHI into the hash INFO. */
3956 static void
3957 copy_phi (vn_phi_t ophi, vn_tables_t info)
3959 vn_phi_t phi = info->phis_pool->allocate ();
3960 vn_phi_s **slot;
3961 memcpy (phi, ophi, sizeof (*phi));
3962 ophi->phiargs.create (0);
3963 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
3964 gcc_assert (!*slot);
3965 *slot = phi;
3968 /* Insert the no longer used reference OREF into the hash INFO. */
3970 static void
3971 copy_reference (vn_reference_t oref, vn_tables_t info)
3973 vn_reference_t ref;
3974 vn_reference_s **slot;
3975 ref = info->references_pool->allocate ();
3976 memcpy (ref, oref, sizeof (*ref));
3977 oref->operands.create (0);
3978 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
3979 if (*slot)
3980 free_reference (*slot);
3981 *slot = ref;
3984 /* Process a strongly connected component in the SSA graph. */
3986 static void
3987 process_scc (vec<tree> scc)
3989 tree var;
3990 unsigned int i;
3991 unsigned int iterations = 0;
3992 bool changed = true;
3993 vn_nary_op_iterator_type hin;
3994 vn_phi_iterator_type hip;
3995 vn_reference_iterator_type hir;
3996 vn_nary_op_t nary;
3997 vn_phi_t phi;
3998 vn_reference_t ref;
4000 /* If the SCC has a single member, just visit it. */
4001 if (scc.length () == 1)
4003 tree use = scc[0];
4004 if (VN_INFO (use)->use_processed)
4005 return;
4006 /* We need to make sure it doesn't form a cycle itself, which can
4007 happen for self-referential PHI nodes. In that case we would
4008 end up inserting an expression with VN_TOP operands into the
4009 valid table which makes us derive bogus equivalences later.
4010 The cheapest way to check this is to assume a cycle for all PHI nodes. */
4011 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
4012 /* Fallthru to iteration. */ ;
4013 else
4015 visit_use (use);
4016 return;
4020 if (dump_file && (dump_flags & TDF_DETAILS))
4021 print_scc (dump_file, scc);
4023 /* Iterate over the SCC with the optimistic table until it stops
4024 changing. */
4025 current_info = optimistic_info;
4026 while (changed)
4028 changed = false;
4029 iterations++;
4030 if (dump_file && (dump_flags & TDF_DETAILS))
4031 fprintf (dump_file, "Starting iteration %d\n", iterations);
4032 /* As we are value-numbering optimistically we have to
4033 clear the expression tables and the simplified expressions
4034 in each iteration until we converge. */
4035 optimistic_info->nary->empty ();
4036 optimistic_info->phis->empty ();
4037 optimistic_info->references->empty ();
4038 obstack_free (&optimistic_info->nary_obstack, NULL);
4039 gcc_obstack_init (&optimistic_info->nary_obstack);
4040 optimistic_info->phis_pool->release ();
4041 optimistic_info->references_pool->release ();
4042 FOR_EACH_VEC_ELT (scc, i, var)
4043 gcc_assert (!VN_INFO (var)->needs_insertion
4044 && VN_INFO (var)->expr == NULL);
4045 FOR_EACH_VEC_ELT (scc, i, var)
4046 changed |= visit_use (var);
4049 if (dump_file && (dump_flags & TDF_DETAILS))
4050 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4051 statistics_histogram_event (cfun, "SCC iterations", iterations);
4053 /* Finally, copy the contents of the no longer used optimistic
4054 table to the valid table. */
4055 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
4056 copy_nary (nary, valid_info);
4057 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
4058 copy_phi (phi, valid_info);
4059 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
4060 ref, vn_reference_t, hir)
4061 copy_reference (ref, valid_info);
4063 current_info = valid_info;
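/* The while (changed) loop above is a plain optimistic fixpoint iteration:
   reset the scratch tables, revisit every member, and repeat until a whole
   pass makes no change.  Below is a minimal self-contained sketch of that
   control structure, with the SCC reduced to an array of ints and an
   invented, monotone "visit" function; none of these names exist in SCCVN.  */

#include <stdbool.h>

/* Stand-in for visit_use: recompute member I from its neighbour and report
   whether its value changed.  */
static bool
demo_visit_member (int *vals, unsigned i, unsigned n)
{
  int old = vals[i];
  int prev = vals[(i + n - 1) % n];
  vals[i] = prev < old ? prev : old;	/* Monotone meet, so we converge.  */
  return vals[i] != old;
}

/* Stand-in for process_scc's iteration: keep revisiting all members until an
   entire pass leaves everything unchanged.  */
static unsigned
demo_iterate_scc (int *vals, unsigned n)
{
  unsigned iterations = 0;
  bool changed = true;
  while (changed)
    {
      changed = false;
      iterations++;
      for (unsigned i = 0; i < n; ++i)
	changed |= demo_visit_member (vals, i, n);
    }
  return iterations;
}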
4067 /* Pop the components of the found SCC for NAME off the SCC stack
4068 and process them. Returns true if all went well, false if
4069 we run into resource limits. */
4071 static bool
4072 extract_and_process_scc_for_name (tree name)
4074 auto_vec<tree> scc;
4075 tree x;
4077 /* Found an SCC, pop the components off the SCC stack and
4078 process them. */
4081 x = sccstack.pop ();
4083 VN_INFO (x)->on_sccstack = false;
4084 scc.safe_push (x);
4085 } while (x != name);
4087 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
4088 if (scc.length ()
4089 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4091 if (dump_file)
4092 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
4093 "SCC size %u exceeding %u\n", scc.length (),
4094 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4096 return false;
4099 if (scc.length () > 1)
4100 sort_scc (scc);
4102 process_scc (scc);
4104 return true;
4107 /* Depth first search on NAME to discover and process SCC's in the SSA
4108 graph.
4109 Execution of this algorithm relies on the fact that the SCC's are
4110 popped off the stack in topological order.
4111 Returns true if successful, false if we stopped processing SCC's due
4112 to resource constraints. */
4114 static bool
4115 DFS (tree name)
4117 vec<ssa_op_iter> itervec = vNULL;
4118 vec<tree> namevec = vNULL;
4119 use_operand_p usep = NULL;
4120 gimple *defstmt;
4121 tree use;
4122 ssa_op_iter iter;
4124 start_over:
4125 /* SCC info */
4126 VN_INFO (name)->dfsnum = next_dfs_num++;
4127 VN_INFO (name)->visited = true;
4128 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4130 sccstack.safe_push (name);
4131 VN_INFO (name)->on_sccstack = true;
4132 defstmt = SSA_NAME_DEF_STMT (name);
4134 /* Recursively DFS on our operands, looking for SCC's. */
4135 if (!gimple_nop_p (defstmt))
4137 /* Push a new iterator. */
4138 if (gphi *phi = dyn_cast <gphi *> (defstmt))
4139 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4140 else
4141 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4143 else
4144 clear_and_done_ssa_iter (&iter);
4146 while (1)
4148 /* If we are done processing uses of a name, go up the stack
4149 of iterators and process SCCs as we found them. */
4150 if (op_iter_done (&iter))
4152 /* See if we found an SCC. */
4153 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4154 if (!extract_and_process_scc_for_name (name))
4156 namevec.release ();
4157 itervec.release ();
4158 return false;
4161 /* Check if we are done. */
4162 if (namevec.is_empty ())
4164 namevec.release ();
4165 itervec.release ();
4166 return true;
4169 /* Restore the last use walker and continue walking there. */
4170 use = name;
4171 name = namevec.pop ();
4172 memcpy (&iter, &itervec.last (),
4173 sizeof (ssa_op_iter));
4174 itervec.pop ();
4175 goto continue_walking;
4178 use = USE_FROM_PTR (usep);
4180 /* Since we handle phi nodes, we will sometimes get
4181 invariants in the use expression. */
4182 if (TREE_CODE (use) == SSA_NAME)
4184 if (! (VN_INFO (use)->visited))
4186 /* Recurse by pushing the current use walking state on
4187 the stack and starting over. */
4188 itervec.safe_push (iter);
4189 namevec.safe_push (name);
4190 name = use;
4191 goto start_over;
4193 continue_walking:
4194 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4195 VN_INFO (use)->low);
4197 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4198 && VN_INFO (use)->on_sccstack)
4200 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4201 VN_INFO (name)->low);
4205 usep = op_iter_next_use (&iter);
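/* The explicit namevec/itervec machinery above is an iterative encoding of
   Tarjan's recursive SCC algorithm.  The self-contained sketch below shows
   the same dfsnum/low/on-stack bookkeeping in its recursive form over a tiny
   adjacency-list graph; every name here is invented for illustration and
   none of it is part of SCCVN.  */

#include <stdbool.h>

#define DEMO_MAXV 16

struct demo_graph
{
  int nvert;			/* Number of vertices.  */
  int nsucc[DEMO_MAXV];		/* Successor counts.  */
  int succ[DEMO_MAXV][DEMO_MAXV];	/* Successor lists.  */
};

static int demo_dfsnum[DEMO_MAXV], demo_low[DEMO_MAXV];
static bool demo_visited[DEMO_MAXV], demo_on_stack[DEMO_MAXV];
static int demo_stack[DEMO_MAXV], demo_sp, demo_next_dfs = 1;

static void
demo_dfs (struct demo_graph *g, int v)
{
  demo_dfsnum[v] = demo_low[v] = demo_next_dfs++;
  demo_visited[v] = true;
  demo_stack[demo_sp++] = v;
  demo_on_stack[v] = true;

  for (int i = 0; i < g->nsucc[v]; ++i)
    {
      int w = g->succ[v][i];
      if (!demo_visited[w])
	{
	  /* Recurse; the code above does this by pushing the current
	     iterator state and jumping to start_over.  */
	  demo_dfs (g, w);
	  if (demo_low[w] < demo_low[v])
	    demo_low[v] = demo_low[w];
	}
      else if (demo_on_stack[w] && demo_dfsnum[w] < demo_dfsnum[v])
	{
	  if (demo_dfsnum[w] < demo_low[v])
	    demo_low[v] = demo_dfsnum[w];
	}
    }

  /* V is the root of an SCC: pop its members, mirroring
     extract_and_process_scc_for_name.  */
  if (demo_low[v] == demo_dfsnum[v])
    {
      int w;
      do
	{
	  w = demo_stack[--demo_sp];
	  demo_on_stack[w] = false;
	  /* Process W as a member of V's SCC here.  */
	}
      while (w != v);
    }
}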
4209 /* Allocate a value number table. */
4211 static void
4212 allocate_vn_table (vn_tables_t table)
4214 table->phis = new vn_phi_table_type (23);
4215 table->nary = new vn_nary_op_table_type (23);
4216 table->references = new vn_reference_table_type (23);
4218 gcc_obstack_init (&table->nary_obstack);
4219 table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4220 table->references_pool = new object_allocator<vn_reference_s>
4221 ("VN references");
4224 /* Free a value number table. */
4226 static void
4227 free_vn_table (vn_tables_t table)
4229 delete table->phis;
4230 table->phis = NULL;
4231 delete table->nary;
4232 table->nary = NULL;
4233 delete table->references;
4234 table->references = NULL;
4235 obstack_free (&table->nary_obstack, NULL);
4236 delete table->phis_pool;
4237 delete table->references_pool;
4240 static void
4241 init_scc_vn (void)
4243 size_t i;
4244 int j;
4245 int *rpo_numbers_temp;
4247 calculate_dominance_info (CDI_DOMINATORS);
4248 mark_dfs_back_edges ();
4250 sccstack.create (0);
4251 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4253 constant_value_ids = BITMAP_ALLOC (NULL);
4255 next_dfs_num = 1;
4256 next_value_id = 1;
4258 vn_ssa_aux_table.create (num_ssa_names + 1);
4259 /* VEC_alloc doesn't actually grow it to the right size; it just
4260 preallocates the space to do so. */
4261 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4262 gcc_obstack_init (&vn_ssa_aux_obstack);
4264 shared_lookup_phiargs.create (0);
4265 shared_lookup_references.create (0);
4266 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4267 rpo_numbers_temp =
4268 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4269 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4271 /* rpo_numbers_temp is an array in RPO order: rpo[i] = bb means that
4272 the i'th block in RPO order is bb. We want to map block indices to
4273 RPO numbers, so we need to invert this array. */
4274 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4275 rpo_numbers[rpo_numbers_temp[j]] = j;
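/* For example (values invented purely for illustration): if the blocks in
   RPO order are rpo_numbers_temp[] = { 2, 5, 3, 4 }, the loop above sets
   rpo_numbers[2] = 0, rpo_numbers[5] = 1, rpo_numbers[3] = 2 and
   rpo_numbers[4] = 3, i.e. rpo_numbers maps a block index back to its
   position in the reverse postorder.  */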
4277 XDELETE (rpo_numbers_temp);
4279 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
4281 renumber_gimple_stmt_uids ();
4283 /* Create the valid and optimistic value numbering tables. */
4284 valid_info = XCNEW (struct vn_tables_s);
4285 allocate_vn_table (valid_info);
4286 optimistic_info = XCNEW (struct vn_tables_s);
4287 allocate_vn_table (optimistic_info);
4288 current_info = valid_info;
4290 /* Create the VN_INFO structures, and initialize value numbers to
4291 TOP or VARYING for parameters. */
4292 for (i = 1; i < num_ssa_names; i++)
4294 tree name = ssa_name (i);
4295 if (!name)
4296 continue;
4298 VN_INFO_GET (name)->valnum = VN_TOP;
4299 VN_INFO (name)->needs_insertion = false;
4300 VN_INFO (name)->expr = NULL;
4301 VN_INFO (name)->value_id = 0;
4303 if (!SSA_NAME_IS_DEFAULT_DEF (name))
4304 continue;
4306 switch (TREE_CODE (SSA_NAME_VAR (name)))
4308 case VAR_DECL:
4309 /* Undefined vars keep TOP. */
4310 break;
4312 case PARM_DECL:
4313 /* Parameters are VARYING but we can record a condition
4314 if we know it is a non-NULL pointer. */
4315 VN_INFO (name)->visited = true;
4316 VN_INFO (name)->valnum = name;
4317 if (POINTER_TYPE_P (TREE_TYPE (name))
4318 && nonnull_arg_p (SSA_NAME_VAR (name)))
4320 tree ops[2];
4321 ops[0] = name;
4322 ops[1] = build_int_cst (TREE_TYPE (name), 0);
4323 vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4324 boolean_true_node, 0);
4325 if (dump_file && (dump_flags & TDF_DETAILS))
4327 fprintf (dump_file, "Recording ");
4328 print_generic_expr (dump_file, name, TDF_SLIM);
4329 fprintf (dump_file, " != 0\n");
4332 break;
4334 case RESULT_DECL:
4335 /* If the result is passed by invisible reference, the default
4336 def is initialized; otherwise it's uninitialized. */
4337 if (DECL_BY_REFERENCE (SSA_NAME_VAR (name)))
4339 VN_INFO (name)->visited = true;
4340 VN_INFO (name)->valnum = name;
4342 break;
4344 default:
4345 gcc_unreachable ();
4350 /* Restore SSA info that has been reset on value leaders. */
4352 void
4353 scc_vn_restore_ssa_info (void)
4355 for (unsigned i = 0; i < num_ssa_names; i++)
4357 tree name = ssa_name (i);
4358 if (name
4359 && has_VN_INFO (name))
4361 if (VN_INFO (name)->needs_insertion)
4363 else if (POINTER_TYPE_P (TREE_TYPE (name))
4364 && VN_INFO (name)->info.ptr_info)
4365 SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4366 else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4367 && VN_INFO (name)->info.range_info)
4369 SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4370 SSA_NAME_ANTI_RANGE_P (name)
4371 = VN_INFO (name)->range_info_anti_range_p;
4377 void
4378 free_scc_vn (void)
4380 size_t i;
4382 delete constant_to_value_id;
4383 constant_to_value_id = NULL;
4384 BITMAP_FREE (constant_value_ids);
4385 shared_lookup_phiargs.release ();
4386 shared_lookup_references.release ();
4387 XDELETEVEC (rpo_numbers);
4389 for (i = 0; i < num_ssa_names; i++)
4391 tree name = ssa_name (i);
4392 if (name
4393 && has_VN_INFO (name)
4394 && VN_INFO (name)->needs_insertion)
4395 release_ssa_name (name);
4397 obstack_free (&vn_ssa_aux_obstack, NULL);
4398 vn_ssa_aux_table.release ();
4400 sccstack.release ();
4401 free_vn_table (valid_info);
4402 XDELETE (valid_info);
4403 free_vn_table (optimistic_info);
4404 XDELETE (optimistic_info);
4406 BITMAP_FREE (const_parms);
4409 /* Set *ID according to RESULT. */
4411 static void
4412 set_value_id_for_result (tree result, unsigned int *id)
4414 if (result && TREE_CODE (result) == SSA_NAME)
4415 *id = VN_INFO (result)->value_id;
4416 else if (result && is_gimple_min_invariant (result))
4417 *id = get_or_alloc_constant_value_id (result);
4418 else
4419 *id = get_next_value_id ();
4422 /* Set the value ids in the valid hash tables. */
4424 static void
4425 set_hashtable_value_ids (void)
4427 vn_nary_op_iterator_type hin;
4428 vn_phi_iterator_type hip;
4429 vn_reference_iterator_type hir;
4430 vn_nary_op_t vno;
4431 vn_reference_t vr;
4432 vn_phi_t vp;
4434 /* Now set the value ids of the things we had put in the hash
4435 table. */
4437 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4438 set_value_id_for_result (vno->result, &vno->value_id);
4440 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4441 set_value_id_for_result (vp->result, &vp->value_id);
4443 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4444 hir)
4445 set_value_id_for_result (vr->result, &vr->value_id);
4448 class sccvn_dom_walker : public dom_walker
4450 public:
4451 sccvn_dom_walker ()
4452 : dom_walker (CDI_DOMINATORS, true), fail (false), cond_stack (vNULL) {}
4453 ~sccvn_dom_walker ();
4455 virtual edge before_dom_children (basic_block);
4456 virtual void after_dom_children (basic_block);
4458 void record_cond (basic_block,
4459 enum tree_code code, tree lhs, tree rhs, bool value);
4460 void record_conds (basic_block,
4461 enum tree_code code, tree lhs, tree rhs, bool value);
4463 bool fail;
4464 vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4465 cond_stack;
4468 sccvn_dom_walker::~sccvn_dom_walker ()
4470 cond_stack.release ();
4473 /* Record a temporary condition for the BB and its dominated blocks. */
4475 void
4476 sccvn_dom_walker::record_cond (basic_block bb,
4477 enum tree_code code, tree lhs, tree rhs,
4478 bool value)
4480 tree ops[2] = { lhs, rhs };
4481 vn_nary_op_t old = NULL;
4482 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4483 current_info->nary->remove_elt_with_hash (old, old->hashcode);
4484 vn_nary_op_t cond
4485 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4486 value
4487 ? boolean_true_node
4488 : boolean_false_node, 0);
4489 if (dump_file && (dump_flags & TDF_DETAILS))
4491 fprintf (dump_file, "Recording temporarily ");
4492 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4493 fprintf (dump_file, " %s ", get_tree_code_name (code));
4494 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4495 fprintf (dump_file, " == %s%s\n",
4496 value ? "true" : "false",
4497 old ? " (old entry saved)" : "");
4499 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4502 /* Record temporary conditions for the BB and its dominated blocks
4503 according to LHS CODE RHS == VALUE, together with the conditions it implies. */
4505 void
4506 sccvn_dom_walker::record_conds (basic_block bb,
4507 enum tree_code code, tree lhs, tree rhs,
4508 bool value)
4510 /* Record the original condition. */
4511 record_cond (bb, code, lhs, rhs, value);
4513 if (!value)
4514 return;
4516 /* Record dominated conditions if the condition is true. Note that
4517 the inversion is already recorded. */
4518 switch (code)
4520 case LT_EXPR:
4521 case GT_EXPR:
4522 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4523 record_cond (bb, NE_EXPR, lhs, rhs, true);
4524 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4525 break;
4527 case EQ_EXPR:
4528 record_cond (bb, LE_EXPR, lhs, rhs, true);
4529 record_cond (bb, GE_EXPR, lhs, rhs, true);
4530 record_cond (bb, LT_EXPR, lhs, rhs, false);
4531 record_cond (bb, GT_EXPR, lhs, rhs, false);
4532 break;
4534 default:
4535 break;
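/* The extra conditions recorded above are plain arithmetic implications:
   a < b implies a <= b and a != b, and a == b implies a <= b and a >= b.
   Below is a trivial self-contained check of those implications on ints,
   with an invented name, purely for illustration.  */

#include <stdbool.h>

static bool
demo_derived_conds_hold (int a, int b)
{
  if (a < b)
    /* What record_conds registers for LT: LE and NE are true, EQ is false.  */
    return a <= b && a != b && !(a == b);
  if (a == b)
    /* What record_conds registers for EQ: LE and GE are true, LT and GT
       are false.  */
    return a <= b && a >= b && !(a < b) && !(a > b);
  return true;
}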
4539 /* Restore expressions and values derived from conditionals. */
4541 void
4542 sccvn_dom_walker::after_dom_children (basic_block bb)
4544 while (!cond_stack.is_empty ()
4545 && cond_stack.last ().first == bb)
4547 vn_nary_op_t cond = cond_stack.last ().second.first;
4548 vn_nary_op_t old = cond_stack.last ().second.second;
4549 current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4550 if (old)
4551 vn_nary_op_insert_into (old, current_info->nary, false);
4552 cond_stack.pop ();
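/* The loop above is the usual scoped-stack idiom of a dominator walk:
   entries are pushed tagged with the block that made them valid, and when
   the walk leaves that block everything it pushed is undone.  A minimal
   self-contained sketch with invented names follows.  */

struct demo_scoped_entry
{
  int bb_index;		/* Block whose scope the entry belongs to.  */
  int value;		/* Whatever was temporarily recorded.  */
};

static struct demo_scoped_entry demo_cond_stack[64];
static unsigned demo_cond_top;

/* Push VALUE for the scope of BB_INDEX (the before_dom_children side).  */
static void
demo_push_cond (int bb_index, int value)
{
  demo_cond_stack[demo_cond_top].bb_index = bb_index;
  demo_cond_stack[demo_cond_top].value = value;
  demo_cond_top++;
}

/* Undo everything recorded for BB_INDEX (the after_dom_children side).  */
static void
demo_pop_conds (int bb_index)
{
  while (demo_cond_top > 0
	 && demo_cond_stack[demo_cond_top - 1].bb_index == bb_index)
    demo_cond_top--;
}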
4556 /* Value number all statements in BB. */
4558 edge
4559 sccvn_dom_walker::before_dom_children (basic_block bb)
4561 edge e;
4562 edge_iterator ei;
4564 if (fail)
4565 return NULL;
4567 if (dump_file && (dump_flags & TDF_DETAILS))
4568 fprintf (dump_file, "Visiting BB %d\n", bb->index);
4570 /* If we have a single predecessor, record the equivalence from a
4571 possible condition on the predecessor edge. */
4572 edge pred_e = NULL;
4573 FOR_EACH_EDGE (e, ei, bb->preds)
4575 /* Ignore simple backedges into this block so that conditions can
4576 still be recorded in loop headers. */
4577 if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
4578 continue;
4579 if (! pred_e)
4580 pred_e = e;
4581 else
4583 pred_e = NULL;
4584 break;
4587 if (pred_e)
4589 /* Check if there are multiple executable successor edges in
4590 the source block. Otherwise there is no additional info
4591 to be recorded. */
4592 edge e2;
4593 FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4594 if (e2 != pred_e
4595 && e2->flags & EDGE_EXECUTABLE)
4596 break;
4597 if (e2 && (e2->flags & EDGE_EXECUTABLE))
4599 gimple *stmt = last_stmt (pred_e->src);
4600 if (stmt
4601 && gimple_code (stmt) == GIMPLE_COND)
4603 enum tree_code code = gimple_cond_code (stmt);
4604 tree lhs = gimple_cond_lhs (stmt);
4605 tree rhs = gimple_cond_rhs (stmt);
4606 record_conds (bb, code, lhs, rhs,
4607 (pred_e->flags & EDGE_TRUE_VALUE) != 0);
4608 code = invert_tree_comparison (code, HONOR_NANS (lhs));
4609 if (code != ERROR_MARK)
4610 record_conds (bb, code, lhs, rhs,
4611 (pred_e->flags & EDGE_TRUE_VALUE) == 0);
4616 /* Value-number all defs in the basic-block. */
4617 for (gphi_iterator gsi = gsi_start_phis (bb);
4618 !gsi_end_p (gsi); gsi_next (&gsi))
4620 gphi *phi = gsi.phi ();
4621 tree res = PHI_RESULT (phi);
4622 if (!VN_INFO (res)->visited
4623 && !DFS (res))
4625 fail = true;
4626 return NULL;
4629 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4630 !gsi_end_p (gsi); gsi_next (&gsi))
4632 ssa_op_iter i;
4633 tree op;
4634 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4635 if (!VN_INFO (op)->visited
4636 && !DFS (op))
4638 fail = true;
4639 return NULL;
4643 /* Finally look at the last stmt. */
4644 gimple *stmt = last_stmt (bb);
4645 if (!stmt)
4646 return NULL;
4648 enum gimple_code code = gimple_code (stmt);
4649 if (code != GIMPLE_COND
4650 && code != GIMPLE_SWITCH
4651 && code != GIMPLE_GOTO)
4652 return NULL;
4654 if (dump_file && (dump_flags & TDF_DETAILS))
4656 fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
4657 print_gimple_stmt (dump_file, stmt, 0, 0);
4660 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4661 if value-numbering can prove they are not reachable. Handling
4662 computed gotos is also possible. */
4663 tree val;
4664 switch (code)
4666 case GIMPLE_COND:
4668 tree lhs = vn_valueize (gimple_cond_lhs (stmt));
4669 tree rhs = vn_valueize (gimple_cond_rhs (stmt));
4670 val = gimple_simplify (gimple_cond_code (stmt),
4671 boolean_type_node, lhs, rhs,
4672 NULL, vn_valueize);
4673 /* If that didn't simplify to a constant see if we have recorded
4674 temporary expressions from taken edges. */
4675 if (!val || TREE_CODE (val) != INTEGER_CST)
4677 tree ops[2];
4678 ops[0] = lhs;
4679 ops[1] = rhs;
4680 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
4681 boolean_type_node, ops, NULL);
4683 break;
4685 case GIMPLE_SWITCH:
4686 val = gimple_switch_index (as_a <gswitch *> (stmt));
4687 break;
4688 case GIMPLE_GOTO:
4689 val = gimple_goto_dest (stmt);
4690 break;
4691 default:
4692 gcc_unreachable ();
4694 if (!val)
4695 return NULL;
4697 edge taken = find_taken_edge (bb, vn_valueize (val));
4698 if (!taken)
4699 return NULL;
4701 if (dump_file && (dump_flags & TDF_DETAILS))
4702 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4703 "not executable\n", bb->index, bb->index, taken->dest->index);
4705 return taken;
4708 /* Do SCCVN. Returns true if it finished, false if we bailed out
4709 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
4710 how we use alias-oracle walks during the VN process. */
4712 bool
4713 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4715 size_t i;
4717 default_vn_walk_kind = default_vn_walk_kind_;
4719 init_scc_vn ();
4721 /* Collect pointers we know point to readonly memory. */
4722 const_parms = BITMAP_ALLOC (NULL);
4723 tree fnspec = lookup_attribute ("fn spec",
4724 TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
4725 if (fnspec)
4727 fnspec = TREE_VALUE (TREE_VALUE (fnspec));
4728 i = 1;
4729 for (tree arg = DECL_ARGUMENTS (cfun->decl);
4730 arg; arg = DECL_CHAIN (arg), ++i)
4732 if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
4733 break;
4734 if (TREE_STRING_POINTER (fnspec)[i] == 'R'
4735 || TREE_STRING_POINTER (fnspec)[i] == 'r')
4737 tree name = ssa_default_def (cfun, arg);
4738 if (name)
4739 bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
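/* For example (string invented purely for illustration): with a "fn spec"
   value of ".rR", the characters at positions 1 and 2 are 'r' and 'R', so
   the loop above would record the default defs of the first and second
   parameters in const_parms.  */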
4744 /* Walk all blocks in dominator order, value-numbering the SSA defs
4745 of stmts and deciding whether outgoing edges are not executable. */
4746 sccvn_dom_walker walker;
4747 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4748 if (walker.fail)
4750 free_scc_vn ();
4751 return false;
4754 /* Initialize the value ids and prune out remaining VN_TOPs
4755 from dead code. */
4756 for (i = 1; i < num_ssa_names; ++i)
4758 tree name = ssa_name (i);
4759 vn_ssa_aux_t info;
4760 if (!name)
4761 continue;
4762 info = VN_INFO (name);
4763 if (!info->visited)
4764 info->valnum = name;
4765 if (info->valnum == name
4766 || info->valnum == VN_TOP)
4767 info->value_id = get_next_value_id ();
4768 else if (is_gimple_min_invariant (info->valnum))
4769 info->value_id = get_or_alloc_constant_value_id (info->valnum);
4772 /* Propagate. */
4773 for (i = 1; i < num_ssa_names; ++i)
4775 tree name = ssa_name (i);
4776 vn_ssa_aux_t info;
4777 if (!name)
4778 continue;
4779 info = VN_INFO (name);
4780 if (TREE_CODE (info->valnum) == SSA_NAME
4781 && info->valnum != name
4782 && info->value_id != VN_INFO (info->valnum)->value_id)
4783 info->value_id = VN_INFO (info->valnum)->value_id;
4786 set_hashtable_value_ids ();
4788 if (dump_file && (dump_flags & TDF_DETAILS))
4790 fprintf (dump_file, "Value numbers:\n");
4791 for (i = 0; i < num_ssa_names; i++)
4793 tree name = ssa_name (i);
4794 if (name
4795 && VN_INFO (name)->visited
4796 && SSA_VAL (name) != name)
4798 print_generic_expr (dump_file, name, 0);
4799 fprintf (dump_file, " = ");
4800 print_generic_expr (dump_file, SSA_VAL (name), 0);
4801 fprintf (dump_file, "\n");
4806 return true;
4809 /* Return the maximum value id we have ever seen. */
4811 unsigned int
4812 get_max_value_id (void)
4814 return next_value_id;
4817 /* Return the next unique value id. */
4819 unsigned int
4820 get_next_value_id (void)
4822 return next_value_id++;
4826 /* Compare two expressions E1 and E2 and return true if they are equal. */
4828 bool
4829 expressions_equal_p (tree e1, tree e2)
4831 /* The obvious case. */
4832 if (e1 == e2)
4833 return true;
4835 /* If either one is VN_TOP consider them equal. */
4836 if (e1 == VN_TOP || e2 == VN_TOP)
4837 return true;
4839 /* If only one of them is null, they cannot be equal. */
4840 if (!e1 || !e2)
4841 return false;
4843 /* Now perform the actual comparison. */
4844 if (TREE_CODE (e1) == TREE_CODE (e2)
4845 && operand_equal_p (e1, e2, OEP_PURE_SAME))
4846 return true;
4848 return false;
4852 /* Return true if the nary operation NARY may trap. This is a copy
4853 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
4855 bool
4856 vn_nary_may_trap (vn_nary_op_t nary)
4858 tree type;
4859 tree rhs2 = NULL_TREE;
4860 bool honor_nans = false;
4861 bool honor_snans = false;
4862 bool fp_operation = false;
4863 bool honor_trapv = false;
4864 bool handled, ret;
4865 unsigned i;
4867 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4868 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4869 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4871 type = nary->type;
4872 fp_operation = FLOAT_TYPE_P (type);
4873 if (fp_operation)
4875 honor_nans = flag_trapping_math && !flag_finite_math_only;
4876 honor_snans = flag_signaling_nans != 0;
4878 else if (INTEGRAL_TYPE_P (type)
4879 && TYPE_OVERFLOW_TRAPS (type))
4880 honor_trapv = true;
4882 if (nary->length >= 2)
4883 rhs2 = nary->op[1];
4884 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4885 honor_trapv,
4886 honor_nans, honor_snans, rhs2,
4887 &handled);
4888 if (handled
4889 && ret)
4890 return true;
4892 for (i = 0; i < nary->length; ++i)
4893 if (tree_could_trap_p (nary->op[i]))
4894 return true;
4896 return false;
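/* The kind of distinction the predicate above draws, shown as plain C with
   invented names, purely for illustration: division may trap on a zero
   divisor (which is why RHS2 is passed to operation_could_trap_helper_p),
   whereas unsigned addition never traps.  */

static int
demo_may_trap_div (int a, int b)
{
  return a / b;		/* May trap (or is undefined) when b == 0.  */
}

static unsigned
demo_never_traps_add (unsigned a, unsigned b)
{
  return a + b;		/* Unsigned addition wraps; it cannot trap.  */
}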