gcc/tree-ssa-sccvn.c
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2016 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "emit-rtl.h"
33 #include "cgraph.h"
34 #include "gimple-pretty-print.h"
35 #include "alias.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "cfganal.h"
39 #include "tree-inline.h"
40 #include "internal-fn.h"
41 #include "gimple-fold.h"
42 #include "tree-eh.h"
43 #include "gimplify.h"
44 #include "flags.h"
45 #include "dojump.h"
46 #include "explow.h"
47 #include "calls.h"
48 #include "varasm.h"
49 #include "stmt.h"
50 #include "expr.h"
51 #include "tree-dfa.h"
52 #include "tree-ssa.h"
53 #include "dumpfile.h"
54 #include "cfgloop.h"
55 #include "params.h"
56 #include "tree-ssa-propagate.h"
57 #include "tree-ssa-sccvn.h"
58 #include "tree-cfg.h"
59 #include "domwalk.h"
60 #include "gimple-iterator.h"
61 #include "gimple-match.h"
63 /* This algorithm is based on the SCC algorithm presented by Keith
64 Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
65 (http://citeseer.ist.psu.edu/41805.html). In
66 straight-line code, it is equivalent to a regular hash-based value
67 numbering that is performed in reverse postorder.
69 For code with cycles, there are two alternatives, both of which
70 require keeping the hashtables separate from the actual list of
71 value numbers for SSA names.
73 1. Iterate value numbering in an RPO walk of the blocks, removing
74 all the entries from the hashtable after each iteration (but
75 keeping the SSA name->value number mapping between iterations).
76 Iterate until it does not change.
78 2. Perform value numbering as part of an SCC walk on the SSA graph,
79 iterating only the cycles in the SSA graph until they do not change
80 (using a separate, optimistic hashtable for value numbering the SCC
81 operands).
83 The second is not just faster in practice (because most SSA graph
84 cycles do not involve all the variables in the graph), it also has
85 some nice properties.
87 One of these nice properties is that when we pop an SCC off the
88 stack, we are guaranteed to have processed all the operands coming from
89 *outside of that SCC*, so we do not need to do anything special to
90 ensure they have value numbers.
92 Another nice property is that the SCC walk is done as part of a DFS
93 of the SSA graph, which makes it easy to perform combining and
94 simplifying operations at the same time.
96 The code below is deliberately written in a way that makes it easy
97 to separate the SCC walk from the other work it does.
99 In order to propagate constants through the code, we track which
100 expressions contain constants, and use those while folding. In
101 theory, we could also track expressions whose value numbers are
102 replaced, in case we end up folding based on expression
103 identities.
105 In order to value number memory, we assign value numbers to vuses.
106 This enables us to note that, for example, stores of the same
107 value to the same address from the same starting memory state are
108 equivalent.
109 TODO:
111 1. We can iterate only the changing portions of the SCCs, but
112 I have not seen an SCC big enough for this to be a win.
113 2. If you differentiate between phi nodes for loops and phi nodes
114 for if-then-else, you can properly consider phi nodes in different
115 blocks for equivalence.
116 3. We could value number vuses in more cases, particularly whole
117 structure copies. */
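/* Editorial note: the function below is an illustrative, simplified sketch
   of the SCC walk described above, not the code this pass uses.
   current_info, optimistic_info and valid_info refer to the table sets
   declared further down; visit_one_name, clear_optimistic_tables and
   copy_optimistic_to_valid are invented stand-ins (value number one SSA
   name and report whether its value changed, reset the optimistic tables,
   and copy proven results into the valid tables, respectively).  */

static void
process_scc_sketch (vec<tree> scc)
{
  if (scc.length () == 1)
    /* Straight-line case: a single visit suffices.  */
    visit_one_name (scc[0]);
  else
    {
      /* Value number the cycle optimistically, iterating until no
	 value number changes, then commit the results.  */
      current_info = optimistic_info;
      bool changed = true;
      while (changed)
	{
	  changed = false;
	  clear_optimistic_tables ();
	  for (unsigned i = 0; i < scc.length (); ++i)
	    changed |= visit_one_name (scc[i]);
	}
      current_info = valid_info;
      copy_optimistic_to_valid (scc);
    }
}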
121 static tree *last_vuse_ptr;
122 static vn_lookup_kind vn_walk_kind;
123 static vn_lookup_kind default_vn_walk_kind;
124 bitmap const_parms;
126 /* vn_nary_op hashtable helpers. */
128 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
130 typedef vn_nary_op_s *compare_type;
131 static inline hashval_t hash (const vn_nary_op_s *);
132 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
135 /* Return the computed hashcode for nary operation P1. */
137 inline hashval_t
138 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
140 return vno1->hashcode;
143 /* Compare nary operations P1 and P2 and return true if they are
144 equivalent. */
146 inline bool
147 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
149 return vn_nary_op_eq (vno1, vno2);
152 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
153 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
156 /* vn_phi hashtable helpers. */
158 static int
159 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
161 struct vn_phi_hasher : pointer_hash <vn_phi_s>
163 static inline hashval_t hash (const vn_phi_s *);
164 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
165 static inline void remove (vn_phi_s *);
168 /* Return the computed hashcode for phi operation P1. */
170 inline hashval_t
171 vn_phi_hasher::hash (const vn_phi_s *vp1)
173 return vp1->hashcode;
176 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
178 inline bool
179 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
181 return vn_phi_eq (vp1, vp2);
184 /* Free a phi operation structure VP. */
186 inline void
187 vn_phi_hasher::remove (vn_phi_s *phi)
189 phi->phiargs.release ();
192 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
193 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
196 /* Compare two reference operands P1 and P2 for equality. Return true if
197 they are equal, and false otherwise. */
199 static int
200 vn_reference_op_eq (const void *p1, const void *p2)
202 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
203 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
205 return (vro1->opcode == vro2->opcode
206 /* We do not care for differences in type qualification. */
207 && (vro1->type == vro2->type
208 || (vro1->type && vro2->type
209 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
210 TYPE_MAIN_VARIANT (vro2->type))))
211 && expressions_equal_p (vro1->op0, vro2->op0)
212 && expressions_equal_p (vro1->op1, vro2->op1)
213 && expressions_equal_p (vro1->op2, vro2->op2));
216 /* Free a reference operation structure VP. */
218 static inline void
219 free_reference (vn_reference_s *vr)
221 vr->operands.release ();
225 /* vn_reference hashtable helpers. */
227 struct vn_reference_hasher : pointer_hash <vn_reference_s>
229 static inline hashval_t hash (const vn_reference_s *);
230 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
231 static inline void remove (vn_reference_s *);
234 /* Return the hashcode for a given reference operation P1. */
236 inline hashval_t
237 vn_reference_hasher::hash (const vn_reference_s *vr1)
239 return vr1->hashcode;
242 inline bool
243 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
245 return vn_reference_eq (v, c);
248 inline void
249 vn_reference_hasher::remove (vn_reference_s *v)
251 free_reference (v);
254 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
255 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
258 /* The set of hashtables and alloc_pool's for their items. */
260 typedef struct vn_tables_s
262 vn_nary_op_table_type *nary;
263 vn_phi_table_type *phis;
264 vn_reference_table_type *references;
265 struct obstack nary_obstack;
266 object_allocator<vn_phi_s> *phis_pool;
267 object_allocator<vn_reference_s> *references_pool;
268 } *vn_tables_t;
271 /* vn_constant hashtable helpers. */
273 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
275 static inline hashval_t hash (const vn_constant_s *);
276 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
279 /* Hash table hash function for vn_constant_t. */
281 inline hashval_t
282 vn_constant_hasher::hash (const vn_constant_s *vc1)
284 return vc1->hashcode;
287 /* Hash table equality function for vn_constant_t. */
289 inline bool
290 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
292 if (vc1->hashcode != vc2->hashcode)
293 return false;
295 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
298 static hash_table<vn_constant_hasher> *constant_to_value_id;
299 static bitmap constant_value_ids;
302 /* Valid hashtables storing information we have proven to be
303 correct. */
305 static vn_tables_t valid_info;
307 /* Optimistic hashtables storing information we are making assumptions about
308 during iterations. */
310 static vn_tables_t optimistic_info;
312 /* Pointer to the set of hashtables that is currently being used.
313 Should always point to either the optimistic_info, or the
314 valid_info. */
316 static vn_tables_t current_info;
319 /* Reverse post order index for each basic block. */
321 static int *rpo_numbers;
323 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
325 /* Return the SSA value of the VUSE x, supporting released VDEFs
326 during elimination which will value-number the VDEF to the
327 associated VUSE (but not substitute in the whole lattice). */
329 static inline tree
330 vuse_ssa_val (tree x)
332 if (!x)
333 return NULL_TREE;
335 do
337 x = SSA_VAL (x);
339 while (SSA_NAME_IN_FREE_LIST (x));
341 return x;
344 /* This represents the top of the VN lattice, which is the universal
345 value. */
347 tree VN_TOP;
349 /* Unique counter for our value ids. */
351 static unsigned int next_value_id;
353 /* Next DFS number and the stack for strongly connected component
354 detection. */
356 static unsigned int next_dfs_num;
357 static vec<tree> sccstack;
361 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
362 are allocated on an obstack for locality reasons, and to free them
363 without looping over the vec. */
365 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
366 static struct obstack vn_ssa_aux_obstack;
368 /* Return whether there is value numbering information for a given SSA name. */
370 bool
371 has_VN_INFO (tree name)
373 if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
374 return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
375 return false;
378 /* Return the value numbering information for a given SSA name. */
380 vn_ssa_aux_t
381 VN_INFO (tree name)
383 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
384 gcc_checking_assert (res);
385 return res;
388 /* Set the value numbering info for a given SSA name to a given
389 value. */
391 static inline void
392 VN_INFO_SET (tree name, vn_ssa_aux_t value)
394 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
397 /* Initialize the value numbering info for a given SSA name.
398 This should be called just once for every SSA name. */
400 vn_ssa_aux_t
401 VN_INFO_GET (tree name)
403 vn_ssa_aux_t newinfo;
405 gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
406 || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
407 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
408 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
409 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
410 vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
411 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
412 return newinfo;
416 /* Return the vn_kind the expression computed by the stmt should be
417 associated with. */
419 enum vn_kind
420 vn_get_stmt_kind (gimple *stmt)
422 switch (gimple_code (stmt))
424 case GIMPLE_CALL:
425 return VN_REFERENCE;
426 case GIMPLE_PHI:
427 return VN_PHI;
428 case GIMPLE_ASSIGN:
430 enum tree_code code = gimple_assign_rhs_code (stmt);
431 tree rhs1 = gimple_assign_rhs1 (stmt);
432 switch (get_gimple_rhs_class (code))
434 case GIMPLE_UNARY_RHS:
435 case GIMPLE_BINARY_RHS:
436 case GIMPLE_TERNARY_RHS:
437 return VN_NARY;
438 case GIMPLE_SINGLE_RHS:
439 switch (TREE_CODE_CLASS (code))
441 case tcc_reference:
442 /* VOP-less references can go through unary case. */
443 if ((code == REALPART_EXPR
444 || code == IMAGPART_EXPR
445 || code == VIEW_CONVERT_EXPR
446 || code == BIT_FIELD_REF)
447 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
448 return VN_NARY;
450 /* Fallthrough. */
451 case tcc_declaration:
452 return VN_REFERENCE;
454 case tcc_constant:
455 return VN_CONSTANT;
457 default:
458 if (code == ADDR_EXPR)
459 return (is_gimple_min_invariant (rhs1)
460 ? VN_CONSTANT : VN_REFERENCE);
461 else if (code == CONSTRUCTOR)
462 return VN_NARY;
463 return VN_NONE;
465 default:
466 return VN_NONE;
469 default:
470 return VN_NONE;
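/* Editorial note, for illustration (the GIMPLE snippets are hypothetical):
   under the classification above, "x_1 = y_2 + 1" is VN_NARY,
   "x_1 = *p_2" and "x_1 = foo ()" are VN_REFERENCE,
   "x_1 = PHI <a_2(3), b_4(5)>" is VN_PHI, and "x_1 = 42" is VN_CONSTANT.  */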
474 /* Lookup a value id for CONSTANT and return it. If it does not
475 exist, return 0. */
477 unsigned int
478 get_constant_value_id (tree constant)
480 vn_constant_s **slot;
481 struct vn_constant_s vc;
483 vc.hashcode = vn_hash_constant_with_type (constant);
484 vc.constant = constant;
485 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
486 if (slot)
487 return (*slot)->value_id;
488 return 0;
491 /* Lookup a value id for CONSTANT, and if it does not exist, create a
492 new one and return it. If it does exist, return it. */
494 unsigned int
495 get_or_alloc_constant_value_id (tree constant)
497 vn_constant_s **slot;
498 struct vn_constant_s vc;
499 vn_constant_t vcp;
501 vc.hashcode = vn_hash_constant_with_type (constant);
502 vc.constant = constant;
503 slot = constant_to_value_id->find_slot (&vc, INSERT);
504 if (*slot)
505 return (*slot)->value_id;
507 vcp = XNEW (struct vn_constant_s);
508 vcp->hashcode = vc.hashcode;
509 vcp->constant = constant;
510 vcp->value_id = get_next_value_id ();
511 *slot = vcp;
512 bitmap_set_bit (constant_value_ids, vcp->value_id);
513 return vcp->value_id;
516 /* Return true if V is a value id for a constant. */
518 bool
519 value_id_constant_p (unsigned int v)
521 return bitmap_bit_p (constant_value_ids, v);
524 /* Compute the hash for a reference operand VRO1. */
526 static void
527 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
529 hstate.add_int (vro1->opcode);
530 if (vro1->op0)
531 inchash::add_expr (vro1->op0, hstate);
532 if (vro1->op1)
533 inchash::add_expr (vro1->op1, hstate);
534 if (vro1->op2)
535 inchash::add_expr (vro1->op2, hstate);
538 /* Compute a hash for the reference operation VR1 and return it. */
540 static hashval_t
541 vn_reference_compute_hash (const vn_reference_t vr1)
543 inchash::hash hstate;
544 hashval_t result;
545 int i;
546 vn_reference_op_t vro;
547 HOST_WIDE_INT off = -1;
548 bool deref = false;
550 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
552 if (vro->opcode == MEM_REF)
553 deref = true;
554 else if (vro->opcode != ADDR_EXPR)
555 deref = false;
556 if (vro->off != -1)
558 if (off == -1)
559 off = 0;
560 off += vro->off;
562 else
564 if (off != -1
565 && off != 0)
566 hstate.add_int (off);
567 off = -1;
568 if (deref
569 && vro->opcode == ADDR_EXPR)
571 if (vro->op0)
573 tree op = TREE_OPERAND (vro->op0, 0);
574 hstate.add_int (TREE_CODE (op));
575 inchash::add_expr (op, hstate);
578 else
579 vn_reference_op_compute_hash (vro, hstate);
582 result = hstate.end ();
583 /* ??? We would ICE later if we hash instead of adding that in. */
584 if (vr1->vuse)
585 result += SSA_NAME_VERSION (vr1->vuse);
587 return result;
590 /* Return true if reference operations VR1 and VR2 are equivalent. This
591 means they have the same set of operands and vuses. */
593 bool
594 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
596 unsigned i, j;
598 /* Early out if this is not a hash collision. */
599 if (vr1->hashcode != vr2->hashcode)
600 return false;
602 /* The VOP needs to be the same. */
603 if (vr1->vuse != vr2->vuse)
604 return false;
606 /* If the operands are the same we are done. */
607 if (vr1->operands == vr2->operands)
608 return true;
610 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
611 return false;
613 if (INTEGRAL_TYPE_P (vr1->type)
614 && INTEGRAL_TYPE_P (vr2->type))
616 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
617 return false;
619 else if (INTEGRAL_TYPE_P (vr1->type)
620 && (TYPE_PRECISION (vr1->type)
621 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
622 return false;
623 else if (INTEGRAL_TYPE_P (vr2->type)
624 && (TYPE_PRECISION (vr2->type)
625 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
626 return false;
628 i = 0;
629 j = 0;
632 HOST_WIDE_INT off1 = 0, off2 = 0;
633 vn_reference_op_t vro1, vro2;
634 vn_reference_op_s tem1, tem2;
635 bool deref1 = false, deref2 = false;
636 for (; vr1->operands.iterate (i, &vro1); i++)
638 if (vro1->opcode == MEM_REF)
639 deref1 = true;
640 /* Do not look through a storage order barrier. */
641 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
642 return false;
643 if (vro1->off == -1)
644 break;
645 off1 += vro1->off;
647 for (; vr2->operands.iterate (j, &vro2); j++)
649 if (vro2->opcode == MEM_REF)
650 deref2 = true;
651 /* Do not look through a storage order barrier. */
652 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
653 return false;
654 if (vro2->off == -1)
655 break;
656 off2 += vro2->off;
658 if (off1 != off2)
659 return false;
660 if (deref1 && vro1->opcode == ADDR_EXPR)
662 memset (&tem1, 0, sizeof (tem1));
663 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
664 tem1.type = TREE_TYPE (tem1.op0);
665 tem1.opcode = TREE_CODE (tem1.op0);
666 vro1 = &tem1;
667 deref1 = false;
669 if (deref2 && vro2->opcode == ADDR_EXPR)
671 memset (&tem2, 0, sizeof (tem2));
672 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
673 tem2.type = TREE_TYPE (tem2.op0);
674 tem2.opcode = TREE_CODE (tem2.op0);
675 vro2 = &tem2;
676 deref2 = false;
678 if (deref1 != deref2)
679 return false;
680 if (!vn_reference_op_eq (vro1, vro2))
681 return false;
682 ++j;
683 ++i;
685 while (vr1->operands.length () != i
686 || vr2->operands.length () != j);
688 return true;
691 /* Copy the operations present in load/store REF into RESULT, a vector of
692 vn_reference_op_s's. */
694 static void
695 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
697 if (TREE_CODE (ref) == TARGET_MEM_REF)
699 vn_reference_op_s temp;
701 result->reserve (3);
703 memset (&temp, 0, sizeof (temp));
704 temp.type = TREE_TYPE (ref);
705 temp.opcode = TREE_CODE (ref);
706 temp.op0 = TMR_INDEX (ref);
707 temp.op1 = TMR_STEP (ref);
708 temp.op2 = TMR_OFFSET (ref);
709 temp.off = -1;
710 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
711 temp.base = MR_DEPENDENCE_BASE (ref);
712 result->quick_push (temp);
714 memset (&temp, 0, sizeof (temp));
715 temp.type = NULL_TREE;
716 temp.opcode = ERROR_MARK;
717 temp.op0 = TMR_INDEX2 (ref);
718 temp.off = -1;
719 result->quick_push (temp);
721 memset (&temp, 0, sizeof (temp));
722 temp.type = NULL_TREE;
723 temp.opcode = TREE_CODE (TMR_BASE (ref));
724 temp.op0 = TMR_BASE (ref);
725 temp.off = -1;
726 result->quick_push (temp);
727 return;
730 /* For non-calls, store the information that makes up the address. */
731 tree orig = ref;
732 while (ref)
734 vn_reference_op_s temp;
736 memset (&temp, 0, sizeof (temp));
737 temp.type = TREE_TYPE (ref);
738 temp.opcode = TREE_CODE (ref);
739 temp.off = -1;
741 switch (temp.opcode)
743 case MODIFY_EXPR:
744 temp.op0 = TREE_OPERAND (ref, 1);
745 break;
746 case WITH_SIZE_EXPR:
747 temp.op0 = TREE_OPERAND (ref, 1);
748 temp.off = 0;
749 break;
750 case MEM_REF:
751 /* The base address gets its own vn_reference_op_s structure. */
752 temp.op0 = TREE_OPERAND (ref, 1);
754 offset_int off = mem_ref_offset (ref);
755 if (wi::fits_shwi_p (off))
756 temp.off = off.to_shwi ();
758 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
759 temp.base = MR_DEPENDENCE_BASE (ref);
760 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
761 break;
762 case BIT_FIELD_REF:
763 /* Record bits, position and storage order. */
764 temp.op0 = TREE_OPERAND (ref, 1);
765 temp.op1 = TREE_OPERAND (ref, 2);
766 if (tree_fits_shwi_p (TREE_OPERAND (ref, 2)))
768 HOST_WIDE_INT off = tree_to_shwi (TREE_OPERAND (ref, 2));
769 if (off % BITS_PER_UNIT == 0)
770 temp.off = off / BITS_PER_UNIT;
772 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
773 break;
774 case COMPONENT_REF:
775 /* The field decl is enough to unambiguously specify the field;
776 a matching type is not necessary, and a mismatching type
777 is always a spurious difference. */
778 temp.type = NULL_TREE;
779 temp.op0 = TREE_OPERAND (ref, 1);
780 temp.op1 = TREE_OPERAND (ref, 2);
782 tree this_offset = component_ref_field_offset (ref);
783 if (this_offset
784 && TREE_CODE (this_offset) == INTEGER_CST)
786 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
787 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
789 offset_int off
790 = (wi::to_offset (this_offset)
791 + wi::lrshift (wi::to_offset (bit_offset),
792 LOG2_BITS_PER_UNIT));
793 if (wi::fits_shwi_p (off)
794 /* Prohibit value-numbering zero offset components
795 of addresses the same before the pass folding
796 __builtin_object_size had a chance to run
797 (checking cfun->after_inlining does the
798 trick here). */
799 && (TREE_CODE (orig) != ADDR_EXPR
800 || off != 0
801 || cfun->after_inlining))
802 temp.off = off.to_shwi ();
806 break;
807 case ARRAY_RANGE_REF:
808 case ARRAY_REF:
809 /* Record index as operand. */
810 temp.op0 = TREE_OPERAND (ref, 1);
811 /* Always record lower bounds and element size. */
812 temp.op1 = array_ref_low_bound (ref);
813 temp.op2 = array_ref_element_size (ref);
814 if (TREE_CODE (temp.op0) == INTEGER_CST
815 && TREE_CODE (temp.op1) == INTEGER_CST
816 && TREE_CODE (temp.op2) == INTEGER_CST)
818 offset_int off = ((wi::to_offset (temp.op0)
819 - wi::to_offset (temp.op1))
820 * wi::to_offset (temp.op2));
821 if (wi::fits_shwi_p (off))
822 temp.off = off.to_shwi();
824 break;
825 case VAR_DECL:
826 if (DECL_HARD_REGISTER (ref))
828 temp.op0 = ref;
829 break;
831 /* Fallthru. */
832 case PARM_DECL:
833 case CONST_DECL:
834 case RESULT_DECL:
835 /* Canonicalize decls to MEM[&decl] which is what we end up with
836 when valueizing MEM[ptr] with ptr = &decl. */
837 temp.opcode = MEM_REF;
838 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
839 temp.off = 0;
840 result->safe_push (temp);
841 temp.opcode = ADDR_EXPR;
842 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
843 temp.type = TREE_TYPE (temp.op0);
844 temp.off = -1;
845 break;
846 case STRING_CST:
847 case INTEGER_CST:
848 case COMPLEX_CST:
849 case VECTOR_CST:
850 case REAL_CST:
851 case FIXED_CST:
852 case CONSTRUCTOR:
853 case SSA_NAME:
854 temp.op0 = ref;
855 break;
856 case ADDR_EXPR:
857 if (is_gimple_min_invariant (ref))
859 temp.op0 = ref;
860 break;
862 break;
863 /* These are only interesting for their operands, their
864 existence, and their type. They will never be the last
865 ref in the chain of references (i.e. they require an
866 operand), so we don't have to put anything
867 for op* as it will be handled by the iteration. */
868 case REALPART_EXPR:
869 temp.off = 0;
870 break;
871 case VIEW_CONVERT_EXPR:
872 temp.off = 0;
873 temp.reverse = storage_order_barrier_p (ref);
874 break;
875 case IMAGPART_EXPR:
876 /* This is only interesting for its constant offset. */
877 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
878 break;
879 default:
880 gcc_unreachable ();
882 result->safe_push (temp);
884 if (REFERENCE_CLASS_P (ref)
885 || TREE_CODE (ref) == MODIFY_EXPR
886 || TREE_CODE (ref) == WITH_SIZE_EXPR
887 || (TREE_CODE (ref) == ADDR_EXPR
888 && !is_gimple_min_invariant (ref)))
889 ref = TREE_OPERAND (ref, 0);
890 else
891 ref = NULL_TREE;
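/* Editorial note, an illustrative example (names invented): for a load
   "... = MEM[p_1]" the loop above produces the two operands
   { MEM_REF, op0 = 0, off = 0 } and { SSA_NAME, op0 = p_1, off = -1 },
   while a direct decl access "... = v" is canonicalized into
   { MEM_REF, op0 = 0, off = 0 } and { ADDR_EXPR, op0 = &v, off = -1 },
   so that MEM[ptr] with ptr = &v and a direct access to v end up with
   the same operand encoding.  */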
895 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
896 operands in *OPS, the reference alias set SET and the reference type TYPE.
897 Return true if something useful was produced. */
899 bool
900 ao_ref_init_from_vn_reference (ao_ref *ref,
901 alias_set_type set, tree type,
902 vec<vn_reference_op_s> ops)
904 vn_reference_op_t op;
905 unsigned i;
906 tree base = NULL_TREE;
907 tree *op0_p = &base;
908 offset_int offset = 0;
909 offset_int max_size;
910 offset_int size = -1;
911 tree size_tree = NULL_TREE;
912 alias_set_type base_alias_set = -1;
914 /* First get the final access size from just the outermost expression. */
915 op = &ops[0];
916 if (op->opcode == COMPONENT_REF)
917 size_tree = DECL_SIZE (op->op0);
918 else if (op->opcode == BIT_FIELD_REF)
919 size_tree = op->op0;
920 else
922 machine_mode mode = TYPE_MODE (type);
923 if (mode == BLKmode)
924 size_tree = TYPE_SIZE (type);
925 else
926 size = int (GET_MODE_BITSIZE (mode));
928 if (size_tree != NULL_TREE
929 && TREE_CODE (size_tree) == INTEGER_CST)
930 size = wi::to_offset (size_tree);
932 /* Initially, maxsize is the same as the accessed element size.
933 In the following it will only grow (or become -1). */
934 max_size = size;
936 /* Compute cumulative bit-offset for nested component-refs and array-refs,
937 and find the ultimate containing object. */
938 FOR_EACH_VEC_ELT (ops, i, op)
940 switch (op->opcode)
942 /* These may be in the reference ops, but we cannot do anything
943 sensible with them here. */
944 case ADDR_EXPR:
945 /* Apart from ADDR_EXPR arguments to MEM_REF. */
946 if (base != NULL_TREE
947 && TREE_CODE (base) == MEM_REF
948 && op->op0
949 && DECL_P (TREE_OPERAND (op->op0, 0)))
951 vn_reference_op_t pop = &ops[i-1];
952 base = TREE_OPERAND (op->op0, 0);
953 if (pop->off == -1)
955 max_size = -1;
956 offset = 0;
958 else
959 offset += pop->off * BITS_PER_UNIT;
960 op0_p = NULL;
961 break;
963 /* Fallthru. */
964 case CALL_EXPR:
965 return false;
967 /* Record the base objects. */
968 case MEM_REF:
969 base_alias_set = get_deref_alias_set (op->op0);
970 *op0_p = build2 (MEM_REF, op->type,
971 NULL_TREE, op->op0);
972 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
973 MR_DEPENDENCE_BASE (*op0_p) = op->base;
974 op0_p = &TREE_OPERAND (*op0_p, 0);
975 break;
977 case VAR_DECL:
978 case PARM_DECL:
979 case RESULT_DECL:
980 case SSA_NAME:
981 *op0_p = op->op0;
982 op0_p = NULL;
983 break;
985 /* And now the usual component-reference style ops. */
986 case BIT_FIELD_REF:
987 offset += wi::to_offset (op->op1);
988 break;
990 case COMPONENT_REF:
992 tree field = op->op0;
993 /* We do not have a complete COMPONENT_REF tree here so we
994 cannot use component_ref_field_offset. Do the interesting
995 parts manually. */
996 tree this_offset = DECL_FIELD_OFFSET (field);
998 if (op->op1 || TREE_CODE (this_offset) != INTEGER_CST)
999 max_size = -1;
1000 else
1002 offset_int woffset = wi::lshift (wi::to_offset (this_offset),
1003 LOG2_BITS_PER_UNIT);
1004 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1005 offset += woffset;
1007 break;
1010 case ARRAY_RANGE_REF:
1011 case ARRAY_REF:
1012 /* We recorded the lower bound and the element size. */
1013 if (TREE_CODE (op->op0) != INTEGER_CST
1014 || TREE_CODE (op->op1) != INTEGER_CST
1015 || TREE_CODE (op->op2) != INTEGER_CST)
1016 max_size = -1;
1017 else
1019 offset_int woffset
1020 = wi::sext (wi::to_offset (op->op0) - wi::to_offset (op->op1),
1021 TYPE_PRECISION (TREE_TYPE (op->op0)));
1022 woffset *= wi::to_offset (op->op2);
1023 woffset = wi::lshift (woffset, LOG2_BITS_PER_UNIT);
1024 offset += woffset;
1026 break;
1028 case REALPART_EXPR:
1029 break;
1031 case IMAGPART_EXPR:
1032 offset += size;
1033 break;
1035 case VIEW_CONVERT_EXPR:
1036 break;
1038 case STRING_CST:
1039 case INTEGER_CST:
1040 case COMPLEX_CST:
1041 case VECTOR_CST:
1042 case REAL_CST:
1043 case CONSTRUCTOR:
1044 case CONST_DECL:
1045 return false;
1047 default:
1048 return false;
1052 if (base == NULL_TREE)
1053 return false;
1055 ref->ref = NULL_TREE;
1056 ref->base = base;
1057 ref->ref_alias_set = set;
1058 if (base_alias_set != -1)
1059 ref->base_alias_set = base_alias_set;
1060 else
1061 ref->base_alias_set = get_alias_set (base);
1062 /* We discount volatiles from value-numbering elsewhere. */
1063 ref->volatile_p = false;
1065 if (!wi::fits_shwi_p (size) || wi::neg_p (size))
1067 ref->offset = 0;
1068 ref->size = -1;
1069 ref->max_size = -1;
1070 return true;
1073 ref->size = size.to_shwi ();
1075 if (!wi::fits_shwi_p (offset))
1077 ref->offset = 0;
1078 ref->max_size = -1;
1079 return true;
1082 ref->offset = offset.to_shwi ();
1084 if (!wi::fits_shwi_p (max_size) || wi::neg_p (max_size))
1085 ref->max_size = -1;
1086 else
1087 ref->max_size = max_size.to_shwi ();
1089 return true;
1092 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1093 vn_reference_op_s's. */
1095 static void
1096 copy_reference_ops_from_call (gcall *call,
1097 vec<vn_reference_op_s> *result)
1099 vn_reference_op_s temp;
1100 unsigned i;
1101 tree lhs = gimple_call_lhs (call);
1102 int lr;
1104 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1105 different. By adding the lhs here in the vector, we ensure that the
1106 hashcode is different, guaranteeing a different value number. */
1107 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1109 memset (&temp, 0, sizeof (temp));
1110 temp.opcode = MODIFY_EXPR;
1111 temp.type = TREE_TYPE (lhs);
1112 temp.op0 = lhs;
1113 temp.off = -1;
1114 result->safe_push (temp);
1117 /* Copy the type, opcode, function, static chain and EH region, if any. */
1118 memset (&temp, 0, sizeof (temp));
1119 temp.type = gimple_call_return_type (call);
1120 temp.opcode = CALL_EXPR;
1121 temp.op0 = gimple_call_fn (call);
1122 temp.op1 = gimple_call_chain (call);
1123 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1124 temp.op2 = size_int (lr);
1125 temp.off = -1;
1126 if (gimple_call_with_bounds_p (call))
1127 temp.with_bounds = 1;
1128 result->safe_push (temp);
1130 /* Copy the call arguments. As they can be references as well,
1131 just chain them together. */
1132 for (i = 0; i < gimple_call_num_args (call); ++i)
1134 tree callarg = gimple_call_arg (call, i);
1135 copy_reference_ops_from_ref (callarg, result);
1139 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1140 *I_P to point to the last element of the replacement. */
1141 static bool
1142 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1143 unsigned int *i_p)
1145 unsigned int i = *i_p;
1146 vn_reference_op_t op = &(*ops)[i];
1147 vn_reference_op_t mem_op = &(*ops)[i - 1];
1148 tree addr_base;
1149 HOST_WIDE_INT addr_offset = 0;
1151 /* The only thing we have to do is, for &OBJ.foo.bar, add the offset
1152 of .foo.bar to the preceding MEM_REF offset and replace the
1153 address with &OBJ. */
1154 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1155 &addr_offset);
1156 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1157 if (addr_base != TREE_OPERAND (op->op0, 0))
1159 offset_int off = offset_int::from (mem_op->op0, SIGNED);
1160 off += addr_offset;
1161 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1162 op->op0 = build_fold_addr_expr (addr_base);
1163 if (tree_fits_shwi_p (mem_op->op0))
1164 mem_op->off = tree_to_shwi (mem_op->op0);
1165 else
1166 mem_op->off = -1;
1167 return true;
1169 return false;
1172 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1173 *I_P to point to the last element of the replacement. */
1174 static bool
1175 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1176 unsigned int *i_p)
1178 unsigned int i = *i_p;
1179 vn_reference_op_t op = &(*ops)[i];
1180 vn_reference_op_t mem_op = &(*ops)[i - 1];
1181 gimple *def_stmt;
1182 enum tree_code code;
1183 offset_int off;
1185 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1186 if (!is_gimple_assign (def_stmt))
1187 return false;
1189 code = gimple_assign_rhs_code (def_stmt);
1190 if (code != ADDR_EXPR
1191 && code != POINTER_PLUS_EXPR)
1192 return false;
1194 off = offset_int::from (mem_op->op0, SIGNED);
1196 /* The only thing we have to do is, for &OBJ.foo.bar, add the offset
1197 of .foo.bar to the preceding MEM_REF offset and replace the
1198 address with &OBJ. */
1199 if (code == ADDR_EXPR)
1201 tree addr, addr_base;
1202 HOST_WIDE_INT addr_offset;
1204 addr = gimple_assign_rhs1 (def_stmt);
1205 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1206 &addr_offset);
1207 /* If that didn't work because the address isn't invariant, propagate
1208 the reference tree from the address operation in case the current
1209 dereference isn't offsetted. */
1210 if (!addr_base
1211 && *i_p == ops->length () - 1
1212 && off == 0
1213 /* This makes us disable this transform for PRE, where the
1214 reference ops might also be used for code insertion, which
1215 is invalid. */
1216 && default_vn_walk_kind == VN_WALKREWRITE)
1218 auto_vec<vn_reference_op_s, 32> tem;
1219 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1220 /* Make sure to preserve TBAA info. The only objects not
1221 wrapped in MEM_REFs that can have their address taken are
1222 STRING_CSTs. */
1223 if (tem.length () >= 2
1224 && tem[tem.length () - 2].opcode == MEM_REF)
1226 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1227 new_mem_op->op0 = fold_convert (TREE_TYPE (mem_op->op0),
1228 new_mem_op->op0);
1230 else
1231 gcc_assert (tem.last ().opcode == STRING_CST);
1232 ops->pop ();
1233 ops->pop ();
1234 ops->safe_splice (tem);
1235 --*i_p;
1236 return true;
1238 if (!addr_base
1239 || TREE_CODE (addr_base) != MEM_REF)
1240 return false;
1242 off += addr_offset;
1243 off += mem_ref_offset (addr_base);
1244 op->op0 = TREE_OPERAND (addr_base, 0);
1246 else
1248 tree ptr, ptroff;
1249 ptr = gimple_assign_rhs1 (def_stmt);
1250 ptroff = gimple_assign_rhs2 (def_stmt);
1251 if (TREE_CODE (ptr) != SSA_NAME
1252 || TREE_CODE (ptroff) != INTEGER_CST)
1253 return false;
1255 off += wi::to_offset (ptroff);
1256 op->op0 = ptr;
1259 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1260 if (tree_fits_shwi_p (mem_op->op0))
1261 mem_op->off = tree_to_shwi (mem_op->op0);
1262 else
1263 mem_op->off = -1;
1264 if (TREE_CODE (op->op0) == SSA_NAME)
1265 op->op0 = SSA_VAL (op->op0);
1266 if (TREE_CODE (op->op0) != SSA_NAME)
1267 op->opcode = TREE_CODE (op->op0);
1269 /* And recurse. */
1270 if (TREE_CODE (op->op0) == SSA_NAME)
1271 vn_reference_maybe_forwprop_address (ops, i_p);
1272 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1273 vn_reference_fold_indirect (ops, i_p);
1274 return true;
1277 /* Optimize the reference REF to a constant if possible or return
1278 NULL_TREE if not. */
1280 tree
1281 fully_constant_vn_reference_p (vn_reference_t ref)
1283 vec<vn_reference_op_s> operands = ref->operands;
1284 vn_reference_op_t op;
1286 /* Try to simplify the translated expression if it is
1287 a call to a builtin function with at most two arguments. */
1288 op = &operands[0];
1289 if (op->opcode == CALL_EXPR
1290 && TREE_CODE (op->op0) == ADDR_EXPR
1291 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1292 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1293 && operands.length () >= 2
1294 && operands.length () <= 3)
1296 vn_reference_op_t arg0, arg1 = NULL;
1297 bool anyconst = false;
1298 arg0 = &operands[1];
1299 if (operands.length () > 2)
1300 arg1 = &operands[2];
1301 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1302 || (arg0->opcode == ADDR_EXPR
1303 && is_gimple_min_invariant (arg0->op0)))
1304 anyconst = true;
1305 if (arg1
1306 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1307 || (arg1->opcode == ADDR_EXPR
1308 && is_gimple_min_invariant (arg1->op0))))
1309 anyconst = true;
1310 if (anyconst)
1312 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1313 arg1 ? 2 : 1,
1314 arg0->op0,
1315 arg1 ? arg1->op0 : NULL);
1316 if (folded
1317 && TREE_CODE (folded) == NOP_EXPR)
1318 folded = TREE_OPERAND (folded, 0);
1319 if (folded
1320 && is_gimple_min_invariant (folded))
1321 return folded;
1325 /* Simplify reads from constants or constant initializers. */
1326 else if (BITS_PER_UNIT == 8
1327 && is_gimple_reg_type (ref->type)
1328 && (!INTEGRAL_TYPE_P (ref->type)
1329 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1331 HOST_WIDE_INT off = 0;
1332 HOST_WIDE_INT size;
1333 if (INTEGRAL_TYPE_P (ref->type))
1334 size = TYPE_PRECISION (ref->type);
1335 else
1336 size = tree_to_shwi (TYPE_SIZE (ref->type));
1337 if (size % BITS_PER_UNIT != 0
1338 || size > MAX_BITSIZE_MODE_ANY_MODE)
1339 return NULL_TREE;
1340 size /= BITS_PER_UNIT;
1341 unsigned i;
1342 for (i = 0; i < operands.length (); ++i)
1344 if (operands[i].off == -1)
1345 return NULL_TREE;
1346 off += operands[i].off;
1347 if (operands[i].opcode == MEM_REF)
1349 ++i;
1350 break;
1353 vn_reference_op_t base = &operands[--i];
1354 tree ctor = error_mark_node;
1355 tree decl = NULL_TREE;
1356 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1357 ctor = base->op0;
1358 else if (base->opcode == MEM_REF
1359 && base[1].opcode == ADDR_EXPR
1360 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1361 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
1363 decl = TREE_OPERAND (base[1].op0, 0);
1364 ctor = ctor_for_folding (decl);
1366 if (ctor == NULL_TREE)
1367 return build_zero_cst (ref->type);
1368 else if (ctor != error_mark_node)
1370 if (decl)
1372 tree res = fold_ctor_reference (ref->type, ctor,
1373 off * BITS_PER_UNIT,
1374 size * BITS_PER_UNIT, decl);
1375 if (res)
1377 STRIP_USELESS_TYPE_CONVERSION (res);
1378 if (is_gimple_min_invariant (res))
1379 return res;
1382 else
1384 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1385 int len = native_encode_expr (ctor, buf, size, off);
1386 if (len > 0)
1387 return native_interpret_expr (ref->type, buf, len);
1392 return NULL_TREE;
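/* Editorial note, an illustrative example (the declaration is invented):
   given "static const int tab[4] = { 10, 20, 30, 40 };", a lookup of
   tab[2] reaches the branch above with an accumulated byte offset of 8
   and a size of 4 bytes; the trailing MEM_REF / ADDR_EXPR &tab operands
   identify the decl, ctor_for_folding returns its initializer, and
   fold_ctor_reference extracts the constant 30.  */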
1395 /* Return true if OPS contain a storage order barrier. */
1397 static bool
1398 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1400 vn_reference_op_t op;
1401 unsigned i;
1403 FOR_EACH_VEC_ELT (ops, i, op)
1404 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1405 return true;
1407 return false;
1410 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1411 structures into their value numbers. This is done in-place, and
1412 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1413 whether any operands were valueized. */
1415 static vec<vn_reference_op_s>
1416 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1418 vn_reference_op_t vro;
1419 unsigned int i;
1421 *valueized_anything = false;
1423 FOR_EACH_VEC_ELT (orig, i, vro)
1425 if (vro->opcode == SSA_NAME
1426 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1428 tree tem = SSA_VAL (vro->op0);
1429 if (tem != vro->op0)
1431 *valueized_anything = true;
1432 vro->op0 = tem;
1434 /* If it transforms from an SSA_NAME to a constant, update
1435 the opcode. */
1436 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1437 vro->opcode = TREE_CODE (vro->op0);
1439 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1441 tree tem = SSA_VAL (vro->op1);
1442 if (tem != vro->op1)
1444 *valueized_anything = true;
1445 vro->op1 = tem;
1448 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1450 tree tem = SSA_VAL (vro->op2);
1451 if (tem != vro->op2)
1453 *valueized_anything = true;
1454 vro->op2 = tem;
1457 /* If it transforms from an SSA_NAME to an address, fold with
1458 a preceding indirect reference. */
1459 if (i > 0
1460 && vro->op0
1461 && TREE_CODE (vro->op0) == ADDR_EXPR
1462 && orig[i - 1].opcode == MEM_REF)
1464 if (vn_reference_fold_indirect (&orig, &i))
1465 *valueized_anything = true;
1467 else if (i > 0
1468 && vro->opcode == SSA_NAME
1469 && orig[i - 1].opcode == MEM_REF)
1471 if (vn_reference_maybe_forwprop_address (&orig, &i))
1472 *valueized_anything = true;
1474 /* If it transforms a non-constant ARRAY_REF into a constant
1475 one, adjust the constant offset. */
1476 else if (vro->opcode == ARRAY_REF
1477 && vro->off == -1
1478 && TREE_CODE (vro->op0) == INTEGER_CST
1479 && TREE_CODE (vro->op1) == INTEGER_CST
1480 && TREE_CODE (vro->op2) == INTEGER_CST)
1482 offset_int off = ((wi::to_offset (vro->op0)
1483 - wi::to_offset (vro->op1))
1484 * wi::to_offset (vro->op2));
1485 if (wi::fits_shwi_p (off))
1486 vro->off = off.to_shwi ();
1490 return orig;
1493 static vec<vn_reference_op_s>
1494 valueize_refs (vec<vn_reference_op_s> orig)
1496 bool tem;
1497 return valueize_refs_1 (orig, &tem);
1500 static vec<vn_reference_op_s> shared_lookup_references;
1502 /* Create a vector of vn_reference_op_s structures from REF, a
1503 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1504 this function. *VALUEIZED_ANYTHING will specify whether any
1505 operands were valueized. */
1507 static vec<vn_reference_op_s>
1508 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1510 if (!ref)
1511 return vNULL;
1512 shared_lookup_references.truncate (0);
1513 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1514 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1515 valueized_anything);
1516 return shared_lookup_references;
1519 /* Create a vector of vn_reference_op_s structures from CALL, a
1520 call statement. The vector is shared among all callers of
1521 this function. */
1523 static vec<vn_reference_op_s>
1524 valueize_shared_reference_ops_from_call (gcall *call)
1526 if (!call)
1527 return vNULL;
1528 shared_lookup_references.truncate (0);
1529 copy_reference_ops_from_call (call, &shared_lookup_references);
1530 shared_lookup_references = valueize_refs (shared_lookup_references);
1531 return shared_lookup_references;
1534 /* Lookup a SCCVN reference operation VR in the current hash table.
1535 Returns the resulting value number if it exists in the hash table,
1536 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1537 vn_reference_t stored in the hashtable if something is found. */
1539 static tree
1540 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1542 vn_reference_s **slot;
1543 hashval_t hash;
1545 hash = vr->hashcode;
1546 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1547 if (!slot && current_info == optimistic_info)
1548 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1549 if (slot)
1551 if (vnresult)
1552 *vnresult = (vn_reference_t)*slot;
1553 return ((vn_reference_t)*slot)->result;
1556 return NULL_TREE;
1559 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1560 with the current VUSE and performs the expression lookup. */
1562 static void *
1563 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1564 unsigned int cnt, void *vr_)
1566 vn_reference_t vr = (vn_reference_t)vr_;
1567 vn_reference_s **slot;
1568 hashval_t hash;
1570 /* This bounds the stmt walks we perform on reference lookups
1571 to O(1) instead of O(N) where N is the number of dominating
1572 stores. */
1573 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1574 return (void *)-1;
1576 if (last_vuse_ptr)
1577 *last_vuse_ptr = vuse;
1579 /* Fixup vuse and hash. */
1580 if (vr->vuse)
1581 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1582 vr->vuse = vuse_ssa_val (vuse);
1583 if (vr->vuse)
1584 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1586 hash = vr->hashcode;
1587 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1588 if (!slot && current_info == optimistic_info)
1589 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1590 if (slot)
1591 return *slot;
1593 return NULL;
1596 /* Lookup an existing or insert a new vn_reference entry into the
1597 value table for the VUSE, SET, TYPE, OPERANDS reference which
1598 has the value VALUE which is either a constant or an SSA name. */
1600 static vn_reference_t
1601 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1602 alias_set_type set,
1603 tree type,
1604 vec<vn_reference_op_s,
1605 va_heap> operands,
1606 tree value)
1608 vn_reference_s vr1;
1609 vn_reference_t result;
1610 unsigned value_id;
1611 vr1.vuse = vuse;
1612 vr1.operands = operands;
1613 vr1.type = type;
1614 vr1.set = set;
1615 vr1.hashcode = vn_reference_compute_hash (&vr1);
1616 if (vn_reference_lookup_1 (&vr1, &result))
1617 return result;
1618 if (TREE_CODE (value) == SSA_NAME)
1619 value_id = VN_INFO (value)->value_id;
1620 else
1621 value_id = get_or_alloc_constant_value_id (value);
1622 return vn_reference_insert_pieces (vuse, set, type,
1623 operands.copy (), value, value_id);
1626 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1627 from the statement defining VUSE and if not successful tries to
1628 translate *REFP and VR_ through an aggregate copy at the definition
1629 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1630 of *REF and *VR. If only disambiguation was performed then
1631 *DISAMBIGUATE_ONLY is set to true. */
1633 static void *
1634 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1635 bool *disambiguate_only)
1637 vn_reference_t vr = (vn_reference_t)vr_;
1638 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1639 tree base = ao_ref_base (ref);
1640 HOST_WIDE_INT offset, maxsize;
1641 static vec<vn_reference_op_s>
1642 lhs_ops = vNULL;
1643 ao_ref lhs_ref;
1644 bool lhs_ref_ok = false;
1646 /* If the reference is based on a parameter that was determined as
1647 pointing to readonly memory, it doesn't change. */
1648 if (TREE_CODE (base) == MEM_REF
1649 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1650 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1651 && bitmap_bit_p (const_parms,
1652 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1654 *disambiguate_only = true;
1655 return NULL;
1658 /* First try to disambiguate after value-replacing in the definitions LHS. */
1659 if (is_gimple_assign (def_stmt))
1661 tree lhs = gimple_assign_lhs (def_stmt);
1662 bool valueized_anything = false;
1663 /* Avoid re-allocation overhead. */
1664 lhs_ops.truncate (0);
1665 copy_reference_ops_from_ref (lhs, &lhs_ops);
1666 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1667 if (valueized_anything)
1669 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1670 get_alias_set (lhs),
1671 TREE_TYPE (lhs), lhs_ops);
1672 if (lhs_ref_ok
1673 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1675 *disambiguate_only = true;
1676 return NULL;
1679 else
1681 ao_ref_init (&lhs_ref, lhs);
1682 lhs_ref_ok = true;
1685 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1686 && gimple_call_num_args (def_stmt) <= 4)
1688 /* For builtin calls, valueize their arguments and call the
1689 alias oracle again. Valueization may improve points-to
1690 info of pointers and constify size and position arguments.
1691 Originally this was motivated by PR61034 which has
1692 conditional calls to free falsely clobbering ref because
1693 of imprecise points-to info of the argument. */
1694 tree oldargs[4];
1695 bool valueized_anything = false;
1696 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1698 oldargs[i] = gimple_call_arg (def_stmt, i);
1699 if (TREE_CODE (oldargs[i]) == SSA_NAME
1700 && VN_INFO (oldargs[i])->valnum != oldargs[i])
1702 gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
1703 valueized_anything = true;
1706 if (valueized_anything)
1708 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1709 ref);
1710 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1711 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1712 if (!res)
1714 *disambiguate_only = true;
1715 return NULL;
1720 if (*disambiguate_only)
1721 return (void *)-1;
1723 offset = ref->offset;
1724 maxsize = ref->max_size;
1726 /* If we cannot constrain the size of the reference we cannot
1727 test if anything kills it. */
1728 if (maxsize == -1)
1729 return (void *)-1;
1731 /* We can't deduce anything useful from clobbers. */
1732 if (gimple_clobber_p (def_stmt))
1733 return (void *)-1;
1735 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1736 from that definition.
1737 1) Memset. */
1738 if (is_gimple_reg_type (vr->type)
1739 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1740 && integer_zerop (gimple_call_arg (def_stmt, 1))
1741 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1742 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1744 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1745 tree base2;
1746 HOST_WIDE_INT offset2, size2, maxsize2;
1747 bool reverse;
1748 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1749 &reverse);
1750 size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
1751 if ((unsigned HOST_WIDE_INT)size2 / 8
1752 == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
1753 && maxsize2 != -1
1754 && operand_equal_p (base, base2, 0)
1755 && offset2 <= offset
1756 && offset2 + size2 >= offset + maxsize)
1758 tree val = build_zero_cst (vr->type);
1759 return vn_reference_lookup_or_insert_for_pieces
1760 (vuse, vr->set, vr->type, vr->operands, val);
1764 /* 2) Assignment from an empty CONSTRUCTOR. */
1765 else if (is_gimple_reg_type (vr->type)
1766 && gimple_assign_single_p (def_stmt)
1767 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1768 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1770 tree base2;
1771 HOST_WIDE_INT offset2, size2, maxsize2;
1772 bool reverse;
1773 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1774 &offset2, &size2, &maxsize2, &reverse);
1775 if (maxsize2 != -1
1776 && operand_equal_p (base, base2, 0)
1777 && offset2 <= offset
1778 && offset2 + size2 >= offset + maxsize)
1780 tree val = build_zero_cst (vr->type);
1781 return vn_reference_lookup_or_insert_for_pieces
1782 (vuse, vr->set, vr->type, vr->operands, val);
1786 /* 3) Assignment from a constant. We can use folds native encode/interpret
1787 routines to extract the assigned bits. */
1788 else if (vn_walk_kind == VN_WALKREWRITE
1789 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1790 && ref->size == maxsize
1791 && maxsize % BITS_PER_UNIT == 0
1792 && offset % BITS_PER_UNIT == 0
1793 && is_gimple_reg_type (vr->type)
1794 && !contains_storage_order_barrier_p (vr->operands)
1795 && gimple_assign_single_p (def_stmt)
1796 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
1798 tree base2;
1799 HOST_WIDE_INT offset2, size2, maxsize2;
1800 bool reverse;
1801 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1802 &offset2, &size2, &maxsize2, &reverse);
1803 if (!reverse
1804 && maxsize2 != -1
1805 && maxsize2 == size2
1806 && size2 % BITS_PER_UNIT == 0
1807 && offset2 % BITS_PER_UNIT == 0
1808 && operand_equal_p (base, base2, 0)
1809 && offset2 <= offset
1810 && offset2 + size2 >= offset + maxsize)
1812 /* We support up to 512-bit values (for V8DFmode). */
1813 unsigned char buffer[64];
1814 int len;
1816 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1817 buffer, sizeof (buffer));
1818 if (len > 0)
1820 tree val = native_interpret_expr (vr->type,
1821 buffer
1822 + ((offset - offset2)
1823 / BITS_PER_UNIT),
1824 ref->size / BITS_PER_UNIT);
1825 if (val)
1826 return vn_reference_lookup_or_insert_for_pieces
1827 (vuse, vr->set, vr->type, vr->operands, val);
1832 /* 4) Assignment from an SSA name which definition we may be able
1833 to access pieces from. */
1834 else if (ref->size == maxsize
1835 && is_gimple_reg_type (vr->type)
1836 && !contains_storage_order_barrier_p (vr->operands)
1837 && gimple_assign_single_p (def_stmt)
1838 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1840 tree rhs1 = gimple_assign_rhs1 (def_stmt);
1841 gimple *def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
1842 if (is_gimple_assign (def_stmt2)
1843 && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1844 || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1845 && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1847 tree base2;
1848 HOST_WIDE_INT offset2, size2, maxsize2, off;
1849 bool reverse;
1850 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1851 &offset2, &size2, &maxsize2,
1852 &reverse);
1853 off = offset - offset2;
1854 if (!reverse
1855 && maxsize2 != -1
1856 && maxsize2 == size2
1857 && operand_equal_p (base, base2, 0)
1858 && offset2 <= offset
1859 && offset2 + size2 >= offset + maxsize)
1861 tree val = NULL_TREE;
1862 HOST_WIDE_INT elsz
1863 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
1864 if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
1866 if (off == 0)
1867 val = gimple_assign_rhs1 (def_stmt2);
1868 else if (off == elsz)
1869 val = gimple_assign_rhs2 (def_stmt2);
1871 else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1872 && off % elsz == 0)
1874 tree ctor = gimple_assign_rhs1 (def_stmt2);
1875 unsigned i = off / elsz;
1876 if (i < CONSTRUCTOR_NELTS (ctor))
1878 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
1879 if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
1881 if (TREE_CODE (TREE_TYPE (elt->value))
1882 != VECTOR_TYPE)
1883 val = elt->value;
1887 if (val)
1888 return vn_reference_lookup_or_insert_for_pieces
1889 (vuse, vr->set, vr->type, vr->operands, val);
1894 /* 5) For aggregate copies translate the reference through them if
1895 the copy kills ref. */
1896 else if (vn_walk_kind == VN_WALKREWRITE
1897 && gimple_assign_single_p (def_stmt)
1898 && (DECL_P (gimple_assign_rhs1 (def_stmt))
1899 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
1900 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1902 tree base2;
1903 HOST_WIDE_INT maxsize2;
1904 int i, j, k;
1905 auto_vec<vn_reference_op_s> rhs;
1906 vn_reference_op_t vro;
1907 ao_ref r;
1909 if (!lhs_ref_ok)
1910 return (void *)-1;
1912 /* See if the assignment kills REF. */
1913 base2 = ao_ref_base (&lhs_ref);
1914 maxsize2 = lhs_ref.max_size;
1915 if (maxsize2 == -1
1916 || (base != base2
1917 && (TREE_CODE (base) != MEM_REF
1918 || TREE_CODE (base2) != MEM_REF
1919 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
1920 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
1921 TREE_OPERAND (base2, 1))))
1922 || !stmt_kills_ref_p (def_stmt, ref))
1923 return (void *)-1;
1925 /* Find the common base of ref and the lhs. lhs_ops already
1926 contains valueized operands for the lhs. */
1927 i = vr->operands.length () - 1;
1928 j = lhs_ops.length () - 1;
1929 while (j >= 0 && i >= 0
1930 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
1932 i--;
1933 j--;
1936 /* ??? The innermost op should always be a MEM_REF and we already
1937 checked that the assignment to the lhs kills vr. Thus for
1938 aggregate copies using char[] types the vn_reference_op_eq
1939 may fail when comparing types for compatibility. But we really
1940 don't care here - further lookups with the rewritten operands
1941 will simply fail if we messed up types too badly. */
1942 HOST_WIDE_INT extra_off = 0;
1943 if (j == 0 && i >= 0
1944 && lhs_ops[0].opcode == MEM_REF
1945 && lhs_ops[0].off != -1)
1947 if (lhs_ops[0].off == vr->operands[i].off)
1948 i--, j--;
1949 else if (vr->operands[i].opcode == MEM_REF
1950 && vr->operands[i].off != -1)
1952 extra_off = vr->operands[i].off - lhs_ops[0].off;
1953 i--, j--;
1957 /* i now points to the first additional op.
1958 ??? LHS may not be completely contained in VR, one or more
1959 VIEW_CONVERT_EXPRs could be in its way. We could at least
1960 try handling outermost VIEW_CONVERT_EXPRs. */
1961 if (j != -1)
1962 return (void *)-1;
1964 /* Punt if the additional ops contain a storage order barrier. */
1965 for (k = i; k >= 0; k--)
1967 vro = &vr->operands[k];
1968 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
1969 return (void *)-1;
1972 /* Now re-write REF to be based on the rhs of the assignment. */
1973 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1975 /* Apply an extra offset to the inner MEM_REF of the RHS. */
1976 if (extra_off != 0)
1978 if (rhs.length () < 2
1979 || rhs[0].opcode != MEM_REF
1980 || rhs[0].off == -1)
1981 return (void *)-1;
1982 rhs[0].off += extra_off;
1983 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
1984 build_int_cst (TREE_TYPE (rhs[0].op0),
1985 extra_off));
1988 /* We need to prepend vr->operands[0..i] to rhs. */
1989 vec<vn_reference_op_s> old = vr->operands;
1990 if (i + 1 + rhs.length () > vr->operands.length ())
1991 vr->operands.safe_grow (i + 1 + rhs.length ());
1992 else
1993 vr->operands.truncate (i + 1 + rhs.length ());
1994 FOR_EACH_VEC_ELT (rhs, j, vro)
1995 vr->operands[i + 1 + j] = *vro;
1996 vr->operands = valueize_refs (vr->operands);
1997 if (old == shared_lookup_references)
1998 shared_lookup_references = vr->operands;
1999 vr->hashcode = vn_reference_compute_hash (vr);
2001 /* Try folding the new reference to a constant. */
2002 tree val = fully_constant_vn_reference_p (vr);
2003 if (val)
2004 return vn_reference_lookup_or_insert_for_pieces
2005 (vuse, vr->set, vr->type, vr->operands, val);
2007 /* Adjust *ref from the new operands. */
2008 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2009 return (void *)-1;
2010 /* This can happen with bitfields. */
2011 if (ref->size != r.size)
2012 return (void *)-1;
2013 *ref = r;
2015 /* Do not update last seen VUSE after translating. */
2016 last_vuse_ptr = NULL;
2018 /* Keep looking for the adjusted *REF / VR pair. */
2019 return NULL;
2022 /* 6) For memcpy copies translate the reference through them if
2023 the copy kills ref. */
2024 else if (vn_walk_kind == VN_WALKREWRITE
2025 && is_gimple_reg_type (vr->type)
2026 /* ??? Handle BCOPY as well. */
2027 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2028 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2029 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2030 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2031 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2032 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2033 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2034 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
2036 tree lhs, rhs;
2037 ao_ref r;
2038 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
2039 vn_reference_op_s op;
2040 HOST_WIDE_INT at;
2042 /* Only handle non-variable, addressable refs. */
2043 if (ref->size != maxsize
2044 || offset % BITS_PER_UNIT != 0
2045 || ref->size % BITS_PER_UNIT != 0)
2046 return (void *)-1;
2048 /* Extract a pointer base and an offset for the destination. */
2049 lhs = gimple_call_arg (def_stmt, 0);
2050 lhs_offset = 0;
2051 if (TREE_CODE (lhs) == SSA_NAME)
2053 lhs = SSA_VAL (lhs);
2054 if (TREE_CODE (lhs) == SSA_NAME)
2056 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2057 if (gimple_assign_single_p (def_stmt)
2058 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2059 lhs = gimple_assign_rhs1 (def_stmt);
2062 if (TREE_CODE (lhs) == ADDR_EXPR)
2064 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2065 &lhs_offset);
2066 if (!tem)
2067 return (void *)-1;
2068 if (TREE_CODE (tem) == MEM_REF
2069 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2071 lhs = TREE_OPERAND (tem, 0);
2072 if (TREE_CODE (lhs) == SSA_NAME)
2073 lhs = SSA_VAL (lhs);
2074 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2076 else if (DECL_P (tem))
2077 lhs = build_fold_addr_expr (tem);
2078 else
2079 return (void *)-1;
2081 if (TREE_CODE (lhs) != SSA_NAME
2082 && TREE_CODE (lhs) != ADDR_EXPR)
2083 return (void *)-1;
2085 /* Extract a pointer base and an offset for the source. */
2086 rhs = gimple_call_arg (def_stmt, 1);
2087 rhs_offset = 0;
2088 if (TREE_CODE (rhs) == SSA_NAME)
2089 rhs = SSA_VAL (rhs);
2090 if (TREE_CODE (rhs) == ADDR_EXPR)
2092 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2093 &rhs_offset);
2094 if (!tem)
2095 return (void *)-1;
2096 if (TREE_CODE (tem) == MEM_REF
2097 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2099 rhs = TREE_OPERAND (tem, 0);
2100 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2102 else if (DECL_P (tem))
2103 rhs = build_fold_addr_expr (tem);
2104 else
2105 return (void *)-1;
2107 if (TREE_CODE (rhs) != SSA_NAME
2108 && TREE_CODE (rhs) != ADDR_EXPR)
2109 return (void *)-1;
2111 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2113 /* The bases of the destination and the reference have to agree. */
2114 if ((TREE_CODE (base) != MEM_REF
2115 && !DECL_P (base))
2116 || (TREE_CODE (base) == MEM_REF
2117 && (TREE_OPERAND (base, 0) != lhs
2118 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
2119 || (DECL_P (base)
2120 && (TREE_CODE (lhs) != ADDR_EXPR
2121 || TREE_OPERAND (lhs, 0) != base)))
2122 return (void *)-1;
2124 at = offset / BITS_PER_UNIT;
2125 if (TREE_CODE (base) == MEM_REF)
2126 at += tree_to_uhwi (TREE_OPERAND (base, 1));
2127 /* If the access is completely outside of the memcpy destination
2128 area there is no aliasing. */
2129 if (lhs_offset >= at + maxsize / BITS_PER_UNIT
2130 || lhs_offset + copy_size <= at)
2131 return NULL;
2132 /* And the access has to be contained within the memcpy destination. */
2133 if (lhs_offset > at
2134 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2135 return (void *)-1;
2137 /* Make room for 2 operands in the new reference. */
2138 if (vr->operands.length () < 2)
2140 vec<vn_reference_op_s> old = vr->operands;
2141 vr->operands.safe_grow_cleared (2);
2142 if (old == shared_lookup_references)
2143 shared_lookup_references = vr->operands;
2145 else
2146 vr->operands.truncate (2);
2148 /* The looked-through reference is a simple MEM_REF. */
2149 memset (&op, 0, sizeof (op));
2150 op.type = vr->type;
2151 op.opcode = MEM_REF;
2152 op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
2153 op.off = at - lhs_offset + rhs_offset;
2154 vr->operands[0] = op;
2155 op.type = TREE_TYPE (rhs);
2156 op.opcode = TREE_CODE (rhs);
2157 op.op0 = rhs;
2158 op.off = -1;
2159 vr->operands[1] = op;
2160 vr->hashcode = vn_reference_compute_hash (vr);
2162 /* Adjust *ref from the new operands. */
2163 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2164 return (void *)-1;
2165 /* This can happen with bitfields. */
2166 if (ref->size != r.size)
2167 return (void *)-1;
2168 *ref = r;
2170 /* Do not update last seen VUSE after translating. */
2171 last_vuse_ptr = NULL;
2173 /* Keep looking for the adjusted *REF / VR pair. */
2174 return NULL;
2177 /* Bail out and stop walking. */
2178 return (void *)-1;
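/* To illustrate the translation cases above (a sketch, not taken from
   any particular testcase): given

     memcpy (&a, &b, sizeof (a));
     x_1 = a.f;

   case 6 rewrites the lookup of a.f into a lookup of the corresponding
   bytes of b (a MEM_REF based on &b at the matching offset) and keeps
   walking the virtual operand chain, so a dominating store to b.f can
   still provide the value of x_1.  */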
2181 /* Lookup a reference operation by its parts, in the current hash table.
2182 Returns the resulting value number if it exists in the hash table,
2183 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2184 vn_reference_t stored in the hashtable if something is found. */
2186 tree
2187 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2188 vec<vn_reference_op_s> operands,
2189 vn_reference_t *vnresult, vn_lookup_kind kind)
2191 struct vn_reference_s vr1;
2192 vn_reference_t tmp;
2193 tree cst;
2195 if (!vnresult)
2196 vnresult = &tmp;
2197 *vnresult = NULL;
2199 vr1.vuse = vuse_ssa_val (vuse);
2200 shared_lookup_references.truncate (0);
2201 shared_lookup_references.safe_grow (operands.length ());
2202 memcpy (shared_lookup_references.address (),
2203 operands.address (),
2204 sizeof (vn_reference_op_s)
2205 * operands.length ());
2206 vr1.operands = operands = shared_lookup_references
2207 = valueize_refs (shared_lookup_references);
2208 vr1.type = type;
2209 vr1.set = set;
2210 vr1.hashcode = vn_reference_compute_hash (&vr1);
2211 if ((cst = fully_constant_vn_reference_p (&vr1)))
2212 return cst;
2214 vn_reference_lookup_1 (&vr1, vnresult);
2215 if (!*vnresult
2216 && kind != VN_NOWALK
2217 && vr1.vuse)
2219 ao_ref r;
2220 vn_walk_kind = kind;
2221 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2222 *vnresult =
2223 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2224 vn_reference_lookup_2,
2225 vn_reference_lookup_3,
2226 vuse_ssa_val, &vr1);
2227 gcc_checking_assert (vr1.operands == shared_lookup_references);
2230 if (*vnresult)
2231 return (*vnresult)->result;
2233 return NULL_TREE;
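/* For illustration, a caller that already has a decomposed reference
   (as PRE does for expressions it wants to re-materialize) could check
   for an available value roughly like

     vn_reference_t res;
     tree val = vn_reference_lookup_pieces (vuse, set, type, operands,
					     &res, VN_WALK);
     if (val)
       ... reuse VAL instead of re-computing the load ...

   where VUSE, SET, TYPE and OPERANDS stand for the caller's own data;
   this is only a sketch of the intended use.  */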
2236 /* Lookup OP in the current hash table, and return the resulting value
2237 number if it exists in the hash table. Return NULL_TREE if it does
2238 not exist in the hash table or if the result field of the structure
2239 was NULL. VNRESULT will be filled in with the vn_reference_t
2240 stored in the hashtable if one exists. When TBAA_P is false assume
2241 we are looking up a store and treat it as having alias-set zero. */
2243 tree
2244 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2245 vn_reference_t *vnresult, bool tbaa_p)
2247 vec<vn_reference_op_s> operands;
2248 struct vn_reference_s vr1;
2249 tree cst;
2250 bool valuezied_anything;
2252 if (vnresult)
2253 *vnresult = NULL;
2255 vr1.vuse = vuse_ssa_val (vuse);
2256 vr1.operands = operands
2257 = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
2258 vr1.type = TREE_TYPE (op);
2259 vr1.set = tbaa_p ? get_alias_set (op) : 0;
2260 vr1.hashcode = vn_reference_compute_hash (&vr1);
2261 if ((cst = fully_constant_vn_reference_p (&vr1)))
2262 return cst;
2264 if (kind != VN_NOWALK
2265 && vr1.vuse)
2267 vn_reference_t wvnresult;
2268 ao_ref r;
2269 /* Make sure to use a valueized reference if we valueized anything.
2270 Otherwise preserve the full reference for advanced TBAA. */
2271 if (!valuezied_anything
2272 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2273 vr1.operands))
2274 ao_ref_init (&r, op);
2275 if (! tbaa_p)
2276 r.ref_alias_set = r.base_alias_set = 0;
2277 vn_walk_kind = kind;
2278 wvnresult =
2279 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2280 vn_reference_lookup_2,
2281 vn_reference_lookup_3,
2282 vuse_ssa_val, &vr1);
2283 gcc_checking_assert (vr1.operands == shared_lookup_references);
2284 if (wvnresult)
2286 if (vnresult)
2287 *vnresult = wvnresult;
2288 return wvnresult->result;
2291 return NULL_TREE;
2294 return vn_reference_lookup_1 (&vr1, vnresult);
2297 /* Lookup CALL in the current hash table and return the entry in
2298 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2300 void
2301 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2302 vn_reference_t vr)
2304 if (vnresult)
2305 *vnresult = NULL;
2307 tree vuse = gimple_vuse (call);
2309 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2310 vr->operands = valueize_shared_reference_ops_from_call (call);
2311 vr->type = gimple_expr_type (call);
2312 vr->set = 0;
2313 vr->hashcode = vn_reference_compute_hash (vr);
2314 vn_reference_lookup_1 (vr, vnresult);
2317 /* Insert OP into the current hash table with a value number of
2318 RESULT, and return the resulting reference structure we created. */
2320 static vn_reference_t
2321 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2323 vn_reference_s **slot;
2324 vn_reference_t vr1;
2325 bool tem;
2327 vr1 = current_info->references_pool->allocate ();
2328 if (TREE_CODE (result) == SSA_NAME)
2329 vr1->value_id = VN_INFO (result)->value_id;
2330 else
2331 vr1->value_id = get_or_alloc_constant_value_id (result);
2332 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2333 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2334 vr1->type = TREE_TYPE (op);
2335 vr1->set = get_alias_set (op);
2336 vr1->hashcode = vn_reference_compute_hash (vr1);
2337 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2338 vr1->result_vdef = vdef;
2340 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2341 INSERT);
2343 /* Because we lookup stores using vuses, and value number failures
2344 using the vdefs (see visit_reference_op_store for how and why),
2345 it's possible that on failure we may try to insert an already
2346 inserted store. This is not wrong, there is no ssa name for a
2347 store that we could use as a differentiator anyway. Thus, unlike
2348 the other lookup functions, you cannot gcc_assert (!*slot)
2349 here. */
2351 /* But free the old slot in case of a collision. */
2352 if (*slot)
2353 free_reference (*slot);
2355 *slot = vr1;
2356 return vr1;
2359 /* Insert a reference by its pieces into the current hash table with
2360 a value number of RESULT. Return the resulting reference
2361 structure we created. */
2363 vn_reference_t
2364 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2365 vec<vn_reference_op_s> operands,
2366 tree result, unsigned int value_id)
2369 vn_reference_s **slot;
2370 vn_reference_t vr1;
2372 vr1 = current_info->references_pool->allocate ();
2373 vr1->value_id = value_id;
2374 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2375 vr1->operands = valueize_refs (operands);
2376 vr1->type = type;
2377 vr1->set = set;
2378 vr1->hashcode = vn_reference_compute_hash (vr1);
2379 if (result && TREE_CODE (result) == SSA_NAME)
2380 result = SSA_VAL (result);
2381 vr1->result = result;
2383 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2384 INSERT);
2386 /* At this point we should have all the things inserted that we have
2387 seen before, and we should never try inserting something that
2388 already exists. */
2389 gcc_assert (!*slot);
2390 if (*slot)
2391 free_reference (*slot);
2393 *slot = vr1;
2394 return vr1;
2397 /* Compute and return the hash value for nary operation VBO1. */
2399 static hashval_t
2400 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2402 inchash::hash hstate;
2403 unsigned i;
2405 for (i = 0; i < vno1->length; ++i)
2406 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2407 vno1->op[i] = SSA_VAL (vno1->op[i]);
2409 if (((vno1->length == 2
2410 && commutative_tree_code (vno1->opcode))
2411 || (vno1->length == 3
2412 && commutative_ternary_tree_code (vno1->opcode)))
2413 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2414 std::swap (vno1->op[0], vno1->op[1]);
2415 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2416 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2418 std::swap (vno1->op[0], vno1->op[1]);
2419 vno1->opcode = swap_tree_comparison (vno1->opcode);
2422 hstate.add_int (vno1->opcode);
2423 for (i = 0; i < vno1->length; ++i)
2424 inchash::add_expr (vno1->op[i], hstate);
2426 return hstate.end ();
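/* E.g. the canonicalization above gives a_1 + b_2 and b_2 + a_1 the
   same hash, and a comparison such as b_2 > a_1 may be rewritten into
   the equivalent a_1 < b_2 before hashing, so both spellings of the
   same operation end up in the same hash bucket.  */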
2429 /* Compare nary operations VNO1 and VNO2 and return true if they are
2430 equivalent. */
2432 bool
2433 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2435 unsigned i;
2437 if (vno1->hashcode != vno2->hashcode)
2438 return false;
2440 if (vno1->length != vno2->length)
2441 return false;
2443 if (vno1->opcode != vno2->opcode
2444 || !types_compatible_p (vno1->type, vno2->type))
2445 return false;
2447 for (i = 0; i < vno1->length; ++i)
2448 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2449 return false;
2451 return true;
2454 /* Initialize VNO from the pieces provided. */
2456 static void
2457 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2458 enum tree_code code, tree type, tree *ops)
2460 vno->opcode = code;
2461 vno->length = length;
2462 vno->type = type;
2463 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2466 /* Initialize VNO from OP. */
2468 static void
2469 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2471 unsigned i;
2473 vno->opcode = TREE_CODE (op);
2474 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2475 vno->type = TREE_TYPE (op);
2476 for (i = 0; i < vno->length; ++i)
2477 vno->op[i] = TREE_OPERAND (op, i);
2480 /* Return the number of operands for a vn_nary ops structure from STMT. */
2482 static unsigned int
2483 vn_nary_length_from_stmt (gimple *stmt)
2485 switch (gimple_assign_rhs_code (stmt))
2487 case REALPART_EXPR:
2488 case IMAGPART_EXPR:
2489 case VIEW_CONVERT_EXPR:
2490 return 1;
2492 case BIT_FIELD_REF:
2493 return 3;
2495 case CONSTRUCTOR:
2496 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2498 default:
2499 return gimple_num_ops (stmt) - 1;
2503 /* Initialize VNO from STMT. */
2505 static void
2506 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2508 unsigned i;
2510 vno->opcode = gimple_assign_rhs_code (stmt);
2511 vno->type = gimple_expr_type (stmt);
2512 switch (vno->opcode)
2514 case REALPART_EXPR:
2515 case IMAGPART_EXPR:
2516 case VIEW_CONVERT_EXPR:
2517 vno->length = 1;
2518 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2519 break;
2521 case BIT_FIELD_REF:
2522 vno->length = 3;
2523 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2524 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2525 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2526 break;
2528 case CONSTRUCTOR:
2529 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2530 for (i = 0; i < vno->length; ++i)
2531 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2532 break;
2534 default:
2535 gcc_checking_assert (!gimple_assign_single_p (stmt));
2536 vno->length = gimple_num_ops (stmt) - 1;
2537 for (i = 0; i < vno->length; ++i)
2538 vno->op[i] = gimple_op (stmt, i + 1);
2542 /* Compute the hashcode for VNO and look for it in the hash table;
2543 return the resulting value number if it exists in the hash table.
2544 Return NULL_TREE if it does not exist in the hash table or if the
2545 result field of the operation is NULL. VNRESULT will contain the
2546 vn_nary_op_t from the hashtable if it exists. */
2548 static tree
2549 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2551 vn_nary_op_s **slot;
2553 if (vnresult)
2554 *vnresult = NULL;
2556 vno->hashcode = vn_nary_op_compute_hash (vno);
2557 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2558 NO_INSERT);
2559 if (!slot && current_info == optimistic_info)
2560 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2561 NO_INSERT);
2562 if (!slot)
2563 return NULL_TREE;
2564 if (vnresult)
2565 *vnresult = *slot;
2566 return (*slot)->result;
2569 /* Lookup a n-ary operation by its pieces and return the resulting value
2570 number if it exists in the hash table. Return NULL_TREE if it does
2571 not exist in the hash table or if the result field of the operation
2572 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2573 if it exists. */
2575 tree
2576 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2577 tree type, tree *ops, vn_nary_op_t *vnresult)
2579 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2580 sizeof_vn_nary_op (length));
2581 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2582 return vn_nary_op_lookup_1 (vno1, vnresult);
2585 /* Lookup OP in the current hash table, and return the resulting value
2586 number if it exists in the hash table. Return NULL_TREE if it does
2587 not exist in the hash table or if the result field of the operation
2588 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2589 if it exists. */
2591 tree
2592 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2594 vn_nary_op_t vno1
2595 = XALLOCAVAR (struct vn_nary_op_s,
2596 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2597 init_vn_nary_op_from_op (vno1, op);
2598 return vn_nary_op_lookup_1 (vno1, vnresult);
2601 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2602 value number if it exists in the hash table. Return NULL_TREE if
2603 it does not exist in the hash table. VNRESULT will contain the
2604 vn_nary_op_t from the hashtable if it exists. */
2606 tree
2607 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2609 vn_nary_op_t vno1
2610 = XALLOCAVAR (struct vn_nary_op_s,
2611 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2612 init_vn_nary_op_from_stmt (vno1, stmt);
2613 return vn_nary_op_lookup_1 (vno1, vnresult);
2616 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
2618 static tree
2619 vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops)
2621 if (!rcode.is_tree_code ())
2622 return NULL_TREE;
2623 vn_nary_op_t vnresult = NULL;
2624 return vn_nary_op_lookup_pieces (TREE_CODE_LENGTH ((tree_code) rcode),
2625 (tree_code) rcode, type, ops, &vnresult);
2628 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2630 static vn_nary_op_t
2631 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2633 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2636 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2637 obstack. */
2639 static vn_nary_op_t
2640 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2642 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2643 &current_info->nary_obstack);
2645 vno1->value_id = value_id;
2646 vno1->length = length;
2647 vno1->result = result;
2649 return vno1;
2652 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2653 VNO->HASHCODE first. */
2655 static vn_nary_op_t
2656 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2657 bool compute_hash)
2659 vn_nary_op_s **slot;
2661 if (compute_hash)
2662 vno->hashcode = vn_nary_op_compute_hash (vno);
2664 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2665 gcc_assert (!*slot);
2667 *slot = vno;
2668 return vno;
2671 /* Insert an n-ary operation into the current hash table using its
2672 pieces. Return the vn_nary_op_t structure we created and put in
2673 the hashtable. */
2675 vn_nary_op_t
2676 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2677 tree type, tree *ops,
2678 tree result, unsigned int value_id)
2680 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2681 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2682 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2685 /* Insert OP into the current hash table with a value number of
2686 RESULT. Return the vn_nary_op_t structure we created and put in
2687 the hashtable. */
2689 vn_nary_op_t
2690 vn_nary_op_insert (tree op, tree result)
2692 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2693 vn_nary_op_t vno1;
2695 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2696 init_vn_nary_op_from_op (vno1, op);
2697 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2700 /* Insert the rhs of STMT into the current hash table with a value number of
2701 RESULT. */
2703 static vn_nary_op_t
2704 vn_nary_op_insert_stmt (gimple *stmt, tree result)
2706 vn_nary_op_t vno1
2707 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2708 result, VN_INFO (result)->value_id);
2709 init_vn_nary_op_from_stmt (vno1, stmt);
2710 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2713 /* Compute a hashcode for PHI operation VP1 and return it. */
2715 static inline hashval_t
2716 vn_phi_compute_hash (vn_phi_t vp1)
2718 inchash::hash hstate (vp1->phiargs.length () > 2
2719 ? vp1->block->index : vp1->phiargs.length ());
2720 tree phi1op;
2721 tree type;
2722 edge e;
2723 edge_iterator ei;
2725 /* If all PHI arguments are constants we need to distinguish
2726 the PHI node via its type. */
2727 type = vp1->type;
2728 hstate.merge_hash (vn_hash_type (type));
2730 FOR_EACH_EDGE (e, ei, vp1->block->preds)
2732 /* Don't hash backedge values; they need to be handled as VN_TOP
2733 for optimistic value-numbering. */
2734 if (e->flags & EDGE_DFS_BACK)
2735 continue;
2737 phi1op = vp1->phiargs[e->dest_idx];
2738 if (phi1op == VN_TOP)
2739 continue;
2740 inchash::add_expr (phi1op, hstate);
2743 return hstate.end ();
2747 /* Return true if COND1 and COND2 represent the same condition, set
2748 *INVERTED_P if one needs to be inverted to make it the same as
2749 the other. */
2751 static bool
2752 cond_stmts_equal_p (gcond *cond1, gcond *cond2, bool *inverted_p)
2754 enum tree_code code1 = gimple_cond_code (cond1);
2755 enum tree_code code2 = gimple_cond_code (cond2);
2756 tree lhs1 = gimple_cond_lhs (cond1);
2757 tree lhs2 = gimple_cond_lhs (cond2);
2758 tree rhs1 = gimple_cond_rhs (cond1);
2759 tree rhs2 = gimple_cond_rhs (cond2);
2761 *inverted_p = false;
2762 if (code1 == code2)
2764 else if (code1 == swap_tree_comparison (code2))
2765 std::swap (lhs2, rhs2);
2766 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
2767 *inverted_p = true;
2768 else if (code1 == invert_tree_comparison
2769 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
2771 std::swap (lhs2, rhs2);
2772 *inverted_p = true;
2774 else
2775 return false;
2777 lhs1 = vn_valueize (lhs1);
2778 rhs1 = vn_valueize (rhs1);
2779 lhs2 = vn_valueize (lhs2);
2780 rhs2 = vn_valueize (rhs2);
2781 return ((expressions_equal_p (lhs1, lhs2)
2782 && expressions_equal_p (rhs1, rhs2))
2783 || (commutative_tree_code (code1)
2784 && expressions_equal_p (lhs1, rhs2)
2785 && expressions_equal_p (rhs1, lhs2)));
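/* E.g. the conditions a_1 < b_2 and b_2 > a_1 are considered the same
   with *INVERTED_P false, while a_1 < b_2 and a_1 >= b_2 are the same
   condition with *INVERTED_P set (modulo NaN handling for floats).  */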
2788 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2790 static int
2791 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2793 if (vp1->hashcode != vp2->hashcode)
2794 return false;
2796 if (vp1->block != vp2->block)
2798 if (vp1->phiargs.length () != vp2->phiargs.length ())
2799 return false;
2801 switch (vp1->phiargs.length ())
2803 case 1:
2804 /* Single-arg PHIs are just copies. */
2805 break;
2807 case 2:
2809 /* Rule out backedges into the PHI. */
2810 if (vp1->block->loop_father->header == vp1->block
2811 || vp2->block->loop_father->header == vp2->block)
2812 return false;
2814 /* If the PHI nodes do not have compatible types
2815 they are not the same. */
2816 if (!types_compatible_p (vp1->type, vp2->type))
2817 return false;
2819 basic_block idom1
2820 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
2821 basic_block idom2
2822 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
2823 /* If the immediate dominators end in switch stmts, multiple
2824 values may end up in the same PHI arg via intermediate
2825 CFG merges. */
2826 if (EDGE_COUNT (idom1->succs) != 2
2827 || EDGE_COUNT (idom2->succs) != 2)
2828 return false;
2830 /* Verify the controlling stmt is the same. */
2831 gimple *last1 = last_stmt (idom1);
2832 gimple *last2 = last_stmt (idom2);
2833 if (gimple_code (last1) != GIMPLE_COND
2834 || gimple_code (last2) != GIMPLE_COND)
2835 return false;
2836 bool inverted_p;
2837 if (! cond_stmts_equal_p (as_a <gcond *> (last1),
2838 as_a <gcond *> (last2), &inverted_p))
2839 return false;
2841 /* Get at true/false controlled edges into the PHI. */
2842 edge te1, te2, fe1, fe2;
2843 if (! extract_true_false_controlled_edges (idom1, vp1->block,
2844 &te1, &fe1)
2845 || ! extract_true_false_controlled_edges (idom2, vp2->block,
2846 &te2, &fe2))
2847 return false;
2849 /* Swap edges if the second condition is the inverted of the
2850 first. */
2851 if (inverted_p)
2852 std::swap (te2, fe2);
2854 /* ??? Handle VN_TOP specially. */
2855 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
2856 vp2->phiargs[te2->dest_idx])
2857 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
2858 vp2->phiargs[fe2->dest_idx]))
2859 return false;
2861 return true;
2864 default:
2865 return false;
2869 /* If the PHI nodes do not have compatible types
2870 they are not the same. */
2871 if (!types_compatible_p (vp1->type, vp2->type))
2872 return false;
2874 /* Any phi in the same block will have its arguments in the
2875 same edge order, because of how we store phi nodes. */
2876 int i;
2877 tree phi1op;
2878 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2880 tree phi2op = vp2->phiargs[i];
2881 if (phi1op == VN_TOP || phi2op == VN_TOP)
2882 continue;
2883 if (!expressions_equal_p (phi1op, phi2op))
2884 return false;
2887 return true;
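/* As an example of the two-argument case above, the PHIs

     # x_5 = PHI <a_1 (3), b_2 (4)>
     # y_8 = PHI <a_1 (6), b_2 (7)>

   in two different blocks can be considered equal when both merge
   points are controlled by equivalent (possibly inverted) GIMPLE_COND
   statements and the arguments coming in on the respective true and
   false edges match.  */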
2890 static vec<tree> shared_lookup_phiargs;
2892 /* Lookup PHI in the current hash table, and return the resulting
2893 value number if it exists in the hash table. Return NULL_TREE if
2894 it does not exist in the hash table. */
2896 static tree
2897 vn_phi_lookup (gimple *phi)
2899 vn_phi_s **slot;
2900 struct vn_phi_s vp1;
2901 edge e;
2902 edge_iterator ei;
2904 shared_lookup_phiargs.truncate (0);
2905 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
2907 /* Canonicalize the SSA_NAME's to their value number. */
2908 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
2910 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
2911 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2912 shared_lookup_phiargs[e->dest_idx] = def;
2914 vp1.type = TREE_TYPE (gimple_phi_result (phi));
2915 vp1.phiargs = shared_lookup_phiargs;
2916 vp1.block = gimple_bb (phi);
2917 vp1.hashcode = vn_phi_compute_hash (&vp1);
2918 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2919 NO_INSERT);
2920 if (!slot && current_info == optimistic_info)
2921 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2922 NO_INSERT);
2923 if (!slot)
2924 return NULL_TREE;
2925 return (*slot)->result;
2928 /* Insert PHI into the current hash table with a value number of
2929 RESULT. */
2931 static vn_phi_t
2932 vn_phi_insert (gimple *phi, tree result)
2934 vn_phi_s **slot;
2935 vn_phi_t vp1 = current_info->phis_pool->allocate ();
2936 vec<tree> args = vNULL;
2937 edge e;
2938 edge_iterator ei;
2940 args.safe_grow (gimple_phi_num_args (phi));
2942 /* Canonicalize the SSA_NAME's to their value number. */
2943 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
2945 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
2946 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2947 args[e->dest_idx] = def;
2949 vp1->value_id = VN_INFO (result)->value_id;
2950 vp1->type = TREE_TYPE (gimple_phi_result (phi));
2951 vp1->phiargs = args;
2952 vp1->block = gimple_bb (phi);
2953 vp1->result = result;
2954 vp1->hashcode = vn_phi_compute_hash (vp1);
2956 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
2958 /* Because we iterate over phi operations more than once, it's
2959 possible the slot might already exist here, hence no assert. */
2960 *slot = vp1;
2961 return vp1;
2965 /* Print set of components in strongly connected component SCC to OUT. */
2967 static void
2968 print_scc (FILE *out, vec<tree> scc)
2970 tree var;
2971 unsigned int i;
2973 fprintf (out, "SCC consists of:");
2974 FOR_EACH_VEC_ELT (scc, i, var)
2976 fprintf (out, " ");
2977 print_generic_expr (out, var, 0);
2979 fprintf (out, "\n");
2982 /* Return true if BB1 is dominated by BB2 taking into account edges
2983 that are not executable. */
2985 static bool
2986 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
2988 edge_iterator ei;
2989 edge e;
2991 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
2992 return true;
2994 /* Before iterating we'd like to know if there exists a
2995 (executable) path from bb2 to bb1 at all; if not, we can
2996 directly return false. For now simply iterate once. */
2998 /* Iterate to the single executable bb1 predecessor. */
2999 if (EDGE_COUNT (bb1->preds) > 1)
3001 edge prede = NULL;
3002 FOR_EACH_EDGE (e, ei, bb1->preds)
3003 if (e->flags & EDGE_EXECUTABLE)
3005 if (prede)
3007 prede = NULL;
3008 break;
3010 prede = e;
3012 if (prede)
3014 bb1 = prede->src;
3016 /* Re-do the dominance check with changed bb1. */
3017 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3018 return true;
3022 /* Iterate to the single executable bb2 successor. */
3023 edge succe = NULL;
3024 FOR_EACH_EDGE (e, ei, bb2->succs)
3025 if (e->flags & EDGE_EXECUTABLE)
3027 if (succe)
3029 succe = NULL;
3030 break;
3032 succe = e;
3034 if (succe)
3036 /* Verify the reached block is only reached through succe.
3037 If there is only one edge we can spare us the dominator
3038 check and iterate directly. */
3039 if (EDGE_COUNT (succe->dest->preds) > 1)
3041 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3042 if (e != succe
3043 && (e->flags & EDGE_EXECUTABLE))
3045 succe = NULL;
3046 break;
3049 if (succe)
3051 bb2 = succe->dest;
3053 /* Re-do the dominance check with changed bb2. */
3054 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3055 return true;
3059 /* We could now iterate updating bb1 / bb2. */
3060 return false;
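/* For example, if BB1 has two predecessors but only one of the
   incoming edges is marked EDGE_EXECUTABLE, the dominance query is
   redone from that single executable predecessor; BB2 may dominate
   that block even though it does not dominate BB1 in the plain CFG.  */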
3063 /* Set the value number of FROM to TO, return true if it has changed
3064 as a result. */
3066 static inline bool
3067 set_ssa_val_to (tree from, tree to)
3069 tree currval = SSA_VAL (from);
3070 HOST_WIDE_INT toff, coff;
3072 /* The only thing we allow as value numbers are ssa_names
3073 and invariants. So assert that here. We don't allow VN_TOP
3074 as visiting a stmt should produce a value-number other than
3075 that.
3076 ??? Still VN_TOP can happen for unreachable code, so force
3077 it to varying in that case. Not all code is prepared to
3078 get VN_TOP on valueization. */
3079 if (to == VN_TOP)
3081 if (dump_file && (dump_flags & TDF_DETAILS))
3082 fprintf (dump_file, "Forcing value number to varying on "
3083 "receiving VN_TOP\n");
3084 to = from;
3087 gcc_assert (to != NULL_TREE
3088 && ((TREE_CODE (to) == SSA_NAME
3089 && (to == from || SSA_VAL (to) == to))
3090 || is_gimple_min_invariant (to)));
3092 if (from != to)
3094 if (currval == from)
3096 if (dump_file && (dump_flags & TDF_DETAILS))
3098 fprintf (dump_file, "Not changing value number of ");
3099 print_generic_expr (dump_file, from, 0);
3100 fprintf (dump_file, " from VARYING to ");
3101 print_generic_expr (dump_file, to, 0);
3102 fprintf (dump_file, "\n");
3104 return false;
3106 else if (currval != VN_TOP
3107 && ! is_gimple_min_invariant (currval)
3108 && is_gimple_min_invariant (to))
3110 if (dump_file && (dump_flags & TDF_DETAILS))
3112 fprintf (dump_file, "Forcing VARYING instead of changing "
3113 "value number of ");
3114 print_generic_expr (dump_file, from, 0);
3115 fprintf (dump_file, " from ");
3116 print_generic_expr (dump_file, currval, 0);
3117 fprintf (dump_file, " (non-constant) to ");
3118 print_generic_expr (dump_file, to, 0);
3119 fprintf (dump_file, " (constant)\n");
3121 to = from;
3123 else if (TREE_CODE (to) == SSA_NAME
3124 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3125 to = from;
3128 if (dump_file && (dump_flags & TDF_DETAILS))
3130 fprintf (dump_file, "Setting value number of ");
3131 print_generic_expr (dump_file, from, 0);
3132 fprintf (dump_file, " to ");
3133 print_generic_expr (dump_file, to, 0);
3136 if (currval != to
3137 && !operand_equal_p (currval, to, 0)
3138 /* ??? For addresses involving volatile objects or types operand_equal_p
3139 does not reliably detect ADDR_EXPRs as equal. We know we are only
3140 getting invariant gimple addresses here, so can use
3141 get_addr_base_and_unit_offset to do this comparison. */
3142 && !(TREE_CODE (currval) == ADDR_EXPR
3143 && TREE_CODE (to) == ADDR_EXPR
3144 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3145 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3146 && coff == toff))
3148 /* If we equate two SSA names we have to make the side-band info
3149 of the leader conservative (and remember whatever original value
3150 was present). */
3151 if (TREE_CODE (to) == SSA_NAME)
3153 if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3154 && SSA_NAME_RANGE_INFO (to))
3156 if (SSA_NAME_IS_DEFAULT_DEF (to)
3157 || dominated_by_p_w_unex
3158 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3159 gimple_bb (SSA_NAME_DEF_STMT (to))))
3160 /* Keep the info from the dominator. */
3162 else if (SSA_NAME_IS_DEFAULT_DEF (from)
3163 || dominated_by_p_w_unex
3164 (gimple_bb (SSA_NAME_DEF_STMT (to)),
3165 gimple_bb (SSA_NAME_DEF_STMT (from))))
3167 /* Save old info. */
3168 if (! VN_INFO (to)->info.range_info)
3170 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3171 VN_INFO (to)->range_info_anti_range_p
3172 = SSA_NAME_ANTI_RANGE_P (to);
3174 /* Use that from the dominator. */
3175 SSA_NAME_RANGE_INFO (to) = SSA_NAME_RANGE_INFO (from);
3176 SSA_NAME_ANTI_RANGE_P (to) = SSA_NAME_ANTI_RANGE_P (from);
3178 else
3180 /* Save old info. */
3181 if (! VN_INFO (to)->info.range_info)
3183 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3184 VN_INFO (to)->range_info_anti_range_p
3185 = SSA_NAME_ANTI_RANGE_P (to);
3187 /* Rather than allocating memory and unioning the info
3188 just clear it. */
3189 SSA_NAME_RANGE_INFO (to) = NULL;
3192 else if (POINTER_TYPE_P (TREE_TYPE (to))
3193 && SSA_NAME_PTR_INFO (to))
3195 if (SSA_NAME_IS_DEFAULT_DEF (to)
3196 || dominated_by_p_w_unex
3197 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3198 gimple_bb (SSA_NAME_DEF_STMT (to))))
3199 /* Keep the info from the dominator. */
3201 else if (SSA_NAME_IS_DEFAULT_DEF (from)
3202 || dominated_by_p_w_unex
3203 (gimple_bb (SSA_NAME_DEF_STMT (to)),
3204 gimple_bb (SSA_NAME_DEF_STMT (from))))
3206 /* Save old info. */
3207 if (! VN_INFO (to)->info.ptr_info)
3208 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3209 /* Use that from the dominator. */
3210 SSA_NAME_PTR_INFO (to) = SSA_NAME_PTR_INFO (from);
3212 else if (! SSA_NAME_PTR_INFO (from)
3213 /* Handle the case of trivially equivalent info. */
3214 || memcmp (SSA_NAME_PTR_INFO (to),
3215 SSA_NAME_PTR_INFO (from),
3216 sizeof (ptr_info_def)) != 0)
3218 /* Save old info. */
3219 if (! VN_INFO (to)->info.ptr_info)
3220 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3221 /* Rather than allocating memory and unioning the info
3222 just clear it. */
3223 SSA_NAME_PTR_INFO (to) = NULL;
3228 VN_INFO (from)->valnum = to;
3229 if (dump_file && (dump_flags & TDF_DETAILS))
3230 fprintf (dump_file, " (changed)\n");
3231 return true;
3233 if (dump_file && (dump_flags & TDF_DETAILS))
3234 fprintf (dump_file, "\n");
3235 return false;
3238 /* Mark as processed all the definitions in the defining stmt of USE, or
3239 the USE itself. */
3241 static void
3242 mark_use_processed (tree use)
3244 ssa_op_iter iter;
3245 def_operand_p defp;
3246 gimple *stmt = SSA_NAME_DEF_STMT (use);
3248 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3250 VN_INFO (use)->use_processed = true;
3251 return;
3254 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3256 tree def = DEF_FROM_PTR (defp);
3258 VN_INFO (def)->use_processed = true;
3262 /* Set all definitions in STMT to value number to themselves.
3263 Return true if a value number changed. */
3265 static bool
3266 defs_to_varying (gimple *stmt)
3268 bool changed = false;
3269 ssa_op_iter iter;
3270 def_operand_p defp;
3272 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3274 tree def = DEF_FROM_PTR (defp);
3275 changed |= set_ssa_val_to (def, def);
3277 return changed;
3280 /* Visit a copy between LHS and RHS, return true if the value number
3281 changed. */
3283 static bool
3284 visit_copy (tree lhs, tree rhs)
3286 /* Valueize. */
3287 rhs = SSA_VAL (rhs);
3289 return set_ssa_val_to (lhs, rhs);
3292 /* Visit a nary operator RHS, value number it, and return true if the
3293 value number of LHS has changed as a result. */
3295 static bool
3296 visit_nary_op (tree lhs, gimple *stmt)
3298 bool changed = false;
3299 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3301 if (result)
3302 changed = set_ssa_val_to (lhs, result);
3303 else
3305 changed = set_ssa_val_to (lhs, lhs);
3306 vn_nary_op_insert_stmt (stmt, lhs);
3309 return changed;
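/* E.g. with

     x_1 = a_2 + b_3;
     y_4 = a_2 + b_3;

   visiting the second statement finds the entry inserted for the
   first one and y_4 is value-numbered to x_1.  */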
3312 /* Visit a call STMT storing into LHS. Return true if the value number
3313 of the LHS has changed as a result. */
3315 static bool
3316 visit_reference_op_call (tree lhs, gcall *stmt)
3318 bool changed = false;
3319 struct vn_reference_s vr1;
3320 vn_reference_t vnresult = NULL;
3321 tree vdef = gimple_vdef (stmt);
3323 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3324 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3325 lhs = NULL_TREE;
3327 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3328 if (vnresult)
3330 if (vnresult->result_vdef && vdef)
3331 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3332 else if (vdef)
3333 /* If the call was discovered to be pure or const reflect
3334 that as far as possible. */
3335 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
3337 if (!vnresult->result && lhs)
3338 vnresult->result = lhs;
3340 if (vnresult->result && lhs)
3341 changed |= set_ssa_val_to (lhs, vnresult->result);
3343 else
3345 vn_reference_t vr2;
3346 vn_reference_s **slot;
3347 if (vdef)
3348 changed |= set_ssa_val_to (vdef, vdef);
3349 if (lhs)
3350 changed |= set_ssa_val_to (lhs, lhs);
3351 vr2 = current_info->references_pool->allocate ();
3352 vr2->vuse = vr1.vuse;
3353 /* As we are not walking the virtual operand chain we know the
3354 shared_lookup_references are still original so we can re-use
3355 them here. */
3356 vr2->operands = vr1.operands.copy ();
3357 vr2->type = vr1.type;
3358 vr2->set = vr1.set;
3359 vr2->hashcode = vr1.hashcode;
3360 vr2->result = lhs;
3361 vr2->result_vdef = vdef;
3362 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3363 INSERT);
3364 gcc_assert (!*slot);
3365 *slot = vr2;
3368 return changed;
3371 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3372 and return true if the value number of the LHS has changed as a result. */
3374 static bool
3375 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3377 bool changed = false;
3378 tree last_vuse;
3379 tree result;
3381 last_vuse = gimple_vuse (stmt);
3382 last_vuse_ptr = &last_vuse;
3383 result = vn_reference_lookup (op, gimple_vuse (stmt),
3384 default_vn_walk_kind, NULL, true);
3385 last_vuse_ptr = NULL;
3387 /* We handle type-punning through unions by value-numbering based
3388 on offset and size of the access. Be prepared to handle a
3389 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
3390 if (result
3391 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3393 /* We will be setting the value number of lhs to the value number
3394 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3395 So first simplify and lookup this expression to see if it
3396 is already available. */
3397 mprts_hook = vn_lookup_simplify_result;
3398 code_helper rcode = VIEW_CONVERT_EXPR;
3399 tree ops[3] = { result };
3400 bool res = gimple_resimplify1 (NULL, &rcode, TREE_TYPE (op), ops,
3401 vn_valueize);
3402 mprts_hook = NULL;
3403 gimple *new_stmt = NULL;
3404 if (res
3405 && gimple_simplified_result_is_gimple_val (rcode, ops))
3406 /* The expression is already available. */
3407 result = ops[0];
3408 else
3410 tree val = vn_lookup_simplify_result (rcode, TREE_TYPE (op), ops);
3411 if (!val)
3413 gimple_seq stmts = NULL;
3414 result = maybe_push_res_to_seq (rcode, TREE_TYPE (op), ops,
3415 &stmts);
3416 if (result)
3418 gcc_assert (gimple_seq_singleton_p (stmts));
3419 new_stmt = gimple_seq_first_stmt (stmts);
3422 else
3423 /* The expression is already available. */
3424 result = val;
3426 if (new_stmt)
3428 /* The expression is not yet available, value-number lhs to
3429 the new SSA_NAME we created. */
3430 /* Initialize value-number information properly. */
3431 VN_INFO_GET (result)->valnum = result;
3432 VN_INFO (result)->value_id = get_next_value_id ();
3433 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
3434 new_stmt);
3435 VN_INFO (result)->needs_insertion = true;
3436 /* As all "inserted" statements are singleton SCCs, insert
3437 into the valid table. This is strictly needed to
3438 avoid re-generating new value SSA_NAMEs for the same
3439 expression during SCC iteration over and over (the
3440 optimistic table gets cleared after each iteration).
3441 We do not need to insert into the optimistic table, as
3442 lookups there will fall back to the valid table. */
3443 if (current_info == optimistic_info)
3445 current_info = valid_info;
3446 vn_nary_op_insert_stmt (new_stmt, result);
3447 current_info = optimistic_info;
3449 else
3450 vn_nary_op_insert_stmt (new_stmt, result);
3451 if (dump_file && (dump_flags & TDF_DETAILS))
3453 fprintf (dump_file, "Inserting name ");
3454 print_generic_expr (dump_file, result, 0);
3455 fprintf (dump_file, " for expression ");
3456 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
3457 fprintf (dump_file, "\n");
3462 if (result)
3463 changed = set_ssa_val_to (lhs, result);
3464 else
3466 changed = set_ssa_val_to (lhs, lhs);
3467 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3470 return changed;
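/* For example, with a union used for type-punning

     u.f = f_1;
     i_2 = u.i;

   the load from u.i can look up to the stored value f_1 of a different
   type; the result is then wrapped in a VIEW_CONVERT_EXPR to the type
   of the load, re-using an existing SSA name for that expression if
   one is known or inserting a new one otherwise.  */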
3474 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3475 and return true if the value number of the LHS has changed as a result. */
3477 static bool
3478 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3480 bool changed = false;
3481 vn_reference_t vnresult = NULL;
3482 tree assign;
3483 bool resultsame = false;
3484 tree vuse = gimple_vuse (stmt);
3485 tree vdef = gimple_vdef (stmt);
3487 if (TREE_CODE (op) == SSA_NAME)
3488 op = SSA_VAL (op);
3490 /* First we want to lookup using the *vuses* from the store and see
3491 if the last store to this location with the same address
3492 had the same value.
3494 The vuses represent the memory state before the store. If the
3495 memory state, address, and value of the store are the same as the
3496 last store to this location, then this store will produce the
3497 same memory state as that store.
3499 In this case the vdef versions for this store are value numbered to those
3500 vuse versions, since they represent the same memory state after
3501 this store.
3503 Otherwise, the vdefs for the store are used when inserting into
3504 the table, since the store generates a new memory state. */
3506 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
3507 if (vnresult
3508 && vnresult->result)
3510 tree result = vnresult->result;
3511 if (TREE_CODE (result) == SSA_NAME)
3512 result = SSA_VAL (result);
3513 resultsame = expressions_equal_p (result, op);
3514 if (resultsame)
3516 /* If the TBAA state isn't compatible for downstream reads
3517 we cannot value-number the VDEFs the same. */
3518 alias_set_type set = get_alias_set (lhs);
3519 if (vnresult->set != set
3520 && ! alias_set_subset_of (set, vnresult->set))
3521 resultsame = false;
3525 if (!resultsame)
3527 /* Only perform the following when being called from PRE
3528 which embeds tail merging. */
3529 if (default_vn_walk_kind == VN_WALK)
3531 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3532 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3533 if (vnresult)
3535 VN_INFO (vdef)->use_processed = true;
3536 return set_ssa_val_to (vdef, vnresult->result_vdef);
3540 if (dump_file && (dump_flags & TDF_DETAILS))
3542 fprintf (dump_file, "No store match\n");
3543 fprintf (dump_file, "Value numbering store ");
3544 print_generic_expr (dump_file, lhs, 0);
3545 fprintf (dump_file, " to ");
3546 print_generic_expr (dump_file, op, 0);
3547 fprintf (dump_file, "\n");
3549 /* Have to set value numbers before insert, since insert is
3550 going to valueize the references in-place. */
3551 if (vdef)
3552 changed |= set_ssa_val_to (vdef, vdef);
3554 /* Do not insert structure copies into the tables. */
3555 if (is_gimple_min_invariant (op)
3556 || is_gimple_reg (op))
3557 vn_reference_insert (lhs, op, vdef, NULL);
3559 /* Only perform the following when being called from PRE
3560 which embeds tail merging. */
3561 if (default_vn_walk_kind == VN_WALK)
3563 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3564 vn_reference_insert (assign, lhs, vuse, vdef);
3567 else
3569 /* We had a match, so value number the vdef to have the value
3570 number of the vuse it came from. */
3572 if (dump_file && (dump_flags & TDF_DETAILS))
3573 fprintf (dump_file, "Store matched earlier value, "
3574 "value numbering store vdefs to matching vuses.\n");
3576 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3579 return changed;
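/* E.g. for

     *p_1 = x_2;
     ...
     *p_1 = x_2;

   the second store looks up the first one via its vuse, sees the same
   address and value, and value-numbers its vdef to its vuse, recording
   that the memory state does not change.  */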
3582 /* Visit and value number PHI, return true if the value number
3583 changed. */
3585 static bool
3586 visit_phi (gimple *phi)
3588 bool changed = false;
3589 tree result;
3590 tree sameval = VN_TOP;
3591 bool allsame = true;
3592 unsigned n_executable = 0;
3594 /* TODO: We could check for this in init_sccvn, and replace this
3595 with a gcc_assert. */
3596 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3597 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3599 /* See if all non-TOP arguments have the same value. TOP is
3600 equivalent to everything, so we can ignore it. */
3601 edge_iterator ei;
3602 edge e;
3603 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3604 if (e->flags & EDGE_EXECUTABLE)
3606 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3608 ++n_executable;
3609 if (TREE_CODE (def) == SSA_NAME)
3610 def = SSA_VAL (def);
3611 if (def == VN_TOP)
3612 continue;
3613 if (sameval == VN_TOP)
3614 sameval = def;
3615 else if (!expressions_equal_p (def, sameval))
3617 allsame = false;
3618 break;
3622 /* If none of the edges was executable or all incoming values are
3623 undefined, keep the value-number at VN_TOP. If only a single edge
3624 is executable, use its value. */
3625 if (sameval == VN_TOP
3626 || n_executable == 1)
3627 return set_ssa_val_to (PHI_RESULT (phi), sameval);
3629 /* First see if it is equivalent to a phi node in this block. We prefer
3630 this as it allows IV elimination - see PRs 66502 and 67167. */
3631 result = vn_phi_lookup (phi);
3632 if (result)
3633 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3634 /* Otherwise all value numbered to the same value, the phi node has that
3635 value. */
3636 else if (allsame)
3637 changed = set_ssa_val_to (PHI_RESULT (phi), sameval);
3638 else
3640 vn_phi_insert (phi, PHI_RESULT (phi));
3641 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3644 return changed;
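/* E.g. a PHI node whose executable arguments all have the same value,

     x_4 = PHI <a_1 (2), a_1 (3)>,

   is value-numbered to a_1; a PHI with a single executable incoming
   edge simply takes the value from that edge.  */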
3647 /* Try to simplify RHS using equivalences and constant folding. */
3649 static tree
3650 try_to_simplify (gassign *stmt)
3652 enum tree_code code = gimple_assign_rhs_code (stmt);
3653 tree tem;
3655 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
3656 in this case; there is no point in doing extra work. */
3657 if (code == SSA_NAME)
3658 return NULL_TREE;
3660 /* First try constant folding based on our current lattice. */
3661 mprts_hook = vn_lookup_simplify_result;
3662 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3663 mprts_hook = NULL;
3664 if (tem
3665 && (TREE_CODE (tem) == SSA_NAME
3666 || is_gimple_min_invariant (tem)))
3667 return tem;
3669 return NULL_TREE;
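/* E.g. when a_2 is known to have value 3, the statement x_1 = a_2 * 2
   constant-folds to 6 through the valueization hooks above, and the
   caller then records the constant as the value number of x_1.  */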
3672 /* Visit and value number USE, return true if the value number
3673 changed. */
3675 static bool
3676 visit_use (tree use)
3678 bool changed = false;
3679 gimple *stmt = SSA_NAME_DEF_STMT (use);
3681 mark_use_processed (use);
3683 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3684 if (dump_file && (dump_flags & TDF_DETAILS)
3685 && !SSA_NAME_IS_DEFAULT_DEF (use))
3687 fprintf (dump_file, "Value numbering ");
3688 print_generic_expr (dump_file, use, 0);
3689 fprintf (dump_file, " stmt = ");
3690 print_gimple_stmt (dump_file, stmt, 0, 0);
3693 /* Handle uninitialized uses. */
3694 if (SSA_NAME_IS_DEFAULT_DEF (use))
3695 changed = set_ssa_val_to (use, use);
3696 else if (gimple_code (stmt) == GIMPLE_PHI)
3697 changed = visit_phi (stmt);
3698 else if (gimple_has_volatile_ops (stmt))
3699 changed = defs_to_varying (stmt);
3700 else if (gassign *ass = dyn_cast <gassign *> (stmt))
3702 enum tree_code code = gimple_assign_rhs_code (ass);
3703 tree lhs = gimple_assign_lhs (ass);
3704 tree rhs1 = gimple_assign_rhs1 (ass);
3705 tree simplified;
3707 /* Shortcut for copies. Simplifying copies is pointless,
3708 since we copy the expression and value they represent. */
3709 if (code == SSA_NAME
3710 && TREE_CODE (lhs) == SSA_NAME)
3712 changed = visit_copy (lhs, rhs1);
3713 goto done;
3715 simplified = try_to_simplify (ass);
3716 if (simplified)
3718 if (dump_file && (dump_flags & TDF_DETAILS))
3720 fprintf (dump_file, "RHS ");
3721 print_gimple_expr (dump_file, ass, 0, 0);
3722 fprintf (dump_file, " simplified to ");
3723 print_generic_expr (dump_file, simplified, 0);
3724 fprintf (dump_file, "\n");
3727 /* Setting value numbers to constants will occasionally
3728 screw up phi congruence because constants are not
3729 uniquely associated with a single ssa name that can be
3730 looked up. */
3731 if (simplified
3732 && is_gimple_min_invariant (simplified)
3733 && TREE_CODE (lhs) == SSA_NAME)
3735 changed = set_ssa_val_to (lhs, simplified);
3736 goto done;
3738 else if (simplified
3739 && TREE_CODE (simplified) == SSA_NAME
3740 && TREE_CODE (lhs) == SSA_NAME)
3742 changed = visit_copy (lhs, simplified);
3743 goto done;
3746 if ((TREE_CODE (lhs) == SSA_NAME
3747 /* We can substitute SSA_NAMEs that are live over
3748 abnormal edges with their constant value. */
3749 && !(gimple_assign_copy_p (ass)
3750 && is_gimple_min_invariant (rhs1))
3751 && !(simplified
3752 && is_gimple_min_invariant (simplified))
3753 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3754 /* Stores or copies from SSA_NAMEs that are live over
3755 abnormal edges are a problem. */
3756 || (code == SSA_NAME
3757 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3758 changed = defs_to_varying (ass);
3759 else if (REFERENCE_CLASS_P (lhs)
3760 || DECL_P (lhs))
3761 changed = visit_reference_op_store (lhs, rhs1, ass);
3762 else if (TREE_CODE (lhs) == SSA_NAME)
3764 if ((gimple_assign_copy_p (ass)
3765 && is_gimple_min_invariant (rhs1))
3766 || (simplified
3767 && is_gimple_min_invariant (simplified)))
3769 if (simplified)
3770 changed = set_ssa_val_to (lhs, simplified);
3771 else
3772 changed = set_ssa_val_to (lhs, rhs1);
3774 else
3776 /* Visit the original statement. */
3777 switch (vn_get_stmt_kind (ass))
3779 case VN_NARY:
3780 changed = visit_nary_op (lhs, ass);
3781 break;
3782 case VN_REFERENCE:
3783 changed = visit_reference_op_load (lhs, rhs1, ass);
3784 break;
3785 default:
3786 changed = defs_to_varying (ass);
3787 break;
3791 else
3792 changed = defs_to_varying (ass);
3794 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
3796 tree lhs = gimple_call_lhs (call_stmt);
3797 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3799 /* Try constant folding based on our current lattice. */
3800 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
3801 vn_valueize);
3802 if (simplified)
3804 if (dump_file && (dump_flags & TDF_DETAILS))
3806 fprintf (dump_file, "call ");
3807 print_gimple_expr (dump_file, call_stmt, 0, 0);
3808 fprintf (dump_file, " simplified to ");
3809 print_generic_expr (dump_file, simplified, 0);
3810 fprintf (dump_file, "\n");
3813 /* Setting value numbers to constants will occasionally
3814 screw up phi congruence because constants are not
3815 uniquely associated with a single ssa name that can be
3816 looked up. */
3817 if (simplified
3818 && is_gimple_min_invariant (simplified))
3820 changed = set_ssa_val_to (lhs, simplified);
3821 if (gimple_vdef (call_stmt))
3822 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
3823 SSA_VAL (gimple_vuse (call_stmt)));
3824 goto done;
3826 else if (simplified
3827 && TREE_CODE (simplified) == SSA_NAME)
3829 changed = visit_copy (lhs, simplified);
3830 if (gimple_vdef (call_stmt))
3831 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
3832 SSA_VAL (gimple_vuse (call_stmt)));
3833 goto done;
3835 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3837 changed = defs_to_varying (call_stmt);
3838 goto done;
3842 if (!gimple_call_internal_p (call_stmt)
3843 && (/* Calls to the same function with the same vuse
3844 and the same operands do not necessarily return the same
3845 value, unless they're pure or const. */
3846 gimple_call_flags (call_stmt) & (ECF_PURE | ECF_CONST)
3847 /* If calls have a vdef, subsequent calls won't have
3848 the same incoming vuse. So, if 2 calls with vdef have the
3849 same vuse, we know they're not subsequent.
3850 We can value number 2 calls to the same function with the
3851 same vuse and the same operands which are not subsequent
3852 the same, because there is no code in the program that can
3853 compare the 2 values... */
3854 || (gimple_vdef (call_stmt)
3855 /* ... unless the call returns a pointer which does
2856 not alias with anything else, in which case the
2857 information that the values are distinct is encoded
3858 in the IL. */
3859 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
3860 /* Only perform the following when being called from PRE
3861 which embeds tail merging. */
3862 && default_vn_walk_kind == VN_WALK)))
3863 changed = visit_reference_op_call (lhs, call_stmt);
3864 else
3865 changed = defs_to_varying (call_stmt);
3867 else
3868 changed = defs_to_varying (stmt);
3869 done:
3870 return changed;
3873 /* Compare two operands by reverse postorder index */
3875 static int
3876 compare_ops (const void *pa, const void *pb)
3878 const tree opa = *((const tree *)pa);
3879 const tree opb = *((const tree *)pb);
3880 gimple *opstmta = SSA_NAME_DEF_STMT (opa);
3881 gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
3882 basic_block bba;
3883 basic_block bbb;
3885 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3886 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3887 else if (gimple_nop_p (opstmta))
3888 return -1;
3889 else if (gimple_nop_p (opstmtb))
3890 return 1;
3892 bba = gimple_bb (opstmta);
3893 bbb = gimple_bb (opstmtb);
3895 if (!bba && !bbb)
3896 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3897 else if (!bba)
3898 return -1;
3899 else if (!bbb)
3900 return 1;
3902 if (bba == bbb)
3904 if (gimple_code (opstmta) == GIMPLE_PHI
3905 && gimple_code (opstmtb) == GIMPLE_PHI)
3906 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3907 else if (gimple_code (opstmta) == GIMPLE_PHI)
3908 return -1;
3909 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3910 return 1;
3911 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3912 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3913 else
3914 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3916 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3919 /* Sort an array containing members of a strongly connected component
3920 SCC so that the members are ordered by RPO number.
3921 This means that when the sort is complete, iterating through the
3922 array will give you the members in RPO order. */
3924 static void
3925 sort_scc (vec<tree> scc)
3927 scc.qsort (compare_ops);
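/* A self-contained sketch of the same idea as compare_ops/sort_scc above:
   qsort with a comparator that falls through a cascade of tie-breakers
   (block RPO number, PHIs before other statements, statement uid, SSA
   version).  The struct, the data and the omission of the early-outs for
   names without a defining block are all made up for illustration; this is
   meant to be read on its own, not compiled as part of this file.  */

#include <stdio.h>
#include <stdlib.h>

struct op { int rpo; int is_phi; int uid; int version; };

static int
cmp_op (const void *pa, const void *pb)
{
  const struct op *a = (const struct op *) pa;
  const struct op *b = (const struct op *) pb;
  if (a->rpo != b->rpo)
    return a->rpo - b->rpo;		/* Earlier block first.  */
  if (a->is_phi != b->is_phi)
    return b->is_phi - a->is_phi;	/* PHIs before other stmts.  */
  if (a->uid != b->uid)
    return a->uid - b->uid;		/* Statement order in the block.  */
  return a->version - b->version;	/* Stable final tie-breaker.  */
}

int
main (void)
{
  struct op ops[3] = { { 2, 0, 7, 4 }, { 1, 1, 0, 2 }, { 1, 0, 3, 9 } };
  qsort (ops, 3, sizeof (struct op), cmp_op);
  for (int i = 0; i < 3; i++)
    printf ("SSA version %d\n", ops[i].version);
  return 0;
}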
3930 /* Insert the no longer used nary ONARY into the hash tables of INFO. */
3932 static void
3933 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3935 size_t size = sizeof_vn_nary_op (onary->length);
3936 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3937 &info->nary_obstack);
3938 memcpy (nary, onary, size);
3939 vn_nary_op_insert_into (nary, info->nary, false);
3942 /* Insert the no longer used phi OPHI into the hash tables of INFO. */
3944 static void
3945 copy_phi (vn_phi_t ophi, vn_tables_t info)
3947 vn_phi_t phi = info->phis_pool->allocate ();
3948 vn_phi_s **slot;
3949 memcpy (phi, ophi, sizeof (*phi));
3950 ophi->phiargs.create (0);
3951 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
3952 gcc_assert (!*slot);
3953 *slot = phi;
3956 /* Insert the no longer used reference OREF into the hash tables of INFO. */
3958 static void
3959 copy_reference (vn_reference_t oref, vn_tables_t info)
3961 vn_reference_t ref;
3962 vn_reference_s **slot;
3963 ref = info->references_pool->allocate ();
3964 memcpy (ref, oref, sizeof (*ref));
3965 oref->operands.create (0);
3966 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
3967 if (*slot)
3968 free_reference (*slot);
3969 *slot = ref;
3972 /* Process a strongly connected component in the SSA graph. */
3974 static void
3975 process_scc (vec<tree> scc)
3977 tree var;
3978 unsigned int i;
3979 unsigned int iterations = 0;
3980 bool changed = true;
3981 vn_nary_op_iterator_type hin;
3982 vn_phi_iterator_type hip;
3983 vn_reference_iterator_type hir;
3984 vn_nary_op_t nary;
3985 vn_phi_t phi;
3986 vn_reference_t ref;
3988 /* If the SCC has a single member, just visit it. */
3989 if (scc.length () == 1)
3991 tree use = scc[0];
3992 if (VN_INFO (use)->use_processed)
3993 return;
3994 /* We need to make sure it doesn't form a cycle itself, which can
3995 happen for self-referential PHI nodes. In that case we would
3996 end up inserting an expression with VN_TOP operands into the
3997 valid table which makes us derive bogus equivalences later.
3998 The cheapest way to check this is to assume it for all PHI nodes. */
3999 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
4000 /* Fallthru to iteration. */ ;
4001 else
4003 visit_use (use);
4004 return;
4008 if (dump_file && (dump_flags & TDF_DETAILS))
4009 print_scc (dump_file, scc);
4011 /* Iterate over the SCC with the optimistic table until it stops
4012 changing. */
4013 current_info = optimistic_info;
4014 while (changed)
4016 changed = false;
4017 iterations++;
4018 if (dump_file && (dump_flags & TDF_DETAILS))
4019 fprintf (dump_file, "Starting iteration %d\n", iterations);
4020 /* As we are value-numbering optimistically we have to
4021 clear the expression tables and the simplified expressions
4022 in each iteration until we converge. */
4023 optimistic_info->nary->empty ();
4024 optimistic_info->phis->empty ();
4025 optimistic_info->references->empty ();
4026 obstack_free (&optimistic_info->nary_obstack, NULL);
4027 gcc_obstack_init (&optimistic_info->nary_obstack);
4028 optimistic_info->phis_pool->release ();
4029 optimistic_info->references_pool->release ();
4030 FOR_EACH_VEC_ELT (scc, i, var)
4031 gcc_assert (!VN_INFO (var)->needs_insertion
4032 && VN_INFO (var)->expr == NULL);
4033 FOR_EACH_VEC_ELT (scc, i, var)
4034 changed |= visit_use (var);
4037 if (dump_file && (dump_flags & TDF_DETAILS))
4038 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4039 statistics_histogram_event (cfun, "SCC iterations", iterations);
4041 /* Finally, copy the contents of the no longer used optimistic
4042 table to the valid table. */
4043 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
4044 copy_nary (nary, valid_info);
4045 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
4046 copy_phi (phi, valid_info);
4047 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
4048 ref, vn_reference_t, hir)
4049 copy_reference (ref, valid_info);
4051 current_info = valid_info;
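/* A small worked example (illustration only) of what the optimistic
   iteration above buys for a cyclic SCC:

       i_1 = PHI <1(entry), i_2(latch)>
       i_2 = i_1 * 1;

   Iteration 1: the back-edge argument i_2 is still VN_TOP, so the PHI
   optimistically gets value 1; i_2 = 1 * 1 then simplifies to 1 as well.
   Iteration 2: the PHI now sees <1, 1> and keeps value 1, i_2 stays 1,
   nothing changed, so the SCC converges with both names known to be the
   constant 1.  Starting from VARYING instead of VN_TOP would never have
   discovered this.  */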
4055 /* Pop the components of the found SCC for NAME off the SCC stack
4056 and process them. Returns true if all went well, false if
4057 we run into resource limits. */
4059 static bool
4060 extract_and_process_scc_for_name (tree name)
4062 auto_vec<tree> scc;
4063 tree x;
4065 /* Found an SCC, pop the components off the SCC stack and
4066 process them. */
4069 x = sccstack.pop ();
4071 VN_INFO (x)->on_sccstack = false;
4072 scc.safe_push (x);
4073 } while (x != name);
4075 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
4076 if (scc.length ()
4077 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4079 if (dump_file)
4080 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
4081 "SCC size %u exceeding %u\n", scc.length (),
4082 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4084 return false;
4087 if (scc.length () > 1)
4088 sort_scc (scc);
4090 process_scc (scc);
4092 return true;
4095 /* Depth first search on NAME to discover and process SCC's in the SSA
4096 graph.
4097 Execution of this algorithm relies on the fact that the SCC's are
4098 popped off the stack in topological order.
4099 Returns true if successful, false if we stopped processing SCC's due
4100 to resource constraints. */
4102 static bool
4103 DFS (tree name)
4105 vec<ssa_op_iter> itervec = vNULL;
4106 vec<tree> namevec = vNULL;
4107 use_operand_p usep = NULL;
4108 gimple *defstmt;
4109 tree use;
4110 ssa_op_iter iter;
4112 start_over:
4113 /* SCC info */
4114 VN_INFO (name)->dfsnum = next_dfs_num++;
4115 VN_INFO (name)->visited = true;
4116 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4118 sccstack.safe_push (name);
4119 VN_INFO (name)->on_sccstack = true;
4120 defstmt = SSA_NAME_DEF_STMT (name);
4122 /* Recursively DFS on our operands, looking for SCC's. */
4123 if (!gimple_nop_p (defstmt))
4125 /* Push a new iterator. */
4126 if (gphi *phi = dyn_cast <gphi *> (defstmt))
4127 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4128 else
4129 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4131 else
4132 clear_and_done_ssa_iter (&iter);
4134 while (1)
4136 /* If we are done processing uses of a name, go up the stack
4137 of iterators and process SCCs as we found them. */
4138 if (op_iter_done (&iter))
4140 /* See if we found an SCC. */
4141 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4142 if (!extract_and_process_scc_for_name (name))
4144 namevec.release ();
4145 itervec.release ();
4146 return false;
4149 /* Check if we are done. */
4150 if (namevec.is_empty ())
4152 namevec.release ();
4153 itervec.release ();
4154 return true;
4157 /* Restore the last use walker and continue walking there. */
4158 use = name;
4159 name = namevec.pop ();
4160 memcpy (&iter, &itervec.last (),
4161 sizeof (ssa_op_iter));
4162 itervec.pop ();
4163 goto continue_walking;
4166 use = USE_FROM_PTR (usep);
4168 /* Since we handle phi nodes, we will sometimes get
4169 invariants in the use expression. */
4170 if (TREE_CODE (use) == SSA_NAME)
4172 if (! (VN_INFO (use)->visited))
4174 /* Recurse by pushing the current use walking state on
4175 the stack and starting over. */
4176 itervec.safe_push (iter);
4177 namevec.safe_push (name);
4178 name = use;
4179 goto start_over;
4181 continue_walking:
4182 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4183 VN_INFO (use)->low);
4185 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4186 && VN_INFO (use)->on_sccstack)
4188 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4189 VN_INFO (name)->low);
4193 usep = op_iter_next_use (&iter);
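/* The walk above is Tarjan's SCC algorithm, made iterative by keeping an
   explicit stack of SSA use iterators.  Below is a self-contained recursive
   sketch of the same algorithm on a plain adjacency-matrix graph, meant to
   be read on its own rather than compiled as part of this file; the graph
   and array sizes are made up for illustration.  SCCs are printed in the
   order they are popped off the stack, which is the topological order the
   comment above relies on.  */

#include <stdio.h>

#define N 4

static const int adj[N][N] = {	/* adj[v][w] != 0 means an edge v -> w.  */
  { 0, 1, 0, 0 },
  { 0, 0, 1, 0 },
  { 0, 1, 0, 1 },
  { 0, 0, 0, 0 }
};

static int dfsnum[N], low[N], onstack[N], stack[N], sp, next_dfs;

static void
dfs (int v)
{
  dfsnum[v] = low[v] = ++next_dfs;
  stack[sp++] = v;
  onstack[v] = 1;
  for (int w = 0; w < N; w++)
    if (adj[v][w])
      {
	if (!dfsnum[w])
	  {
	    /* Tree edge: recurse and propagate the low-link upwards.  */
	    dfs (w);
	    if (low[w] < low[v])
	      low[v] = low[w];
	  }
	else if (onstack[w] && dfsnum[w] < low[v])
	  /* Edge back into an SCC that is still open on the stack.  */
	  low[v] = dfsnum[w];
      }
  if (low[v] == dfsnum[v])
    {
      /* V is the root of an SCC; pop its members off the stack.  */
      int w;
      printf ("SCC:");
      do
	{
	  w = stack[--sp];
	  onstack[w] = 0;
	  printf (" %d", w);
	}
      while (w != v);
      printf ("\n");
    }
}

int
main (void)
{
  for (int v = 0; v < N; v++)
    if (!dfsnum[v])
      dfs (v);
  return 0;
}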
4197 /* Allocate a value number table. */
4199 static void
4200 allocate_vn_table (vn_tables_t table)
4202 table->phis = new vn_phi_table_type (23);
4203 table->nary = new vn_nary_op_table_type (23);
4204 table->references = new vn_reference_table_type (23);
4206 gcc_obstack_init (&table->nary_obstack);
4207 table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4208 table->references_pool = new object_allocator<vn_reference_s>
4209 ("VN references");
4212 /* Free a value number table. */
4214 static void
4215 free_vn_table (vn_tables_t table)
4217 delete table->phis;
4218 table->phis = NULL;
4219 delete table->nary;
4220 table->nary = NULL;
4221 delete table->references;
4222 table->references = NULL;
4223 obstack_free (&table->nary_obstack, NULL);
4224 delete table->phis_pool;
4225 delete table->references_pool;
4228 static void
4229 init_scc_vn (void)
4231 size_t i;
4232 int j;
4233 int *rpo_numbers_temp;
4235 calculate_dominance_info (CDI_DOMINATORS);
4236 mark_dfs_back_edges ();
4238 sccstack.create (0);
4239 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4241 constant_value_ids = BITMAP_ALLOC (NULL);
4243 next_dfs_num = 1;
4244 next_value_id = 1;
4246 vn_ssa_aux_table.create (num_ssa_names + 1);
4247 /* VEC_alloc doesn't actually grow it to the right size; it just
4248 preallocates the space to do so. */
4249 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4250 gcc_obstack_init (&vn_ssa_aux_obstack);
4252 shared_lookup_phiargs.create (0);
4253 shared_lookup_references.create (0);
4254 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4255 rpo_numbers_temp =
4256 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4257 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4259 /* rpo_numbers_temp holds the RPO ordering: rpo[i] = bb means that
4260 the i'th block in RPO order is bb. We want to map bb's to RPO
4261 numbers instead, so we need to invert this array. */
4262 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4263 rpo_numbers[rpo_numbers_temp[j]] = j;
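/* For example (made-up numbers), if rpo_numbers_temp is {5, 3, 4} then
   block 5 comes first in RPO, block 3 second and block 4 third, and the
   loop above fills in rpo_numbers[5] = 0, rpo_numbers[3] = 1 and
   rpo_numbers[4] = 2.  */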
4265 XDELETE (rpo_numbers_temp);
4267 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
4269 renumber_gimple_stmt_uids ();
4271 /* Create the valid and optimistic value numbering tables. */
4272 valid_info = XCNEW (struct vn_tables_s);
4273 allocate_vn_table (valid_info);
4274 optimistic_info = XCNEW (struct vn_tables_s);
4275 allocate_vn_table (optimistic_info);
4276 current_info = valid_info;
4278 /* Create the VN_INFO structures, and initialize value numbers to
4279 TOP or VARYING for parameters. */
4280 for (i = 1; i < num_ssa_names; i++)
4282 tree name = ssa_name (i);
4283 if (!name)
4284 continue;
4286 VN_INFO_GET (name)->valnum = VN_TOP;
4287 VN_INFO (name)->needs_insertion = false;
4288 VN_INFO (name)->expr = NULL;
4289 VN_INFO (name)->value_id = 0;
4291 if (!SSA_NAME_IS_DEFAULT_DEF (name))
4292 continue;
4294 switch (TREE_CODE (SSA_NAME_VAR (name)))
4296 case VAR_DECL:
4297 /* Undefined vars keep TOP. */
4298 break;
4300 case PARM_DECL:
4301 /* Parameters are VARYING but we can record a condition
4302 if we know it is a non-NULL pointer. */
4303 VN_INFO (name)->visited = true;
4304 VN_INFO (name)->valnum = name;
4305 if (POINTER_TYPE_P (TREE_TYPE (name))
4306 && nonnull_arg_p (SSA_NAME_VAR (name)))
4308 tree ops[2];
4309 ops[0] = name;
4310 ops[1] = build_int_cst (TREE_TYPE (name), 0);
4311 vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4312 boolean_true_node, 0);
4313 if (dump_file && (dump_flags & TDF_DETAILS))
4315 fprintf (dump_file, "Recording ");
4316 print_generic_expr (dump_file, name, TDF_SLIM);
4317 fprintf (dump_file, " != 0\n");
4320 break;
4322 case RESULT_DECL:
4323 /* If the result is passed by invisible reference, the default
4324 def is initialized; otherwise it's uninitialized. */
4325 if (DECL_BY_REFERENCE (SSA_NAME_VAR (name)))
4327 VN_INFO (name)->visited = true;
4328 VN_INFO (name)->valnum = name;
4330 break;
4332 default:
4333 gcc_unreachable ();
4338 /* Restore SSA info that has been reset on value leaders. */
4340 void
4341 scc_vn_restore_ssa_info (void)
4343 for (unsigned i = 0; i < num_ssa_names; i++)
4345 tree name = ssa_name (i);
4346 if (name
4347 && has_VN_INFO (name))
4349 if (VN_INFO (name)->needs_insertion)
4351 else if (POINTER_TYPE_P (TREE_TYPE (name))
4352 && VN_INFO (name)->info.ptr_info)
4353 SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4354 else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4355 && VN_INFO (name)->info.range_info)
4357 SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4358 SSA_NAME_ANTI_RANGE_P (name)
4359 = VN_INFO (name)->range_info_anti_range_p;
4365 void
4366 free_scc_vn (void)
4368 size_t i;
4370 delete constant_to_value_id;
4371 constant_to_value_id = NULL;
4372 BITMAP_FREE (constant_value_ids);
4373 shared_lookup_phiargs.release ();
4374 shared_lookup_references.release ();
4375 XDELETEVEC (rpo_numbers);
4377 for (i = 0; i < num_ssa_names; i++)
4379 tree name = ssa_name (i);
4380 if (name
4381 && has_VN_INFO (name)
4382 && VN_INFO (name)->needs_insertion)
4383 release_ssa_name (name);
4385 obstack_free (&vn_ssa_aux_obstack, NULL);
4386 vn_ssa_aux_table.release ();
4388 sccstack.release ();
4389 free_vn_table (valid_info);
4390 XDELETE (valid_info);
4391 free_vn_table (optimistic_info);
4392 XDELETE (optimistic_info);
4394 BITMAP_FREE (const_parms);
4397 /* Set *ID according to RESULT. */
4399 static void
4400 set_value_id_for_result (tree result, unsigned int *id)
4402 if (result && TREE_CODE (result) == SSA_NAME)
4403 *id = VN_INFO (result)->value_id;
4404 else if (result && is_gimple_min_invariant (result))
4405 *id = get_or_alloc_constant_value_id (result);
4406 else
4407 *id = get_next_value_id ();
4410 /* Set the value ids in the valid hash tables. */
4412 static void
4413 set_hashtable_value_ids (void)
4415 vn_nary_op_iterator_type hin;
4416 vn_phi_iterator_type hip;
4417 vn_reference_iterator_type hir;
4418 vn_nary_op_t vno;
4419 vn_reference_t vr;
4420 vn_phi_t vp;
4422 /* Now set the value ids of the things we had put in the hash
4423 table. */
4425 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4426 set_value_id_for_result (vno->result, &vno->value_id);
4428 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4429 set_value_id_for_result (vp->result, &vp->value_id);
4431 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4432 hir)
4433 set_value_id_for_result (vr->result, &vr->value_id);
4436 class sccvn_dom_walker : public dom_walker
4438 public:
4439 sccvn_dom_walker ()
4440 : dom_walker (CDI_DOMINATORS, true), fail (false), cond_stack (vNULL) {}
4441 ~sccvn_dom_walker ();
4443 virtual edge before_dom_children (basic_block);
4444 virtual void after_dom_children (basic_block);
4446 void record_cond (basic_block,
4447 enum tree_code code, tree lhs, tree rhs, bool value);
4448 void record_conds (basic_block,
4449 enum tree_code code, tree lhs, tree rhs, bool value);
4451 bool fail;
4452 vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4453 cond_stack;
4456 sccvn_dom_walker::~sccvn_dom_walker ()
4458 cond_stack.release ();
4461 /* Record a temporary condition for the BB and its dominated blocks. */
4463 void
4464 sccvn_dom_walker::record_cond (basic_block bb,
4465 enum tree_code code, tree lhs, tree rhs,
4466 bool value)
4468 tree ops[2] = { lhs, rhs };
4469 vn_nary_op_t old = NULL;
4470 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4471 current_info->nary->remove_elt_with_hash (old, old->hashcode);
4472 vn_nary_op_t cond
4473 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4474 value
4475 ? boolean_true_node
4476 : boolean_false_node, 0);
4477 if (dump_file && (dump_flags & TDF_DETAILS))
4479 fprintf (dump_file, "Recording temporarily ");
4480 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4481 fprintf (dump_file, " %s ", get_tree_code_name (code));
4482 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4483 fprintf (dump_file, " == %s%s\n",
4484 value ? "true" : "false",
4485 old ? " (old entry saved)" : "");
4487 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4490 /* Record temporary conditions for the BB and its dominated blocks
4491 according to LHS CODE RHS == VALUE, together with the conditions it implies. */
4493 void
4494 sccvn_dom_walker::record_conds (basic_block bb,
4495 enum tree_code code, tree lhs, tree rhs,
4496 bool value)
4498 /* Record the original condition. */
4499 record_cond (bb, code, lhs, rhs, value);
4501 if (!value)
4502 return;
4504 /* Record dominated conditions if the condition is true. Note that
4505 the inversion is already recorded. */
4506 switch (code)
4508 case LT_EXPR:
4509 case GT_EXPR:
4510 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4511 record_cond (bb, NE_EXPR, lhs, rhs, true);
4512 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4513 break;
4515 case EQ_EXPR:
4516 record_cond (bb, LE_EXPR, lhs, rhs, true);
4517 record_cond (bb, GE_EXPR, lhs, rhs, true);
4518 record_cond (bb, LT_EXPR, lhs, rhs, false);
4519 record_cond (bb, GT_EXPR, lhs, rhs, false);
4520 break;
4522 default:
4523 break;
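/* For example (illustration only), record_conds for "a_1 < b_2" with
   VALUE == true records
     a_1 < b_2 == true, a_1 <= b_2 == true,
     a_1 != b_2 == true, a_1 == b_2 == false
   so lookups of any of these predicates in dominated blocks fold to a
   constant while the condition is on the stack.  */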
4527 /* Restore expressions and values derived from conditionals. */
4529 void
4530 sccvn_dom_walker::after_dom_children (basic_block bb)
4532 while (!cond_stack.is_empty ()
4533 && cond_stack.last ().first == bb)
4535 vn_nary_op_t cond = cond_stack.last ().second.first;
4536 vn_nary_op_t old = cond_stack.last ().second.second;
4537 current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4538 if (old)
4539 vn_nary_op_insert_into (old, current_info->nary, false);
4540 cond_stack.pop ();
4544 /* Value number all statements in BB. */
4546 edge
4547 sccvn_dom_walker::before_dom_children (basic_block bb)
4549 edge e;
4550 edge_iterator ei;
4552 if (fail)
4553 return NULL;
4555 if (dump_file && (dump_flags & TDF_DETAILS))
4556 fprintf (dump_file, "Visiting BB %d\n", bb->index);
4558 /* If we have a single predecessor, record the equivalence from a
4559 possible condition on the predecessor edge. */
4560 edge pred_e = NULL;
4561 FOR_EACH_EDGE (e, ei, bb->preds)
4563 /* Ignore simple backedges from this block to allow recording conditions
4564 in loop headers. */
4565 if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
4566 continue;
4567 if (! pred_e)
4568 pred_e = e;
4569 else
4571 pred_e = NULL;
4572 break;
4575 if (pred_e)
4577 /* Check if there are multiple executable successor edges in
4578 the source block. Otherwise there is no additional info
4579 to be recorded. */
4580 edge e2;
4581 FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4582 if (e2 != pred_e
4583 && e2->flags & EDGE_EXECUTABLE)
4584 break;
4585 if (e2 && (e2->flags & EDGE_EXECUTABLE))
4587 gimple *stmt = last_stmt (pred_e->src);
4588 if (stmt
4589 && gimple_code (stmt) == GIMPLE_COND)
4591 enum tree_code code = gimple_cond_code (stmt);
4592 tree lhs = gimple_cond_lhs (stmt);
4593 tree rhs = gimple_cond_rhs (stmt);
4594 record_conds (bb, code, lhs, rhs,
4595 (pred_e->flags & EDGE_TRUE_VALUE) != 0);
4596 code = invert_tree_comparison (code, HONOR_NANS (lhs));
4597 if (code != ERROR_MARK)
4598 record_conds (bb, code, lhs, rhs,
4599 (pred_e->flags & EDGE_TRUE_VALUE) == 0);
4604 /* Value-number all defs in the basic-block. */
4605 for (gphi_iterator gsi = gsi_start_phis (bb);
4606 !gsi_end_p (gsi); gsi_next (&gsi))
4608 gphi *phi = gsi.phi ();
4609 tree res = PHI_RESULT (phi);
4610 if (!VN_INFO (res)->visited
4611 && !DFS (res))
4613 fail = true;
4614 return NULL;
4617 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4618 !gsi_end_p (gsi); gsi_next (&gsi))
4620 ssa_op_iter i;
4621 tree op;
4622 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4623 if (!VN_INFO (op)->visited
4624 && !DFS (op))
4626 fail = true;
4627 return NULL;
4631 /* Finally look at the last stmt. */
4632 gimple *stmt = last_stmt (bb);
4633 if (!stmt)
4634 return NULL;
4636 enum gimple_code code = gimple_code (stmt);
4637 if (code != GIMPLE_COND
4638 && code != GIMPLE_SWITCH
4639 && code != GIMPLE_GOTO)
4640 return NULL;
4642 if (dump_file && (dump_flags & TDF_DETAILS))
4644 fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
4645 print_gimple_stmt (dump_file, stmt, 0, 0);
4648 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4649 if value-numbering can prove they are not reachable. Handling
4650 computed gotos is also possible. */
4651 tree val;
4652 switch (code)
4654 case GIMPLE_COND:
4656 tree lhs = vn_valueize (gimple_cond_lhs (stmt));
4657 tree rhs = vn_valueize (gimple_cond_rhs (stmt));
4658 val = gimple_simplify (gimple_cond_code (stmt),
4659 boolean_type_node, lhs, rhs,
4660 NULL, vn_valueize);
4661 /* If that didn't simplify to a constant see if we have recorded
4662 temporary expressions from taken edges. */
4663 if (!val || TREE_CODE (val) != INTEGER_CST)
4665 tree ops[2];
4666 ops[0] = lhs;
4667 ops[1] = rhs;
4668 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
4669 boolean_type_node, ops, NULL);
4671 break;
4673 case GIMPLE_SWITCH:
4674 val = gimple_switch_index (as_a <gswitch *> (stmt));
4675 break;
4676 case GIMPLE_GOTO:
4677 val = gimple_goto_dest (stmt);
4678 break;
4679 default:
4680 gcc_unreachable ();
4682 if (!val)
4683 return NULL;
4685 edge taken = find_taken_edge (bb, vn_valueize (val));
4686 if (!taken)
4687 return NULL;
4689 if (dump_file && (dump_flags & TDF_DETAILS))
4690 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4691 "not executable\n", bb->index, bb->index, taken->dest->index);
4693 return taken;
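/* An example pass through the walker above (illustration only; block and
   SSA numbers are made up): if BB5's only non-backedge predecessor edge is
   the true edge of "if (x_1 > 0)" ending BB4, then x_1 > 0 == true (plus
   the implied >=, != and == forms) and x_1 <= 0 == false are recorded
   while BB5 and the blocks it dominates are value-numbered, and removed
   again by after_dom_children.  If BB5 itself ends in "if (x_1 > 0)", the
   lookup of the recorded condition folds it to true and the true successor
   edge is returned as the only executable one.  */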
4696 /* Do SCCVN. Returns true if it finished, false if we bailed out
4697 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
4698 how we use the alias oracle when walking memory during the VN process. */
4700 bool
4701 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4703 size_t i;
4705 default_vn_walk_kind = default_vn_walk_kind_;
4707 init_scc_vn ();
4709 /* Collect pointers we know point to readonly memory. */
4710 const_parms = BITMAP_ALLOC (NULL);
4711 tree fnspec = lookup_attribute ("fn spec",
4712 TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
4713 if (fnspec)
4715 fnspec = TREE_VALUE (TREE_VALUE (fnspec));
4716 i = 1;
4717 for (tree arg = DECL_ARGUMENTS (cfun->decl);
4718 arg; arg = DECL_CHAIN (arg), ++i)
4720 if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
4721 break;
4722 if (TREE_STRING_POINTER (fnspec)[i] == 'R'
4723 || TREE_STRING_POINTER (fnspec)[i] == 'r')
4725 tree name = ssa_default_def (cfun, arg);
4726 if (name)
4727 bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
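/* For illustration (hypothetical attribute string, not taken from a real
   declaration): with a "fn spec" string of ".R..r" the loop above marks
   the default defs of the 1st and 4th parameters in CONST_PARMS, because
   bytes 1 and 4 of the string are 'R'/'r'; the remaining parameters are
   not treated as pointing to read-only memory.  */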
4732 /* Walk all blocks in dominator order, value-numbering stmts
4733 SSA defs and decide whether outgoing edges are not executable. */
4734 sccvn_dom_walker walker;
4735 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4736 if (walker.fail)
4738 free_scc_vn ();
4739 return false;
4742 /* Initialize the value ids and prune out remaining VN_TOPs
4743 from dead code. */
4744 for (i = 1; i < num_ssa_names; ++i)
4746 tree name = ssa_name (i);
4747 vn_ssa_aux_t info;
4748 if (!name)
4749 continue;
4750 info = VN_INFO (name);
4751 if (!info->visited)
4752 info->valnum = name;
4753 if (info->valnum == name
4754 || info->valnum == VN_TOP)
4755 info->value_id = get_next_value_id ();
4756 else if (is_gimple_min_invariant (info->valnum))
4757 info->value_id = get_or_alloc_constant_value_id (info->valnum);
4760 /* Propagate. */
4761 for (i = 1; i < num_ssa_names; ++i)
4763 tree name = ssa_name (i);
4764 vn_ssa_aux_t info;
4765 if (!name)
4766 continue;
4767 info = VN_INFO (name);
4768 if (TREE_CODE (info->valnum) == SSA_NAME
4769 && info->valnum != name
4770 && info->value_id != VN_INFO (info->valnum)->value_id)
4771 info->value_id = VN_INFO (info->valnum)->value_id;
4774 set_hashtable_value_ids ();
4776 if (dump_file && (dump_flags & TDF_DETAILS))
4778 fprintf (dump_file, "Value numbers:\n");
4779 for (i = 0; i < num_ssa_names; i++)
4781 tree name = ssa_name (i);
4782 if (name
4783 && VN_INFO (name)->visited
4784 && SSA_VAL (name) != name)
4786 print_generic_expr (dump_file, name, 0);
4787 fprintf (dump_file, " = ");
4788 print_generic_expr (dump_file, SSA_VAL (name), 0);
4789 fprintf (dump_file, "\n");
4794 return true;
4797 /* Return the maximum value id we have ever seen. */
4799 unsigned int
4800 get_max_value_id (void)
4802 return next_value_id;
4805 /* Return the next unique value id. */
4807 unsigned int
4808 get_next_value_id (void)
4810 return next_value_id++;
4814 /* Compare two expressions E1 and E2 and return true if they are equal. */
4816 bool
4817 expressions_equal_p (tree e1, tree e2)
4819 /* The obvious case. */
4820 if (e1 == e2)
4821 return true;
4823 /* If either one is VN_TOP consider them equal. */
4824 if (e1 == VN_TOP || e2 == VN_TOP)
4825 return true;
4827 /* If only one of them is null, they cannot be equal. */
4828 if (!e1 || !e2)
4829 return false;
4831 /* Now perform the actual comparison. */
4832 if (TREE_CODE (e1) == TREE_CODE (e2)
4833 && operand_equal_p (e1, e2, OEP_PURE_SAME))
4834 return true;
4836 return false;
4840 /* Return true if the nary operation NARY may trap. This is a copy
4841 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
4843 bool
4844 vn_nary_may_trap (vn_nary_op_t nary)
4846 tree type;
4847 tree rhs2 = NULL_TREE;
4848 bool honor_nans = false;
4849 bool honor_snans = false;
4850 bool fp_operation = false;
4851 bool honor_trapv = false;
4852 bool handled, ret;
4853 unsigned i;
4855 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4856 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4857 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4859 type = nary->type;
4860 fp_operation = FLOAT_TYPE_P (type);
4861 if (fp_operation)
4863 honor_nans = flag_trapping_math && !flag_finite_math_only;
4864 honor_snans = flag_signaling_nans != 0;
4866 else if (INTEGRAL_TYPE_P (type)
4867 && TYPE_OVERFLOW_TRAPS (type))
4868 honor_trapv = true;
4870 if (nary->length >= 2)
4871 rhs2 = nary->op[1];
4872 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4873 honor_trapv,
4874 honor_nans, honor_snans, rhs2,
4875 &handled);
4876 if (handled
4877 && ret)
4878 return true;
4880 for (i = 0; i < nary->length; ++i)
4881 if (tree_could_trap_p (nary->op[i]))
4882 return true;
4884 return false;
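/* For instance (illustration only), a signed division n-ary may trap
   because the divisor could be zero, and a floating-point comparison may
   trap on a signaling NaN when -fsignaling-nans is in effect; callers such
   as PRE use this predicate to avoid inserting an expression on paths
   where it was not originally executed.  */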