/* SCC value numbering for trees
   Copyright (C) 2006-2013 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "gimple.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "dumpfile.h"
#include "hash-table.h"
#include "alloc-pool.h"
#include "flags.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight line code,
   it is equivalent to a regular hash based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming
   from *outside of that SCC*, so we do not need to do anything special
   to ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.
*/
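
/* As a small illustrative sketch (not from the paper), consider the
   loop-carried cycle

     i_1 = PHI <0(entry), i_2(latch)>
     i_2 = i_1 + 0;

   The SCC {i_1, i_2} is iterated using the optimistic table: i_2 is
   initially VN_TOP, so the PHI value numbers to 0 (VN_TOP arguments
   are ignored), then i_2 = i_1 + 0 simplifies to 0 as well, and the
   next iteration confirms the fixpoint.  Both names end up with value
   number 0, something a single non-iterating RPO pass would not
   discover.  */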
/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : typed_noop_remove <vn_nary_op_s>
{
  typedef vn_nary_op_s value_type;
  typedef vn_nary_op_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const value_type *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const value_type *vno1, const compare_type *vno2)
{
  return vn_nary_op_eq (vno1, vno2);
}

typedef hash_table <vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
/* vn_phi hashtable helpers.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher
{
  typedef vn_phi_s value_type;
  typedef vn_phi_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const value_type *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const value_type *vp1, const compare_type *vp2)
{
  return vn_phi_eq (vp1, vp2);
}

/* Free a phi operation structure VP.  */

inline void
vn_phi_hasher::remove (value_type *phi)
{
  phi->phiargs.release ();
}

typedef hash_table <vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Free a reference operation structure VP.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}
/* vn_reference hashtable helpers.  */

struct vn_reference_hasher
{
  typedef vn_reference_s value_type;
  typedef vn_reference_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const value_type *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const value_type *v, const compare_type *c)
{
  return vn_reference_eq (v, c);
}

inline void
vn_reference_hasher::remove (value_type *v)
{
  free_reference (v);
}

typedef hash_table <vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type nary;
  vn_phi_table_type phis;
  vn_reference_table_type references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;
/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : typed_free_remove <vn_constant_s>
{
  typedef vn_constant_s value_type;
  typedef vn_constant_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const value_type *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const value_type *vc1, const compare_type *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table <vn_constant_hasher> constant_to_value_id;
static bitmap constant_value_ids;
/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;

/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static vec<tree> sccstack;

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
    vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
  return newinfo;
}
/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated
   with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;
  enum tree_code code;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is not an assignment use it directly.  */
  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  code = gimple_assign_rhs_code (def_stmt);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      if ((code == REALPART_EXPR
	   || code == IMAGPART_EXPR
	   || code == VIEW_CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
				      0)) == SSA_NAME)
	expr = fold_build1 (code,
			    gimple_expr_type (def_stmt),
			    TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (code,
			  gimple_expr_type (def_stmt),
			  gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (code,
			  gimple_expr_type (def_stmt),
			  gimple_assign_rhs1 (def_stmt),
			  gimple_assign_rhs2 (def_stmt));
      break;

    case tcc_exceptional:
      if (code == CONSTRUCTOR
	  && TREE_CODE
	       (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
	expr = gimple_assign_rhs1 (def_stmt);
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
		  return VN_NARY;

		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      case tcc_constant:
		return VN_CONSTANT;

	      default:
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}
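
/* For instance (an illustrative, not exhaustive, mapping): x = y + z
   or x = -y is VN_NARY, a load x = a.f and any call are VN_REFERENCE,
   x = PHI <...> is VN_PHI, and x = 1 is VN_CONSTANT.  */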
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id.find_slot_with_hash (&vc, vc.hashcode, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id.find_slot_with_hash (&vc, vc.hashcode, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (vro->off != -1)
	{
	  if (off == -1)
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (off != -1
	      && off != 0)
	    result = iterative_hash_hashval_t (off, result);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  result = iterative_hash_hashval_t (TREE_CODE (op), result);
		  result = iterative_hash_expr (op, result);
		}
	    }
	  else
	    result = vn_reference_op_compute_hash (vro, result);
	}
    }
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
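
/* Note that runs of operands with known constant offsets are folded
   into a single accumulated offset before hashing, so syntactically
   different but offset-equal references (say a.b.c versus the
   equivalent MEM[&a + off] form) can land in the same hash bucket.
   vn_reference_eq below performs the matching accumulation.  */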
/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  if (vro1->off == -1)
	    break;
	  off1 += vro1->off;
	}
      for (; vr2->operands.iterate (j, &vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  if (vro2->off == -1)
	    break;
	  off2 += vro2->off;
	}
      if (off1 != off2)
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
	 || vr2->operands.length () != j);

  return true;
}
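
/* The offset accumulation above mirrors vn_reference_compute_hash:
   runs of operands with known constant offsets compare by their sum,
   so for example a[1] and MEM[&a + 4] (assuming a 4-byte element
   type) compare equal even though their operand vectors differ.  */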
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      result->reserve (3);

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      result->quick_push (temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case MODIFY_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case WITH_SIZE_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.off = 0;
	  break;
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  if (host_integerp (TREE_OPERAND (ref, 1), 0))
	    temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
	  break;
	case BIT_FIELD_REF:
	  /* Record bits and position.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& TREE_CODE (this_offset) == INTEGER_CST)
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    double_int off
		      = tree_to_double_int (this_offset)
			+ tree_to_double_int (bit_offset)
			  .rshift (BITS_PER_UNIT == 8
				   ? 3 : exact_log2 (BITS_PER_UNIT));
		    if (off.fits_shwi ())
		      temp.off = off.low;
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* Record index as operand.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  /* Always record lower bounds and element size.  */
	  temp.op1 = array_ref_low_bound (ref);
	  temp.op2 = array_ref_element_size (ref);
	  if (TREE_CODE (temp.op0) == INTEGER_CST
	      && TREE_CODE (temp.op1) == INTEGER_CST
	      && TREE_CODE (temp.op2) == INTEGER_CST)
	    {
	      double_int off = tree_to_double_int (temp.op0);
	      off += -tree_to_double_int (temp.op1);
	      off *= tree_to_double_int (temp.op2);
	      if (off.fits_shwi ())
		temp.off = off.low;
	    }
	  break;
	case VAR_DECL:
	  if (DECL_HARD_REGISTER (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	  /* Canonicalize decls to MEM[&decl] which is what we end up with
	     when valueizing MEM[ptr] with ptr = &decl.  */
	  temp.opcode = MEM_REF;
	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
	  temp.off = 0;
	  result->safe_push (temp);
	  temp.opcode = ADDR_EXPR;
	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
	  temp.type = TREE_TYPE (temp.op0);
	  temp.off = -1;
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthrough.  */
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (IE they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration.  */
	case REALPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
	  || TREE_CODE (ref) == MODIFY_EXPR
	  || TREE_CODE (ref) == WITH_SIZE_EXPR
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
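
/* As an illustration of the encoding: a load from MEM[p_1].f becomes
   the operand vector

     { COMPONENT_REF <f>, MEM_REF <0>, SSA_NAME <p_1> }

   walking from the outermost reference down to its base, while the
   direct access a.f becomes
   { COMPONENT_REF <f>, MEM_REF <0>, ADDR_EXPR <&a> } because decls
   are canonicalized to MEM[&decl] above.  */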
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
	size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
	size = -1;
      else
	size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      vn_reference_op_t pop = &ops[i-1];
	      base = TREE_OPERAND (op->op0, 0);
	      if (pop->off == -1)
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  base_alias_set = get_deref_alias_set (op->op0);
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += tree_low_cst (op->op1, 0);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */
	    if (op->op1
		|| !host_integerp (DECL_FIELD_OFFSET (field), 1))
	      max_size = -1;
	    else
	      {
		offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
			   * BITS_PER_UNIT);
		offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!host_integerp (op->op0, 0)
	      || !host_integerp (op->op1, 0)
	      || !host_integerp (op->op2, 0))
	    max_size = -1;
	  else
	    {
	      HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
	      hindex -= TREE_INT_CST_LOW (op->op1);
	      hindex *= TREE_INT_CST_LOW (op->op2);
	      hindex *= BITS_PER_UNIT;
	      offset += hindex;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case CONST_DECL:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  return true;
}
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
			      vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  temp.off = -1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static vec<vn_reference_op_s>
create_reference_ops_from_call (gimple call)
{
  vec<vn_reference_op_s> result = vNULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  HOST_WIDE_INT addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
					     &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      double_int off = tree_to_double_int (mem_op->op0);
      off = off.sext (TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
      off += double_int::from_shwi (addr_offset);
      mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (host_integerp (mem_op->op0, 0))
	mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
      else
	mem_op->off = -1;
    }
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple def_stmt;
  enum tree_code code;
  double_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = tree_to_double_int (mem_op->op0);
  off = off.sext (TYPE_PRECISION (TREE_TYPE (mem_op->op0)));

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						 &addr_offset);
      if (!addr_base
	  || TREE_CODE (addr_base) != MEM_REF)
	return;

      off += double_int::from_shwi (addr_offset);
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
	  || TREE_CODE (ptroff) != INTEGER_CST)
	return;

      off += tree_to_double_int (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (host_integerp (mem_op->op0, 0))
    mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}
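
/* For illustration: given p_1 = q_2 + 8 and an operand vector ending
   in MEM_REF <4>, SSA_NAME <p_1>, the POINTER_PLUS_EXPR case above
   rewrites it to MEM_REF <12>, SSA_NAME <q_2> and recurses on the
   definition of q_2.  */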
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
	arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					 arg1 ? 2 : 1,
					 arg0->op0,
					 arg1 ? arg1->op0 : NULL);
	  if (folded
	      && TREE_CODE (folded) == NOP_EXPR)
	    folded = TREE_OPERAND (folded, 0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
	   && TREE_CODE (op->op0) == INTEGER_CST
	   && integer_zerop (op->op1)
	   && operands.length () == 2)
    {
      vn_reference_op_t arg0;
      arg0 = &operands[1];
      if (arg0->opcode == STRING_CST
	  && (TYPE_MODE (op->type)
	      == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
	  && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
	  && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
	  && tree_int_cst_sgn (op->op0) >= 0
	  && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
	return build_int_cst_type (op->type,
				   (TREE_STRING_POINTER (arg0->op0)
				    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}
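
/* Illustrative examples: a read like "foo"[2] folds to the character
   constant 'o' via the constant-string case above, and a builtin call
   whose arguments have become constant may fold through
   build_call_expr.  */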
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static vec<vn_reference_op_s>
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && orig[i - 1].opcode == MEM_REF)
	vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && orig[i - 1].opcode == MEM_REF)
	vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && vro->off == -1
	       && TREE_CODE (vro->op0) == INTEGER_CST
	       && TREE_CODE (vro->op1) == INTEGER_CST
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  double_int off = tree_to_double_int (vro->op0);
	  off += -tree_to_double_int (vro->op1);
	  off *= tree_to_double_int (vro->op2);
	  if (off.fits_shwi ())
	    vro->off = off.low;
	}
    }

  return orig;
}

static vec<vn_reference_op_s>
valueize_refs (vec<vn_reference_op_s> orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}
static vec<vn_reference_op_s> shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
					      valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  vn_reference_s **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = current_info->references.find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references.find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}
static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
		       unsigned int cnt, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  vn_reference_s **slot;
  hashval_t hash;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = current_info->references.find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references.find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
					  alias_set_type set,
					  tree type,
					  vec<vn_reference_op_s,
					      va_heap> operands,
					  tree value)
{
  struct vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
				     operands.copy (), value, value_id);
}
/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static vec<vn_reference_op_s>
    lhs_ops = vNULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definitions LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      vec<vn_reference_op_s> tem;
      tree lhs = gimple_assign_lhs (def_stmt);
      bool valueized_anything = false;
      /* Avoid re-allocation overhead.  */
      lhs_ops.truncate (0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      tem = lhs_ops;
      lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
      gcc_assert (lhs_ops == tem);
      if (valueized_anything)
	{
	  lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
						      get_alias_set (lhs),
						      TREE_TYPE (lhs), lhs_ops);
	  if (lhs_ref_ok
	      && !refs_may_alias_p_1 (ref, &lhs_ref, true))
	    return NULL;
	}
      else
	{
	  ao_ref_init (&lhs_ref, lhs);
	  lhs_ref_ok = true;
	}
    }

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* We can't deduce anything useful from clobbers.  */
  if (gimple_clobber_p (def_stmt))
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
	  == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
	  && maxsize2 != -1
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  tree val = build_zero_cst (vr->type);
	  return vn_reference_lookup_or_insert_for_pieces
		   (vuse, vr->set, vr->type, vr->operands, val);
	}
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
				       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  tree val = build_zero_cst (vr->type);
	  return vn_reference_lookup_or_insert_for_pieces
		   (vuse, vr->set, vr->type, vr->operands, val);
	}
    }

  /* 3) Assignment from a constant.  We can use folds native encode/interpret
     routines to extract the assigned bits.  */
  else if (vn_walk_kind == VN_WALKREWRITE
	   && CHAR_BIT == 8 && BITS_PER_UNIT == 8
	   && ref->size == maxsize
	   && maxsize % BITS_PER_UNIT == 0
	   && offset % BITS_PER_UNIT == 0
	   && is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
				       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
	  && maxsize2 == size2
	  && size2 % BITS_PER_UNIT == 0
	  && offset2 % BITS_PER_UNIT == 0
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  /* We support up to 512-bit values (for V8DFmode).  */
	  unsigned char buffer[64];
	  int len;

	  len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
				    buffer, sizeof (buffer));
	  if (len > 0)
	    {
	      tree val = native_interpret_expr (vr->type,
						buffer
						+ ((offset - offset2)
						   / BITS_PER_UNIT),
						ref->size / BITS_PER_UNIT);
	      if (val)
		return vn_reference_lookup_or_insert_for_pieces
			 (vuse, vr->set, vr->type, vr->operands, val);
	    }
	}
    }

  /* 4) Assignment from an SSA name which definition we may be able
     to access pieces from.  */
  else if (ref->size == maxsize
	   && is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
    {
      tree rhs1 = gimple_assign_rhs1 (def_stmt);
      gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt2)
	  && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
	      || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
	  && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
	{
	  tree base2;
	  HOST_WIDE_INT offset2, size2, maxsize2, off;
	  base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
					   &offset2, &size2, &maxsize2);
	  off = offset - offset2;
	  if (maxsize2 != -1
	      && maxsize2 == size2
	      && operand_equal_p (base, base2, 0)
	      && offset2 <= offset
	      && offset2 + size2 >= offset + maxsize)
	    {
	      tree val = NULL_TREE;
	      HOST_WIDE_INT elsz
		= TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
	      if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
		{
		  if (off == 0)
		    val = gimple_assign_rhs1 (def_stmt2);
		  else if (off == elsz)
		    val = gimple_assign_rhs2 (def_stmt2);
		}
	      else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
		       && off % elsz == 0)
		{
		  tree ctor = gimple_assign_rhs1 (def_stmt2);
		  unsigned i = off / elsz;
		  if (i < CONSTRUCTOR_NELTS (ctor))
		    {
		      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
		      if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
			{
			  if (TREE_CODE (TREE_TYPE (elt->value))
			      != VECTOR_TYPE)
			    val = elt->value;
			}
		    }
		}
	      if (val)
		return vn_reference_lookup_or_insert_for_pieces
			 (vuse, vr->set, vr->type, vr->operands, val);
	    }
	}
    }

  /* 5) For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
	   && gimple_assign_single_p (def_stmt)
	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      vec<vn_reference_op_s>
	  rhs = vNULL;
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
	return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      if (maxsize2 == -1
	  || (base != base2 && !operand_equal_p (base, base2, 0))
	  || offset2 > offset
	  || offset2 + size2 < offset + maxsize)
	return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
	 contains valueized operands for the lhs.  */
      i = vr->operands.length () - 1;
      j = lhs_ops.length () - 1;
      while (j >= 0 && i >= 0
	     && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
	{
	  i--;
	  j--;
	}

      /* ??? The innermost op should always be a MEM_REF and we already
	 checked that the assignment to the lhs kills vr.  Thus for
	 aggregate copies using char[] types the vn_reference_op_eq
	 may fail when comparing types for compatibility.  But we really
	 don't care here - further lookups with the rewritten operands
	 will simply fail if we messed up types too badly.  */
      if (j == 0 && i >= 0
	  && lhs_ops[0].opcode == MEM_REF
	  && lhs_ops[0].off != -1
	  && (lhs_ops[0].off == vr->operands[i].off))
	i--, j--;

      /* i now points to the first additional op.
	 ??? LHS may not be completely contained in VR, one or more
	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
	 try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
	return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + rhs.length () > vr->operands.length ())
	{
	  vec<vn_reference_op_s> old = vr->operands;
	  vr->operands.safe_grow (i + 1 + rhs.length ());
	  if (old == shared_lookup_references
	      && vr->operands != old)
	    shared_lookup_references = vNULL;
	}
      else
	vr->operands.truncate (i + 1 + rhs.length ());
      FOR_EACH_VEC_ELT (rhs, j, vro)
	vr->operands[i + 1 + j] = *vro;
      rhs.release ();
      vr->operands = valueize_refs (vr->operands);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* 6) For memcpy copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
	   && is_gimple_reg_type (vr->type)
	   /* ???  Handle BCOPY as well.  */
	   && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
	   && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
	   && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
	   && host_integerp (gimple_call_arg (def_stmt, 2), 1))
    {
      tree lhs, rhs;
      ao_ref r;
      HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
      vn_reference_op_s op;
      HOST_WIDE_INT at;

      /* Only handle non-variable, addressable refs.  */
      if (ref->size != maxsize
	  || offset % BITS_PER_UNIT != 0
	  || ref->size % BITS_PER_UNIT != 0)
	return (void *)-1;

      /* Extract a pointer base and an offset for the destination.  */
      lhs = gimple_call_arg (def_stmt, 0);
      lhs_offset = 0;
      if (TREE_CODE (lhs) == SSA_NAME)
	lhs = SSA_VAL (lhs);
      if (TREE_CODE (lhs) == ADDR_EXPR)
	{
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
						    &lhs_offset);
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
	      && host_integerp (TREE_OPERAND (tem, 1), 1))
	    {
	      lhs = TREE_OPERAND (tem, 0);
	      lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
	    }
	  else if (DECL_P (tem))
	    lhs = build_fold_addr_expr (tem);
	  else
	    return (void *)-1;
	}
      if (TREE_CODE (lhs) != SSA_NAME
	  && TREE_CODE (lhs) != ADDR_EXPR)
	return (void *)-1;

      /* Extract a pointer base and an offset for the source.  */
      rhs = gimple_call_arg (def_stmt, 1);
      rhs_offset = 0;
      if (TREE_CODE (rhs) == SSA_NAME)
	rhs = SSA_VAL (rhs);
      if (TREE_CODE (rhs) == ADDR_EXPR)
	{
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
						    &rhs_offset);
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
	      && host_integerp (TREE_OPERAND (tem, 1), 1))
	    {
	      rhs = TREE_OPERAND (tem, 0);
	      rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
	    }
	  else if (DECL_P (tem))
	    rhs = build_fold_addr_expr (tem);
	  else
	    return (void *)-1;
	}
      if (TREE_CODE (rhs) != SSA_NAME
	  && TREE_CODE (rhs) != ADDR_EXPR)
	return (void *)-1;

      copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));

      /* The bases of the destination and the references have to agree.  */
      if ((TREE_CODE (base) != MEM_REF
	   && !DECL_P (base))
	  || (TREE_CODE (base) == MEM_REF
	      && (TREE_OPERAND (base, 0) != lhs
		  || !host_integerp (TREE_OPERAND (base, 1), 1)))
	  || (DECL_P (base)
	      && (TREE_CODE (lhs) != ADDR_EXPR
		  || TREE_OPERAND (lhs, 0) != base)))
	return (void *)-1;

      /* And the access has to be contained within the memcpy destination.  */
      at = offset / BITS_PER_UNIT;
      if (TREE_CODE (base) == MEM_REF)
	at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
      if (lhs_offset > at
	  || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
	return (void *)-1;

      /* Make room for 2 operands in the new reference.  */
      if (vr->operands.length () < 2)
	{
	  vec<vn_reference_op_s> old = vr->operands;
	  vr->operands.safe_grow_cleared (2);
	  if (old == shared_lookup_references
	      && vr->operands != old)
	    shared_lookup_references.create (0);
	}
      else
	vr->operands.truncate (2);

      /* The looked-through reference is a simple MEM_REF.  */
      memset (&op, 0, sizeof (op));
      op.type = vr->type;
      op.opcode = MEM_REF;
      op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
      op.off = at - lhs_offset + rhs_offset;
      vr->operands[0] = op;
      op.type = TREE_TYPE (rhs);
      op.opcode = TREE_CODE (rhs);
      op.op0 = rhs;
      op.off = -1;
      vr->operands[1] = op;
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
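
/* As an illustration of case 1) above: for

     memset (&a, 0, sizeof (a));
     x_1 = a.f;

   the walk from the VUSE of the load reaches the memset, the access
   is fully covered by the zeroed region, and the lookup yields a zero
   constant of the load's type without walking further.  */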
1970 /* Lookup a reference operation by it's parts, in the current hash table.
1971 Returns the resulting value number if it exists in the hash table,
1972 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1973 vn_reference_t stored in the hashtable if something is found. */
1975 tree
1976 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
1977 vec<vn_reference_op_s> operands,
1978 vn_reference_t *vnresult, vn_lookup_kind kind)
1980 struct vn_reference_s vr1;
1981 vn_reference_t tmp;
1982 tree cst;
1984 if (!vnresult)
1985 vnresult = &tmp;
1986 *vnresult = NULL;
1988 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1989 shared_lookup_references.truncate (0);
1990 shared_lookup_references.safe_grow (operands.length ());
1991 memcpy (shared_lookup_references.address (),
1992 operands.address (),
1993 sizeof (vn_reference_op_s)
1994 * operands.length ());
1995 vr1.operands = operands = shared_lookup_references
1996 = valueize_refs (shared_lookup_references);
1997 vr1.type = type;
1998 vr1.set = set;
1999 vr1.hashcode = vn_reference_compute_hash (&vr1);
2000 if ((cst = fully_constant_vn_reference_p (&vr1)))
2001 return cst;
2003 vn_reference_lookup_1 (&vr1, vnresult);
2004 if (!*vnresult
2005 && kind != VN_NOWALK
2006 && vr1.vuse)
2008 ao_ref r;
2009 vn_walk_kind = kind;
2010 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2011 *vnresult =
2012 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2013 vn_reference_lookup_2,
2014 vn_reference_lookup_3, &vr1);
2015 if (vr1.operands != operands)
2016 vr1.operands.release ();
2019 if (*vnresult)
2020 return (*vnresult)->result;
2022 return NULL_TREE;
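/* Usage sketch for the above (a hypothetical caller, not part of this
   file): a pass that has phi-translated an expression can build the
   vec<vn_reference_op_s> by hand and ask

     tree val = vn_reference_lookup_pieces (vuse, set, type, operands,
					    &vnresult, VN_WALK);

   which returns the leader for the translated reference, or NULL_TREE
   if no equivalent reference is known.  */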
2025 /* Lookup OP in the current hash table, and return the resulting value
2026 number if it exists in the hash table. Return NULL_TREE if it does
2027 not exist in the hash table or if the result field of the structure
2028 was NULL.  VNRESULT will be filled in with the vn_reference_t
2029 stored in the hashtable if one exists. */
2031 tree
2032 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2033 vn_reference_t *vnresult)
2035 vec<vn_reference_op_s> operands;
2036 struct vn_reference_s vr1;
2037 tree cst;
2038 bool valueized_anything;
2040 if (vnresult)
2041 *vnresult = NULL;
2043 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2044 vr1.operands = operands
2045 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2046 vr1.type = TREE_TYPE (op);
2047 vr1.set = get_alias_set (op);
2048 vr1.hashcode = vn_reference_compute_hash (&vr1);
2049 if ((cst = fully_constant_vn_reference_p (&vr1)))
2050 return cst;
2052 if (kind != VN_NOWALK
2053 && vr1.vuse)
2055 vn_reference_t wvnresult;
2056 ao_ref r;
2057 /* Make sure to use a valueized reference if we valueized anything.
2058 Otherwise preserve the full reference for advanced TBAA. */
2059 if (!valueized_anything
2060 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2061 vr1.operands))
2062 ao_ref_init (&r, op);
2063 vn_walk_kind = kind;
2064 wvnresult =
2065 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2066 vn_reference_lookup_2,
2067 vn_reference_lookup_3, &vr1);
2068 if (vr1.operands != operands)
2069 vr1.operands.release ();
2070 if (wvnresult)
2072 if (vnresult)
2073 *vnresult = wvnresult;
2074 return wvnresult->result;
2077 return NULL_TREE;
2080 return vn_reference_lookup_1 (&vr1, vnresult);
2084 /* Insert OP into the current hash table with a value number of
2085 RESULT, and return the resulting reference structure we created. */
2087 vn_reference_t
2088 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2090 vn_reference_s **slot;
2091 vn_reference_t vr1;
2092 bool tem;
2094 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2095 if (TREE_CODE (result) == SSA_NAME)
2096 vr1->value_id = VN_INFO (result)->value_id;
2097 else
2098 vr1->value_id = get_or_alloc_constant_value_id (result);
2099 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2100 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2101 vr1->type = TREE_TYPE (op);
2102 vr1->set = get_alias_set (op);
2103 vr1->hashcode = vn_reference_compute_hash (vr1);
2104 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2105 vr1->result_vdef = vdef;
2107 slot = current_info->references.find_slot_with_hash (vr1, vr1->hashcode,
2108 INSERT);
2110 /* Because we look up stores using vuses, and value number failures
2111 using the vdefs (see visit_reference_op_store for how and why),
2112 it's possible that on failure we may try to insert an already
2113 inserted store. This is not wrong, there is no ssa name for a
2114 store that we could use as a differentiator anyway. Thus, unlike
2115 the other lookup functions, you cannot gcc_assert (!*slot)
2116 here. */
2118 /* But free the old slot in case of a collision. */
2119 if (*slot)
2120 free_reference (*slot);
2122 *slot = vr1;
2123 return vr1;
2126 /* Insert a reference by its pieces into the current hash table with
2127 a value number of RESULT. Return the resulting reference
2128 structure we created. */
2130 vn_reference_t
2131 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2132 vec<vn_reference_op_s> operands,
2133 tree result, unsigned int value_id)
2136 vn_reference_s **slot;
2137 vn_reference_t vr1;
2139 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2140 vr1->value_id = value_id;
2141 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2142 vr1->operands = valueize_refs (operands);
2143 vr1->type = type;
2144 vr1->set = set;
2145 vr1->hashcode = vn_reference_compute_hash (vr1);
2146 if (result && TREE_CODE (result) == SSA_NAME)
2147 result = SSA_VAL (result);
2148 vr1->result = result;
2150 slot = current_info->references.find_slot_with_hash (vr1, vr1->hashcode,
2151 INSERT);
2153 /* At this point we should have all the things inserted that we have
2154 seen before, and we should never try inserting something that
2155 already exists. */
2156 gcc_assert (!*slot);
2157 if (*slot)
2158 free_reference (*slot);
2160 *slot = vr1;
2161 return vr1;
2164 /* Compute and return the hash value for nary operation VNO1. */
2166 hashval_t
2167 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2169 hashval_t hash;
2170 unsigned i;
2172 for (i = 0; i < vno1->length; ++i)
2173 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2174 vno1->op[i] = SSA_VAL (vno1->op[i]);
2176 if (vno1->length == 2
2177 && commutative_tree_code (vno1->opcode)
2178 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2180 tree temp = vno1->op[0];
2181 vno1->op[0] = vno1->op[1];
2182 vno1->op[1] = temp;
2185 hash = iterative_hash_hashval_t (vno1->opcode, 0);
2186 for (i = 0; i < vno1->length; ++i)
2187 hash = iterative_hash_expr (vno1->op[i], hash);
2189 return hash;
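/* Example of the canonicalization above (a sketch): for

     x_1 = a_2 + b_3;
     y_4 = b_3 + a_2;

   the commutative operand swap orders both operand pairs the same way,
   so the two additions receive identical hash values and vn_nary_op_eq
   can unify them.  */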
2192 /* Compare nary operations VNO1 and VNO2 and return true if they are
2193 equivalent. */
2195 bool
2196 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2198 unsigned i;
2200 if (vno1->hashcode != vno2->hashcode)
2201 return false;
2203 if (vno1->length != vno2->length)
2204 return false;
2206 if (vno1->opcode != vno2->opcode
2207 || !types_compatible_p (vno1->type, vno2->type))
2208 return false;
2210 for (i = 0; i < vno1->length; ++i)
2211 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2212 return false;
2214 return true;
2217 /* Initialize VNO from the pieces provided. */
2219 static void
2220 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2221 enum tree_code code, tree type, tree *ops)
2223 vno->opcode = code;
2224 vno->length = length;
2225 vno->type = type;
2226 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2229 /* Initialize VNO from OP. */
2231 static void
2232 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2234 unsigned i;
2236 vno->opcode = TREE_CODE (op);
2237 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2238 vno->type = TREE_TYPE (op);
2239 for (i = 0; i < vno->length; ++i)
2240 vno->op[i] = TREE_OPERAND (op, i);
2243 /* Return the number of operands for a vn_nary ops structure from STMT. */
2245 static unsigned int
2246 vn_nary_length_from_stmt (gimple stmt)
2248 switch (gimple_assign_rhs_code (stmt))
2250 case REALPART_EXPR:
2251 case IMAGPART_EXPR:
2252 case VIEW_CONVERT_EXPR:
2253 return 1;
2255 case BIT_FIELD_REF:
2256 return 3;
2258 case CONSTRUCTOR:
2259 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2261 default:
2262 return gimple_num_ops (stmt) - 1;
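/* E.g. (a sketch): for x_1 = a_2 + b_3, gimple_num_ops is 3 (the lhs
   plus two rhs operands), so the nary length is 2; for a CONSTRUCTOR
   rhs the length is the number of constructor elements instead.  */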
2266 /* Initialize VNO from STMT. */
2268 static void
2269 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2271 unsigned i;
2273 vno->opcode = gimple_assign_rhs_code (stmt);
2274 vno->type = gimple_expr_type (stmt);
2275 switch (vno->opcode)
2277 case REALPART_EXPR:
2278 case IMAGPART_EXPR:
2279 case VIEW_CONVERT_EXPR:
2280 vno->length = 1;
2281 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2282 break;
2284 case BIT_FIELD_REF:
2285 vno->length = 3;
2286 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2287 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2288 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2289 break;
2291 case CONSTRUCTOR:
2292 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2293 for (i = 0; i < vno->length; ++i)
2294 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2295 break;
2297 default:
2298 gcc_checking_assert (!gimple_assign_single_p (stmt));
2299 vno->length = gimple_num_ops (stmt) - 1;
2300 for (i = 0; i < vno->length; ++i)
2301 vno->op[i] = gimple_op (stmt, i + 1);
2305 /* Compute the hashcode for VNO and look for it in the hash table;
2306 return the resulting value number if it exists in the hash table.
2307 Return NULL_TREE if it does not exist in the hash table or if the
2308 result field of the operation is NULL. VNRESULT will contain the
2309 vn_nary_op_t from the hashtable if it exists. */
2311 static tree
2312 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2314 vn_nary_op_s **slot;
2316 if (vnresult)
2317 *vnresult = NULL;
2319 vno->hashcode = vn_nary_op_compute_hash (vno);
2320 slot = current_info->nary.find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
2321 if (!slot && current_info == optimistic_info)
2322 slot = valid_info->nary.find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
2323 if (!slot)
2324 return NULL_TREE;
2325 if (vnresult)
2326 *vnresult = *slot;
2327 return (*slot)->result;
2330 /* Lookup an n-ary operation by its pieces and return the resulting value
2331 number if it exists in the hash table. Return NULL_TREE if it does
2332 not exist in the hash table or if the result field of the operation
2333 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2334 if it exists. */
2336 tree
2337 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2338 tree type, tree *ops, vn_nary_op_t *vnresult)
2340 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2341 sizeof_vn_nary_op (length));
2342 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2343 return vn_nary_op_lookup_1 (vno1, vnresult);
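/* Minimal usage sketch (hypothetical, kept out of the build via
   #if 0): look up a value number for A + B without having a statement
   at hand.  Assumes A and B are SSA names or invariants of a
   compatible integral type.  */
#if 0
static tree
example_lookup_plus (tree a, tree b)
{
  tree ops[2] = { a, b };
  return vn_nary_op_lookup_pieces (2, PLUS_EXPR, TREE_TYPE (a), ops, NULL);
}
#endif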
2346 /* Lookup OP in the current hash table, and return the resulting value
2347 number if it exists in the hash table. Return NULL_TREE if it does
2348 not exist in the hash table or if the result field of the operation
2349 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2350 if it exists. */
2352 tree
2353 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2355 vn_nary_op_t vno1
2356 = XALLOCAVAR (struct vn_nary_op_s,
2357 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2358 init_vn_nary_op_from_op (vno1, op);
2359 return vn_nary_op_lookup_1 (vno1, vnresult);
2362 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2363 value number if it exists in the hash table. Return NULL_TREE if
2364 it does not exist in the hash table. VNRESULT will contain the
2365 vn_nary_op_t from the hashtable if it exists. */
2367 tree
2368 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2370 vn_nary_op_t vno1
2371 = XALLOCAVAR (struct vn_nary_op_s,
2372 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2373 init_vn_nary_op_from_stmt (vno1, stmt);
2374 return vn_nary_op_lookup_1 (vno1, vnresult);
2377 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2379 static vn_nary_op_t
2380 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2382 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2385 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2386 obstack. */
2388 static vn_nary_op_t
2389 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2391 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2392 &current_info->nary_obstack);
2394 vno1->value_id = value_id;
2395 vno1->length = length;
2396 vno1->result = result;
2398 return vno1;
2401 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2402 VNO->HASHCODE first. */
2404 static vn_nary_op_t
2405 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type table,
2406 bool compute_hash)
2408 vn_nary_op_s **slot;
2410 if (compute_hash)
2411 vno->hashcode = vn_nary_op_compute_hash (vno);
2413 slot = table.find_slot_with_hash (vno, vno->hashcode, INSERT);
2414 gcc_assert (!*slot);
2416 *slot = vno;
2417 return vno;
2420 /* Insert an n-ary operation into the current hash table using its
2421 pieces. Return the vn_nary_op_t structure we created and put in
2422 the hashtable. */
2424 vn_nary_op_t
2425 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2426 tree type, tree *ops,
2427 tree result, unsigned int value_id)
2429 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2430 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2431 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2434 /* Insert OP into the current hash table with a value number of
2435 RESULT. Return the vn_nary_op_t structure we created and put in
2436 the hashtable. */
2438 vn_nary_op_t
2439 vn_nary_op_insert (tree op, tree result)
2441 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2442 vn_nary_op_t vno1;
2444 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2445 init_vn_nary_op_from_op (vno1, op);
2446 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2449 /* Insert the rhs of STMT into the current hash table with a value number of
2450 RESULT. */
2452 vn_nary_op_t
2453 vn_nary_op_insert_stmt (gimple stmt, tree result)
2455 vn_nary_op_t vno1
2456 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2457 result, VN_INFO (result)->value_id);
2458 init_vn_nary_op_from_stmt (vno1, stmt);
2459 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2462 /* Compute a hashcode for PHI operation VP1 and return it. */
2464 static inline hashval_t
2465 vn_phi_compute_hash (vn_phi_t vp1)
2467 hashval_t result;
2468 int i;
2469 tree phi1op;
2470 tree type;
2472 result = vp1->block->index;
2474 /* If all PHI arguments are constants we need to distinguish
2475 the PHI node via its type. */
2476 type = vp1->type;
2477 result += vn_hash_type (type);
2479 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2481 if (phi1op == VN_TOP)
2482 continue;
2483 result = iterative_hash_expr (phi1op, result);
2486 return result;
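/* Example (a sketch): for x_1 = PHI <a_2, VN_TOP> in basic block 3 the
   hash mixes the block index, the type of x_1 and a_2 only; the VN_TOP
   argument is skipped, keeping the hash consistent with vn_phi_eq
   below, which likewise ignores VN_TOP arguments.  */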
2489 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2491 static int
2492 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2494 if (vp1->hashcode != vp2->hashcode)
2495 return false;
2497 if (vp1->block == vp2->block)
2499 int i;
2500 tree phi1op;
2502 /* If the PHI nodes do not have compatible types
2503 they are not the same. */
2504 if (!types_compatible_p (vp1->type, vp2->type))
2505 return false;
2507 /* Any phi in the same block will have its arguments in the
2508 same edge order, because of how we store phi nodes. */
2509 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2511 tree phi2op = vp2->phiargs[i];
2512 if (phi1op == VN_TOP || phi2op == VN_TOP)
2513 continue;
2514 if (!expressions_equal_p (phi1op, phi2op))
2515 return false;
2517 return true;
2519 return false;
2522 static vec<tree> shared_lookup_phiargs;
2524 /* Lookup PHI in the current hash table, and return the resulting
2525 value number if it exists in the hash table. Return NULL_TREE if
2526 it does not exist in the hash table. */
2528 static tree
2529 vn_phi_lookup (gimple phi)
2531 vn_phi_s **slot;
2532 struct vn_phi_s vp1;
2533 unsigned i;
2535 shared_lookup_phiargs.truncate (0);
2537 /* Canonicalize the SSA_NAMEs to their value numbers. */
2538 for (i = 0; i < gimple_phi_num_args (phi); i++)
2540 tree def = PHI_ARG_DEF (phi, i);
2541 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2542 shared_lookup_phiargs.safe_push (def);
2544 vp1.type = TREE_TYPE (gimple_phi_result (phi));
2545 vp1.phiargs = shared_lookup_phiargs;
2546 vp1.block = gimple_bb (phi);
2547 vp1.hashcode = vn_phi_compute_hash (&vp1);
2548 slot = current_info->phis.find_slot_with_hash (&vp1, vp1.hashcode, NO_INSERT);
2549 if (!slot && current_info == optimistic_info)
2550 slot = valid_info->phis.find_slot_with_hash (&vp1, vp1.hashcode, NO_INSERT);
2551 if (!slot)
2552 return NULL_TREE;
2553 return (*slot)->result;
2556 /* Insert PHI into the current hash table with a value number of
2557 RESULT. */
2559 static vn_phi_t
2560 vn_phi_insert (gimple phi, tree result)
2562 vn_phi_s **slot;
2563 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2564 unsigned i;
2565 vec<tree> args = vNULL;
2567 /* Canonicalize the SSA_NAMEs to their value numbers. */
2568 for (i = 0; i < gimple_phi_num_args (phi); i++)
2570 tree def = PHI_ARG_DEF (phi, i);
2571 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2572 args.safe_push (def);
2574 vp1->value_id = VN_INFO (result)->value_id;
2575 vp1->type = TREE_TYPE (gimple_phi_result (phi));
2576 vp1->phiargs = args;
2577 vp1->block = gimple_bb (phi);
2578 vp1->result = result;
2579 vp1->hashcode = vn_phi_compute_hash (vp1);
2581 slot = current_info->phis.find_slot_with_hash (vp1, vp1->hashcode, INSERT);
2583 /* Because we iterate over phi operations more than once, it's
2584 possible the slot might already exist here, hence no assert.  */
2585 *slot = vp1;
2586 return vp1;
2590 /* Print set of components in strongly connected component SCC to OUT. */
2592 static void
2593 print_scc (FILE *out, vec<tree> scc)
2595 tree var;
2596 unsigned int i;
2598 fprintf (out, "SCC consists of:");
2599 FOR_EACH_VEC_ELT (scc, i, var)
2601 fprintf (out, " ");
2602 print_generic_expr (out, var, 0);
2604 fprintf (out, "\n");
2607 /* Set the value number of FROM to TO, return true if it has changed
2608 as a result. */
2610 static inline bool
2611 set_ssa_val_to (tree from, tree to)
2613 tree currval = SSA_VAL (from);
2614 HOST_WIDE_INT toff, coff;
2616 if (from != to)
2618 if (currval == from)
2620 if (dump_file && (dump_flags & TDF_DETAILS))
2622 fprintf (dump_file, "Not changing value number of ");
2623 print_generic_expr (dump_file, from, 0);
2624 fprintf (dump_file, " from VARYING to ");
2625 print_generic_expr (dump_file, to, 0);
2626 fprintf (dump_file, "\n");
2628 return false;
2630 else if (TREE_CODE (to) == SSA_NAME
2631 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2632 to = from;
2635 /* The only things we allow as value numbers are VN_TOP, SSA names
2636 and invariants. So assert that here. */
2637 gcc_assert (to != NULL_TREE
2638 && (to == VN_TOP
2639 || TREE_CODE (to) == SSA_NAME
2640 || is_gimple_min_invariant (to)));
2642 if (dump_file && (dump_flags & TDF_DETAILS))
2644 fprintf (dump_file, "Setting value number of ");
2645 print_generic_expr (dump_file, from, 0);
2646 fprintf (dump_file, " to ");
2647 print_generic_expr (dump_file, to, 0);
2650 if (currval != to
2651 && !operand_equal_p (currval, to, 0)
2652 /* ??? For addresses involving volatile objects or types operand_equal_p
2653 does not reliably detect ADDR_EXPRs as equal. We know we are only
2654 getting invariant gimple addresses here, so can use
2655 get_addr_base_and_unit_offset to do this comparison. */
2656 && !(TREE_CODE (currval) == ADDR_EXPR
2657 && TREE_CODE (to) == ADDR_EXPR
2658 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
2659 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
2660 && coff == toff))
2662 VN_INFO (from)->valnum = to;
2663 if (dump_file && (dump_flags & TDF_DETAILS))
2664 fprintf (dump_file, " (changed)\n");
2665 return true;
2667 if (dump_file && (dump_flags & TDF_DETAILS))
2668 fprintf (dump_file, "\n");
2669 return false;
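/* Worked example (a sketch): the first visit of i_1 = 5 calls
   set_ssa_val_to (i_1, 5); SSA_VAL (i_1) changes from VN_TOP to 5 and
   true is returned, which keeps the SCC iteration going.  A later
   visit computing the same value returns false, signalling
   convergence.  */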
2672 /* Mark as processed all the definitions in the defining stmt of USE, or
2673 the USE itself. */
2675 static void
2676 mark_use_processed (tree use)
2678 ssa_op_iter iter;
2679 def_operand_p defp;
2680 gimple stmt = SSA_NAME_DEF_STMT (use);
2682 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
2684 VN_INFO (use)->use_processed = true;
2685 return;
2688 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2690 tree def = DEF_FROM_PTR (defp);
2692 VN_INFO (def)->use_processed = true;
2696 /* Value number all definitions in STMT to themselves.
2697 Return true if a value number changed. */
2699 static bool
2700 defs_to_varying (gimple stmt)
2702 bool changed = false;
2703 ssa_op_iter iter;
2704 def_operand_p defp;
2706 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2708 tree def = DEF_FROM_PTR (defp);
2709 changed |= set_ssa_val_to (def, def);
2711 return changed;
2714 static bool expr_has_constants (tree expr);
2715 static tree valueize_expr (tree expr);
2717 /* Visit a copy between LHS and RHS, return true if the value number
2718 changed. */
2720 static bool
2721 visit_copy (tree lhs, tree rhs)
2723 /* The RHS may have a more interesting constant-filled expression
2724 (ours is just the plain SSA name), so copy over its info. */
2725 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2726 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2728 /* And finally valueize. */
2729 rhs = SSA_VAL (rhs);
2731 return set_ssa_val_to (lhs, rhs);
2734 /* Visit a nary operator RHS, value number it, and return true if the
2735 value number of LHS has changed as a result. */
2737 static bool
2738 visit_nary_op (tree lhs, gimple stmt)
2740 bool changed = false;
2741 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2743 if (result)
2744 changed = set_ssa_val_to (lhs, result);
2745 else
2747 changed = set_ssa_val_to (lhs, lhs);
2748 vn_nary_op_insert_stmt (stmt, lhs);
2751 return changed;
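/* E.g. (a sketch): visiting x_1 = a_2 + b_3 first inserts the nary
   {PLUS_EXPR, a_2, b_3} with result x_1; visiting a later
   y_4 = a_2 + b_3 then finds that entry and value numbers y_4 to x_1,
   exposing the second addition as redundant.  */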
2754 /* Visit a call STMT storing into LHS. Return true if the value number
2755 of the LHS has changed as a result. */
2757 static bool
2758 visit_reference_op_call (tree lhs, gimple stmt)
2760 bool changed = false;
2761 struct vn_reference_s vr1;
2762 vn_reference_t vnresult = NULL;
2763 tree vuse = gimple_vuse (stmt);
2764 tree vdef = gimple_vdef (stmt);
2766 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
2767 if (lhs && TREE_CODE (lhs) != SSA_NAME)
2768 lhs = NULL_TREE;
2770 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2771 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2772 vr1.type = gimple_expr_type (stmt);
2773 vr1.set = 0;
2774 vr1.hashcode = vn_reference_compute_hash (&vr1);
2775 vn_reference_lookup_1 (&vr1, &vnresult);
2777 if (vnresult)
2779 if (vnresult->result_vdef)
2780 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
2782 if (!vnresult->result && lhs)
2783 vnresult->result = lhs;
2785 if (vnresult->result && lhs)
2787 changed |= set_ssa_val_to (lhs, vnresult->result);
2789 if (VN_INFO (vnresult->result)->has_constants)
2790 VN_INFO (lhs)->has_constants = true;
2793 else
2795 vn_reference_s **slot;
2796 vn_reference_t vr2;
2797 if (vdef)
2798 changed |= set_ssa_val_to (vdef, vdef);
2799 if (lhs)
2800 changed |= set_ssa_val_to (lhs, lhs);
2801 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2802 vr2->vuse = vr1.vuse;
2803 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2804 vr2->type = vr1.type;
2805 vr2->set = vr1.set;
2806 vr2->hashcode = vr1.hashcode;
2807 vr2->result = lhs;
2808 vr2->result_vdef = vdef;
2809 slot = current_info->references.find_slot_with_hash (vr2, vr2->hashcode,
2810 INSERT);
2811 if (*slot)
2812 free_reference (*slot);
2813 *slot = vr2;
2816 return changed;
2819 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2820 and return true if the value number of the LHS has changed as a result. */
2822 static bool
2823 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2825 bool changed = false;
2826 tree last_vuse;
2827 tree result;
2829 last_vuse = gimple_vuse (stmt);
2830 last_vuse_ptr = &last_vuse;
2831 result = vn_reference_lookup (op, gimple_vuse (stmt),
2832 default_vn_walk_kind, NULL);
2833 last_vuse_ptr = NULL;
2835 /* If we have a VCE, try looking up its operand as it might be stored in
2836 a different type. */
2837 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2838 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2839 default_vn_walk_kind, NULL);
2841 /* We handle type-punning through unions by value-numbering based
2842 on offset and size of the access. Be prepared to handle a
2843 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
2844 if (result
2845 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2847 /* We will be setting the value number of lhs to the value number
2848 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2849 So first simplify and lookup this expression to see if it
2850 is already available. */
2851 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2852 if ((CONVERT_EXPR_P (val)
2853 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2854 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2856 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2857 if ((CONVERT_EXPR_P (tem)
2858 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2859 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2860 TREE_TYPE (val), tem)))
2861 val = tem;
2863 result = val;
2864 if (!is_gimple_min_invariant (val)
2865 && TREE_CODE (val) != SSA_NAME)
2866 result = vn_nary_op_lookup (val, NULL);
2867 /* If the expression is not yet available, value-number lhs to
2868 a new SSA_NAME we create. */
2869 if (!result)
2871 result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (),
2872 "vntemp");
2873 /* Initialize value-number information properly. */
2874 VN_INFO_GET (result)->valnum = result;
2875 VN_INFO (result)->value_id = get_next_value_id ();
2876 VN_INFO (result)->expr = val;
2877 VN_INFO (result)->has_constants = expr_has_constants (val);
2878 VN_INFO (result)->needs_insertion = true;
2879 /* As all "inserted" statements are singleton SCCs, insert
2880 to the valid table. This is strictly needed to
2881 avoid re-generating new value SSA_NAMEs for the same
2882 expression during SCC iteration over and over (the
2883 optimistic table gets cleared after each iteration).
2884 We do not need to insert into the optimistic table, as
2885 lookups there will fall back to the valid table. */
2886 if (current_info == optimistic_info)
2888 current_info = valid_info;
2889 vn_nary_op_insert (val, result);
2890 current_info = optimistic_info;
2892 else
2893 vn_nary_op_insert (val, result);
2894 if (dump_file && (dump_flags & TDF_DETAILS))
2896 fprintf (dump_file, "Inserting name ");
2897 print_generic_expr (dump_file, result, 0);
2898 fprintf (dump_file, " for expression ");
2899 print_generic_expr (dump_file, val, 0);
2900 fprintf (dump_file, "\n");
2905 if (result)
2907 changed = set_ssa_val_to (lhs, result);
2908 if (TREE_CODE (result) == SSA_NAME
2909 && VN_INFO (result)->has_constants)
2911 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2912 VN_INFO (lhs)->has_constants = true;
2915 else
2917 changed = set_ssa_val_to (lhs, lhs);
2918 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
2921 return changed;
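/* Illustration of the type-punning path above (a sketch): for

     union { int i; float f; } u;
     u.i = 17;
     ... = u.f;

   the lookup finds the stored value with type int at the same offset
   and size; since int does not uselessly convert to float, the load is
   value numbered through a VIEW_CONVERT_EXPR of the stored value,
   possibly via a freshly inserted SSA name as above.  */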
2925 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2926 and return true if the value number of the LHS has changed as a result. */
2928 static bool
2929 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2931 bool changed = false;
2932 vn_reference_t vnresult = NULL;
2933 tree result, assign;
2934 bool resultsame = false;
2935 tree vuse = gimple_vuse (stmt);
2936 tree vdef = gimple_vdef (stmt);
2938 /* First we want to look up using the *vuses* from the store and see
2939 if the last store to this location with the same address
2940 had the same value.
2942 The vuses represent the memory state before the store. If the
2943 memory state, address, and value of the store is the same as the
2944 last store to this location, then this store will produce the
2945 same memory state as that store.
2947 In this case the vdef versions for this store are value numbered to those
2948 vuse versions, since they represent the same memory state after
2949 this store.
2951 Otherwise, the vdefs for the store are used when inserting into
2952 the table, since the store generates a new memory state. */
2954 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);
2956 if (result)
2958 if (TREE_CODE (result) == SSA_NAME)
2959 result = SSA_VAL (result);
2960 if (TREE_CODE (op) == SSA_NAME)
2961 op = SSA_VAL (op);
2962 resultsame = expressions_equal_p (result, op);
2965 if (!result || !resultsame)
2967 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
2968 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
2969 if (vnresult)
2971 VN_INFO (vdef)->use_processed = true;
2972 return set_ssa_val_to (vdef, vnresult->result_vdef);
2976 if (!result || !resultsame)
2978 if (dump_file && (dump_flags & TDF_DETAILS))
2980 fprintf (dump_file, "No store match\n");
2981 fprintf (dump_file, "Value numbering store ");
2982 print_generic_expr (dump_file, lhs, 0);
2983 fprintf (dump_file, " to ");
2984 print_generic_expr (dump_file, op, 0);
2985 fprintf (dump_file, "\n");
2987 /* Have to set value numbers before insert, since insert is
2988 going to valueize the references in-place. */
2989 if (vdef)
2991 changed |= set_ssa_val_to (vdef, vdef);
2994 /* Do not insert structure copies into the tables. */
2995 if (is_gimple_min_invariant (op)
2996 || is_gimple_reg (op))
2997 vn_reference_insert (lhs, op, vdef, NULL);
2999 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3000 vn_reference_insert (assign, lhs, vuse, vdef);
3002 else
3004 /* We had a match, so value number the vdef to have the value
3005 number of the vuse it came from. */
3007 if (dump_file && (dump_flags & TDF_DETAILS))
3008 fprintf (dump_file, "Store matched earlier value,"
3009 "value numbering store vdefs to matching vuses.\n");
3011 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3014 return changed;
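/* Example of the match case (a sketch): in

     *p_1 = x_2;   (VDEF .MEM_4, VUSE .MEM_3)
     *p_1 = x_2;   (VDEF .MEM_5, VUSE .MEM_4)

   the second store finds that memory already holds x_2 at *p_1, so
   .MEM_5 is value numbered to .MEM_4 and the store is later removable
   as redundant.  */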
3017 /* Visit and value number PHI, return true if the value number
3018 changed. */
3020 static bool
3021 visit_phi (gimple phi)
3023 bool changed = false;
3024 tree result;
3025 tree sameval = VN_TOP;
3026 bool allsame = true;
3027 unsigned i;
3029 /* TODO: We could check for this in init_sccvn, and replace this
3030 with a gcc_assert. */
3031 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3032 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3034 /* See if all non-TOP arguments have the same value. TOP is
3035 equivalent to everything, so we can ignore it. */
3036 for (i = 0; i < gimple_phi_num_args (phi); i++)
3038 tree def = PHI_ARG_DEF (phi, i);
3040 if (TREE_CODE (def) == SSA_NAME)
3041 def = SSA_VAL (def);
3042 if (def == VN_TOP)
3043 continue;
3044 if (sameval == VN_TOP)
3046 sameval = def;
3048 else
3050 if (!expressions_equal_p (def, sameval))
3052 allsame = false;
3053 break;
3058 /* If all value numbered to the same value, the phi node has that
3059 value. */
3060 if (allsame)
3062 if (is_gimple_min_invariant (sameval))
3064 VN_INFO (PHI_RESULT (phi))->has_constants = true;
3065 VN_INFO (PHI_RESULT (phi))->expr = sameval;
3067 else
3069 VN_INFO (PHI_RESULT (phi))->has_constants = false;
3070 VN_INFO (PHI_RESULT (phi))->expr = sameval;
3073 if (TREE_CODE (sameval) == SSA_NAME)
3074 return visit_copy (PHI_RESULT (phi), sameval);
3076 return set_ssa_val_to (PHI_RESULT (phi), sameval);
3079 /* Otherwise, see if it is equivalent to a phi node in this block. */
3080 result = vn_phi_lookup (phi);
3081 if (result)
3083 if (TREE_CODE (result) == SSA_NAME)
3084 changed = visit_copy (PHI_RESULT (phi), result);
3085 else
3086 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3088 else
3090 vn_phi_insert (phi, PHI_RESULT (phi));
3091 VN_INFO (PHI_RESULT (phi))->has_constants = false;
3092 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
3093 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3096 return changed;
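/* E.g. (a sketch): x_4 = PHI <a_1, a_1> degenerates to a copy of a_1
   and is handled by visit_copy; x_5 = PHI <1, 2> has differing
   arguments and receives itself (or an equivalent PHI found by
   vn_phi_lookup) as its value number.  */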
3099 /* Return true if EXPR contains constants. */
3101 static bool
3102 expr_has_constants (tree expr)
3104 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3106 case tcc_unary:
3107 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
3109 case tcc_binary:
3110 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
3111 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
3112 /* Constants inside reference ops are rarely interesting, but
3113 it can take a lot of looking to find them. */
3114 case tcc_reference:
3115 case tcc_declaration:
3116 return false;
3117 default:
3118 return is_gimple_min_invariant (expr);
3120 return false;
3123 /* Return true if STMT contains constants. */
3125 static bool
3126 stmt_has_constants (gimple stmt)
3128 tree tem;
3130 if (gimple_code (stmt) != GIMPLE_ASSIGN)
3131 return false;
3133 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
3135 case GIMPLE_TERNARY_RHS:
3136 tem = gimple_assign_rhs3 (stmt);
3137 if (TREE_CODE (tem) == SSA_NAME)
3138 tem = SSA_VAL (tem);
3139 if (is_gimple_min_invariant (tem))
3140 return true;
3141 /* Fallthru. */
3143 case GIMPLE_BINARY_RHS:
3144 tem = gimple_assign_rhs2 (stmt);
3145 if (TREE_CODE (tem) == SSA_NAME)
3146 tem = SSA_VAL (tem);
3147 if (is_gimple_min_invariant (tem))
3148 return true;
3149 /* Fallthru. */
3151 case GIMPLE_SINGLE_RHS:
3152 /* Constants inside reference ops are rarely interesting, but
3153 it can take a lot of looking to find them. */
3154 case GIMPLE_UNARY_RHS:
3155 tem = gimple_assign_rhs1 (stmt);
3156 if (TREE_CODE (tem) == SSA_NAME)
3157 tem = SSA_VAL (tem);
3158 return is_gimple_min_invariant (tem);
3160 default:
3161 gcc_unreachable ();
3163 return false;
3166 /* Replace SSA_NAMES in expr with their value numbers, and return the
3167 result.
3168 This is performed in place. */
3170 static tree
3171 valueize_expr (tree expr)
3173 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3175 case tcc_binary:
3176 TREE_OPERAND (expr, 1) = vn_valueize (TREE_OPERAND (expr, 1));
3177 /* Fallthru. */
3178 case tcc_unary:
3179 TREE_OPERAND (expr, 0) = vn_valueize (TREE_OPERAND (expr, 0));
3180 break;
3181 default:;
3183 return expr;
3186 /* Simplify the binary expression RHS, and return the result if
3187 simplified. */
3189 static tree
3190 simplify_binary_expression (gimple stmt)
3192 tree result = NULL_TREE;
3193 tree op0 = gimple_assign_rhs1 (stmt);
3194 tree op1 = gimple_assign_rhs2 (stmt);
3195 enum tree_code code = gimple_assign_rhs_code (stmt);
3197 /* This will not catch every single case we could combine, but will
3198 catch those with constants. The goal here is to simultaneously
3199 combine constants between expressions, but avoid infinite
3200 expansion of expressions during simplification. */
3201 if (TREE_CODE (op0) == SSA_NAME)
3203 if (VN_INFO (op0)->has_constants
3204 || TREE_CODE_CLASS (code) == tcc_comparison
3205 || code == COMPLEX_EXPR)
3206 op0 = valueize_expr (vn_get_expr_for (op0));
3207 else
3208 op0 = vn_valueize (op0);
3211 if (TREE_CODE (op1) == SSA_NAME)
3213 if (VN_INFO (op1)->has_constants
3214 || code == COMPLEX_EXPR)
3215 op1 = valueize_expr (vn_get_expr_for (op1));
3216 else
3217 op1 = vn_valueize (op1);
3220 /* Pointer plus constant can be represented as invariant address.
3221 Do so to allow further propagation, see also tree forwprop.  */
3222 if (code == POINTER_PLUS_EXPR
3223 && host_integerp (op1, 1)
3224 && TREE_CODE (op0) == ADDR_EXPR
3225 && is_gimple_min_invariant (op0))
3226 return build_invariant_address (TREE_TYPE (op0),
3227 TREE_OPERAND (op0, 0),
3228 TREE_INT_CST_LOW (op1));
3230 /* Avoid folding if nothing changed. */
3231 if (op0 == gimple_assign_rhs1 (stmt)
3232 && op1 == gimple_assign_rhs2 (stmt))
3233 return NULL_TREE;
3235 fold_defer_overflow_warnings ();
3237 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3238 if (result)
3239 STRIP_USELESS_TYPE_CONVERSION (result);
3241 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3242 stmt, 0);
3244 /* Make sure result is not a complex expression consisting
3245 of operators of operators (i.e. (a + b) + (a + c)).
3246 Otherwise, we will end up with unbounded expressions if
3247 fold does anything at all. */
3248 if (result && valid_gimple_rhs_p (result))
3249 return result;
3251 return NULL_TREE;
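/* Example of the combining above (a sketch): if
   VN_INFO (t_2)->expr is a_1 + 1, then for x_3 = t_2 + 2 the code
   substitutes the expression for t_2 and folds (a_1 + 1) + 2 to
   a_1 + 3, combining constants across statements while the
   valid_gimple_rhs_p check prevents unbounded expression growth.  */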
3254 /* Simplify the unary expression RHS, and return the result if
3255 simplified. */
3257 static tree
3258 simplify_unary_expression (gimple stmt)
3260 tree result = NULL_TREE;
3261 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3262 enum tree_code code = gimple_assign_rhs_code (stmt);
3264 /* We handle some tcc_reference codes here that are all
3265 GIMPLE_ASSIGN_SINGLE codes. */
3266 if (code == REALPART_EXPR
3267 || code == IMAGPART_EXPR
3268 || code == VIEW_CONVERT_EXPR
3269 || code == BIT_FIELD_REF)
3270 op0 = TREE_OPERAND (op0, 0);
3272 if (TREE_CODE (op0) != SSA_NAME)
3273 return NULL_TREE;
3275 orig_op0 = op0;
3276 if (VN_INFO (op0)->has_constants)
3277 op0 = valueize_expr (vn_get_expr_for (op0));
3278 else if (CONVERT_EXPR_CODE_P (code)
3279 || code == REALPART_EXPR
3280 || code == IMAGPART_EXPR
3281 || code == VIEW_CONVERT_EXPR
3282 || code == BIT_FIELD_REF)
3284 /* We want to do tree-combining on conversion-like expressions.
3285 Make sure we feed only SSA_NAMEs or constants to fold though. */
3286 tree tem = valueize_expr (vn_get_expr_for (op0));
3287 if (UNARY_CLASS_P (tem)
3288 || BINARY_CLASS_P (tem)
3289 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3290 || TREE_CODE (tem) == SSA_NAME
3291 || TREE_CODE (tem) == CONSTRUCTOR
3292 || is_gimple_min_invariant (tem))
3293 op0 = tem;
3296 /* Avoid folding if nothing changed, but remember the expression. */
3297 if (op0 == orig_op0)
3298 return NULL_TREE;
3300 if (code == BIT_FIELD_REF)
3302 tree rhs = gimple_assign_rhs1 (stmt);
3303 result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3304 op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3306 else
3307 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3308 if (result)
3310 STRIP_USELESS_TYPE_CONVERSION (result);
3311 if (valid_gimple_rhs_p (result))
3312 return result;
3315 return NULL_TREE;
3318 /* Try to simplify RHS using equivalences and constant folding. */
3320 static tree
3321 try_to_simplify (gimple stmt)
3323 enum tree_code code = gimple_assign_rhs_code (stmt);
3324 tree tem;
3326 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
3327 in this case; there is no point in doing extra work. */
3328 if (code == SSA_NAME)
3329 return NULL_TREE;
3331 /* First try constant folding based on our current lattice. */
3332 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize);
3333 if (tem
3334 && (TREE_CODE (tem) == SSA_NAME
3335 || is_gimple_min_invariant (tem)))
3336 return tem;
3338 /* If that didn't work try combining multiple statements. */
3339 switch (TREE_CODE_CLASS (code))
3341 case tcc_reference:
3342 /* Fallthrough for some unary codes that can operate on registers. */
3343 if (!(code == REALPART_EXPR
3344 || code == IMAGPART_EXPR
3345 || code == VIEW_CONVERT_EXPR
3346 || code == BIT_FIELD_REF))
3347 break;
3348 /* We could do a little more with unary ops, if they expand
3349 into binary ops, but it's debatable whether it is worth it. */
3350 case tcc_unary:
3351 return simplify_unary_expression (stmt);
3353 case tcc_comparison:
3354 case tcc_binary:
3355 return simplify_binary_expression (stmt);
3357 default:
3358 break;
3361 return NULL_TREE;
3364 /* Visit and value number USE, return true if the value number
3365 changed. */
3367 static bool
3368 visit_use (tree use)
3370 bool changed = false;
3371 gimple stmt = SSA_NAME_DEF_STMT (use);
3373 mark_use_processed (use);
3375 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3376 if (dump_file && (dump_flags & TDF_DETAILS)
3377 && !SSA_NAME_IS_DEFAULT_DEF (use))
3379 fprintf (dump_file, "Value numbering ");
3380 print_generic_expr (dump_file, use, 0);
3381 fprintf (dump_file, " stmt = ");
3382 print_gimple_stmt (dump_file, stmt, 0, 0);
3385 /* Handle uninitialized uses. */
3386 if (SSA_NAME_IS_DEFAULT_DEF (use))
3387 changed = set_ssa_val_to (use, use);
3388 else
3390 if (gimple_code (stmt) == GIMPLE_PHI)
3391 changed = visit_phi (stmt);
3392 else if (gimple_has_volatile_ops (stmt))
3393 changed = defs_to_varying (stmt);
3394 else if (is_gimple_assign (stmt))
3396 enum tree_code code = gimple_assign_rhs_code (stmt);
3397 tree lhs = gimple_assign_lhs (stmt);
3398 tree rhs1 = gimple_assign_rhs1 (stmt);
3399 tree simplified;
3401 /* Shortcut for copies. Simplifying copies is pointless,
3402 since we copy the expression and value they represent. */
3403 if (code == SSA_NAME
3404 && TREE_CODE (lhs) == SSA_NAME)
3406 changed = visit_copy (lhs, rhs1);
3407 goto done;
3409 simplified = try_to_simplify (stmt);
3410 if (simplified)
3412 if (dump_file && (dump_flags & TDF_DETAILS))
3414 fprintf (dump_file, "RHS ");
3415 print_gimple_expr (dump_file, stmt, 0, 0);
3416 fprintf (dump_file, " simplified to ");
3417 print_generic_expr (dump_file, simplified, 0);
3418 if (TREE_CODE (lhs) == SSA_NAME)
3419 fprintf (dump_file, " has constants %d\n",
3420 expr_has_constants (simplified));
3421 else
3422 fprintf (dump_file, "\n");
3425 /* Setting value numbers to constants will occasionally
3426 screw up phi congruence because constants are not
3427 uniquely associated with a single ssa name that can be
3428 looked up. */
3429 if (simplified
3430 && is_gimple_min_invariant (simplified)
3431 && TREE_CODE (lhs) == SSA_NAME)
3433 VN_INFO (lhs)->expr = simplified;
3434 VN_INFO (lhs)->has_constants = true;
3435 changed = set_ssa_val_to (lhs, simplified);
3436 goto done;
3438 else if (simplified
3439 && TREE_CODE (simplified) == SSA_NAME
3440 && TREE_CODE (lhs) == SSA_NAME)
3442 changed = visit_copy (lhs, simplified);
3443 goto done;
3445 else if (simplified)
3447 if (TREE_CODE (lhs) == SSA_NAME)
3449 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3450 /* We have to unshare the expression or else
3451 valueizing may change the IL stream. */
3452 VN_INFO (lhs)->expr = unshare_expr (simplified);
3455 else if (stmt_has_constants (stmt)
3456 && TREE_CODE (lhs) == SSA_NAME)
3457 VN_INFO (lhs)->has_constants = true;
3458 else if (TREE_CODE (lhs) == SSA_NAME)
3460 /* We reset expr and constantness here because we may
3461 have been value numbering optimistically, and
3462 iterating. They may become non-constant in this case,
3463 even if they were optimistically constant. */
3465 VN_INFO (lhs)->has_constants = false;
3466 VN_INFO (lhs)->expr = NULL_TREE;
3469 if ((TREE_CODE (lhs) == SSA_NAME
3470 /* We can substitute SSA_NAMEs that are live over
3471 abnormal edges with their constant value. */
3472 && !(gimple_assign_copy_p (stmt)
3473 && is_gimple_min_invariant (rhs1))
3474 && !(simplified
3475 && is_gimple_min_invariant (simplified))
3476 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3477 /* Stores or copies from SSA_NAMEs that are live over
3478 abnormal edges are a problem. */
3479 || (code == SSA_NAME
3480 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3481 changed = defs_to_varying (stmt);
3482 else if (REFERENCE_CLASS_P (lhs)
3483 || DECL_P (lhs))
3484 changed = visit_reference_op_store (lhs, rhs1, stmt);
3485 else if (TREE_CODE (lhs) == SSA_NAME)
3487 if ((gimple_assign_copy_p (stmt)
3488 && is_gimple_min_invariant (rhs1))
3489 || (simplified
3490 && is_gimple_min_invariant (simplified)))
3492 VN_INFO (lhs)->has_constants = true;
3493 if (simplified)
3494 changed = set_ssa_val_to (lhs, simplified);
3495 else
3496 changed = set_ssa_val_to (lhs, rhs1);
3498 else
3500 /* First try to lookup the simplified expression. */
3501 if (simplified)
3503 enum gimple_rhs_class rhs_class;
3506 rhs_class = get_gimple_rhs_class (TREE_CODE (simplified));
3507 if ((rhs_class == GIMPLE_UNARY_RHS
3508 || rhs_class == GIMPLE_BINARY_RHS
3509 || rhs_class == GIMPLE_TERNARY_RHS)
3510 && valid_gimple_rhs_p (simplified))
3512 tree result = vn_nary_op_lookup (simplified, NULL);
3513 if (result)
3515 changed = set_ssa_val_to (lhs, result);
3516 goto done;
3521 /* Otherwise visit the original statement. */
3522 switch (vn_get_stmt_kind (stmt))
3524 case VN_NARY:
3525 changed = visit_nary_op (lhs, stmt);
3526 break;
3527 case VN_REFERENCE:
3528 changed = visit_reference_op_load (lhs, rhs1, stmt);
3529 break;
3530 default:
3531 changed = defs_to_varying (stmt);
3532 break;
3536 else
3537 changed = defs_to_varying (stmt);
3539 else if (is_gimple_call (stmt))
3541 tree lhs = gimple_call_lhs (stmt);
3543 /* ??? We could try to simplify calls. */
3545 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3547 if (stmt_has_constants (stmt))
3548 VN_INFO (lhs)->has_constants = true;
3549 else
3551 /* We reset expr and constantness here because we may
3552 have been value numbering optimistically, and
3553 iterating. They may become non-constant in this case,
3554 even if they were optimistically constant. */
3555 VN_INFO (lhs)->has_constants = false;
3556 VN_INFO (lhs)->expr = NULL_TREE;
3559 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3561 changed = defs_to_varying (stmt);
3562 goto done;
3566 if (!gimple_call_internal_p (stmt)
3567 && (/* Calls to the same function with the same vuse
3568 and the same operands do not necessarily return the same
3569 value, unless they're pure or const. */
3570 gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
3571 /* If calls have a vdef, subsequent calls won't have
3572 the same incoming vuse. So, if 2 calls with vdef have the
3573 same vuse, we know they're not subsequent.
3574 So we can give two non-subsequent calls to the same function
3575 with the same vuse and the same operands the same value number,
3576 because there is no code in the program that can
3577 compare the 2 values...
3578 || (gimple_vdef (stmt)
3579 /* ... unless the call returns a pointer which does
3580 not alias with anything else. In which case the
3581 information that the values are distinct are encoded
3582 in the IL. */
3583 && !(gimple_call_return_flags (stmt) & ERF_NOALIAS))))
3584 changed = visit_reference_op_call (lhs, stmt);
3585 else
3586 changed = defs_to_varying (stmt);
3588 else
3589 changed = defs_to_varying (stmt);
3591 done:
3592 return changed;
3595 /* Compare two operands by reverse postorder index. */
3597 static int
3598 compare_ops (const void *pa, const void *pb)
3600 const tree opa = *((const tree *)pa);
3601 const tree opb = *((const tree *)pb);
3602 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3603 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3604 basic_block bba;
3605 basic_block bbb;
3607 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3608 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3609 else if (gimple_nop_p (opstmta))
3610 return -1;
3611 else if (gimple_nop_p (opstmtb))
3612 return 1;
3614 bba = gimple_bb (opstmta);
3615 bbb = gimple_bb (opstmtb);
3617 if (!bba && !bbb)
3618 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3619 else if (!bba)
3620 return -1;
3621 else if (!bbb)
3622 return 1;
3624 if (bba == bbb)
3626 if (gimple_code (opstmta) == GIMPLE_PHI
3627 && gimple_code (opstmtb) == GIMPLE_PHI)
3628 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3629 else if (gimple_code (opstmta) == GIMPLE_PHI)
3630 return -1;
3631 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3632 return 1;
3633 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3634 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3635 else
3636 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3638 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3641 /* Sort an array containing members of a strongly connected component
3642 SCC so that the members are ordered by RPO number.
3643 This means that when the sort is complete, iterating through the
3644 array will give you the members in RPO order. */
3646 static void
3647 sort_scc (vec<tree> scc)
3649 scc.qsort (compare_ops);
3652 /* Insert the no longer used nary ONARY to the hash INFO. */
3654 static void
3655 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3657 size_t size = sizeof_vn_nary_op (onary->length);
3658 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3659 &info->nary_obstack);
3660 memcpy (nary, onary, size);
3661 vn_nary_op_insert_into (nary, info->nary, false);
3664 /* Insert the no longer used phi OPHI to the hash INFO. */
3666 static void
3667 copy_phi (vn_phi_t ophi, vn_tables_t info)
3669 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3670 vn_phi_s **slot;
3671 memcpy (phi, ophi, sizeof (*phi));
3672 ophi->phiargs.create (0);
3673 slot = info->phis.find_slot_with_hash (phi, phi->hashcode, INSERT);
3674 gcc_assert (!*slot);
3675 *slot = phi;
3678 /* Insert the no longer used reference OREF to the hash INFO. */
3680 static void
3681 copy_reference (vn_reference_t oref, vn_tables_t info)
3683 vn_reference_t ref;
3684 vn_reference_s **slot;
3685 ref = (vn_reference_t) pool_alloc (info->references_pool);
3686 memcpy (ref, oref, sizeof (*ref));
3687 oref->operands.create (0);
3688 slot = info->references.find_slot_with_hash (ref, ref->hashcode, INSERT);
3689 if (*slot)
3690 free_reference (*slot);
3691 *slot = ref;
3694 /* Process a strongly connected component in the SSA graph. */
3696 static void
3697 process_scc (vec<tree> scc)
3699 tree var;
3700 unsigned int i;
3701 unsigned int iterations = 0;
3702 bool changed = true;
3703 vn_nary_op_iterator_type hin;
3704 vn_phi_iterator_type hip;
3705 vn_reference_iterator_type hir;
3706 vn_nary_op_t nary;
3707 vn_phi_t phi;
3708 vn_reference_t ref;
3710 /* If the SCC has a single member, just visit it. */
3711 if (scc.length () == 1)
3713 tree use = scc[0];
3714 if (VN_INFO (use)->use_processed)
3715 return;
3716 /* We need to make sure it doesn't form a cycle itself, which can
3717 happen for self-referential PHI nodes. In that case we would
3718 end up inserting an expression with VN_TOP operands into the
3719 valid table which makes us derive bogus equivalences later.
3720 The cheapest way to check this is to assume it for all PHI nodes. */
3721 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3722 /* Fallthru to iteration. */ ;
3723 else
3725 visit_use (use);
3726 return;
3730 /* Iterate over the SCC with the optimistic table until it stops
3731 changing. */
3732 current_info = optimistic_info;
3733 while (changed)
3735 changed = false;
3736 iterations++;
3737 if (dump_file && (dump_flags & TDF_DETAILS))
3738 fprintf (dump_file, "Starting iteration %d\n", iterations);
3739 /* As we are value-numbering optimistically we have to
3740 clear the expression tables and the simplified expressions
3741 in each iteration until we converge. */
3742 optimistic_info->nary.empty ();
3743 optimistic_info->phis.empty ();
3744 optimistic_info->references.empty ();
3745 obstack_free (&optimistic_info->nary_obstack, NULL);
3746 gcc_obstack_init (&optimistic_info->nary_obstack);
3747 empty_alloc_pool (optimistic_info->phis_pool);
3748 empty_alloc_pool (optimistic_info->references_pool);
3749 FOR_EACH_VEC_ELT (scc, i, var)
3750 VN_INFO (var)->expr = NULL_TREE;
3751 FOR_EACH_VEC_ELT (scc, i, var)
3752 changed |= visit_use (var);
3755 statistics_histogram_event (cfun, "SCC iterations", iterations);
3757 /* Finally, copy the contents of the no longer used optimistic
3758 table to the valid table. */
3759 FOR_EACH_HASH_TABLE_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hin)
3760 copy_nary (nary, valid_info);
3761 FOR_EACH_HASH_TABLE_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hip)
3762 copy_phi (phi, valid_info);
3763 FOR_EACH_HASH_TABLE_ELEMENT (optimistic_info->references,
3764 ref, vn_reference_t, hir)
3765 copy_reference (ref, valid_info);
3767 current_info = valid_info;
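/* Example of the optimistic iteration (a sketch): for the cycle

     i_1 = PHI <0, i_2>
     i_2 = i_1 + 0;

   the first iteration sees only the constant 0 and VN_TOP at the PHI
   and optimistically values i_1 (and thus i_2) as 0; the second
   iteration confirms the assumption and nothing changes, so the SCC
   converges after two iterations.  */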
3771 /* Pop the components of the found SCC for NAME off the SCC stack
3772 and process them. Returns true if all went well, false if
3773 we run into resource limits. */
3775 static bool
3776 extract_and_process_scc_for_name (tree name)
3778 vec<tree> scc = vNULL;
3779 tree x;
3781 /* Found an SCC, pop the components off the SCC stack and
3782 process them. */
3785 x = sccstack.pop ();
3787 VN_INFO (x)->on_sccstack = false;
3788 scc.safe_push (x);
3789 } while (x != name);
3791 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
3792 if (scc.length ()
3793 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3795 if (dump_file)
3796 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3797 "SCC size %u exceeding %u\n", scc.length (),
3798 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3800 scc.release ();
3801 return false;
3804 if (scc.length () > 1)
3805 sort_scc (scc);
3807 if (dump_file && (dump_flags & TDF_DETAILS))
3808 print_scc (dump_file, scc);
3810 process_scc (scc);
3812 scc.release ();
3814 return true;
3817 /* Depth first search on NAME to discover and process SCC's in the SSA
3818 graph.
3819 Execution of this algorithm relies on the fact that the SCC's are
3820 popped off the stack in topological order.
3821 Returns true if successful, false if we stopped processing SCC's due
3822 to resource constraints. */
3824 static bool
3825 DFS (tree name)
3827 vec<ssa_op_iter> itervec = vNULL;
3828 vec<tree> namevec = vNULL;
3829 use_operand_p usep = NULL;
3830 gimple defstmt;
3831 tree use;
3832 ssa_op_iter iter;
3834 start_over:
3835 /* SCC info */
3836 VN_INFO (name)->dfsnum = next_dfs_num++;
3837 VN_INFO (name)->visited = true;
3838 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3840 sccstack.safe_push (name);
3841 VN_INFO (name)->on_sccstack = true;
3842 defstmt = SSA_NAME_DEF_STMT (name);
3844 /* Recursively DFS on our operands, looking for SCC's. */
3845 if (!gimple_nop_p (defstmt))
3847 /* Push a new iterator. */
3848 if (gimple_code (defstmt) == GIMPLE_PHI)
3849 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3850 else
3851 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3853 else
3854 clear_and_done_ssa_iter (&iter);
3856 while (1)
3858 /* If we are done processing uses of a name, go up the stack
3859 of iterators and process SCCs as we found them. */
3860 if (op_iter_done (&iter))
3862 /* See if we found an SCC. */
3863 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3864 if (!extract_and_process_scc_for_name (name))
3866 namevec.release ();
3867 itervec.release ();
3868 return false;
3871 /* Check if we are done. */
3872 if (namevec.is_empty ())
3874 namevec.release ();
3875 itervec.release ();
3876 return true;
3879 /* Restore the last use walker and continue walking there. */
3880 use = name;
3881 name = namevec.pop ();
3882 memcpy (&iter, &itervec.last (),
3883 sizeof (ssa_op_iter));
3884 itervec.pop ();
3885 goto continue_walking;
3888 use = USE_FROM_PTR (usep);
3890 /* Since we handle phi nodes, we will sometimes get
3891 invariants in the use expression. */
3892 if (TREE_CODE (use) == SSA_NAME)
3894 if (! (VN_INFO (use)->visited))
3896 /* Recurse by pushing the current use walking state on
3897 the stack and starting over. */
3898 itervec.safe_push (iter);
3899 namevec.safe_push (name);
3900 name = use;
3901 goto start_over;
3903 continue_walking:
3904 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3905 VN_INFO (use)->low);
3907 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3908 && VN_INFO (use)->on_sccstack)
3910 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3911 VN_INFO (name)->low);
3915 usep = op_iter_next_use (&iter);
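/* The above is Tarjan's SCC algorithm made iterative: NAMEVEC and
   ITERVEC hold the explicit recursion state.  Sketch of a run: for

     i_1 = PHI <0, i_2>
     i_2 = i_1 + 1;

   the walk from i_2 reaches i_1 and from there i_2 again, which is
   still on the SCC stack, so both low values collapse to the smaller
   dfsnum and extract_and_process_scc_for_name pops {i_1, i_2} as a
   single SCC.  */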
3919 /* Allocate a value number table. */
3921 static void
3922 allocate_vn_table (vn_tables_t table)
3924 table->phis.create (23);
3925 table->nary.create (23);
3926 table->references.create (23);
3928 gcc_obstack_init (&table->nary_obstack);
3929 table->phis_pool = create_alloc_pool ("VN phis",
3930 sizeof (struct vn_phi_s),
3931 30);
3932 table->references_pool = create_alloc_pool ("VN references",
3933 sizeof (struct vn_reference_s),
3934 30);
3937 /* Free a value number table. */
3939 static void
3940 free_vn_table (vn_tables_t table)
3942 table->phis.dispose ();
3943 table->nary.dispose ();
3944 table->references.dispose ();
3945 obstack_free (&table->nary_obstack, NULL);
3946 free_alloc_pool (table->phis_pool);
3947 free_alloc_pool (table->references_pool);

/* Initialize the SCCVN data structures: the SCC stack, the hash
   tables, the per-SSA-name info and the RPO numbering.  */

static void
init_scc_vn (void)
{
  size_t i;
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  sccstack.create (0);
  constant_to_value_id.create (23);

  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table.create (num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size, it just
     preallocates the space to do so.  */
  vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs.create (0);
  shared_lookup_references.create (0);
  rpo_numbers = XNEWVEC (int, last_basic_block);
  rpo_numbers_temp = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
     the i'th block in RPO order is bb.  We want to map bb's to RPO
     numbers, so we need to rearrange this array.  */
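  /* For example (hypothetical numbers), if the computed RPO order is
     bb5, bb3, bb4, then rpo_numbers_temp is {5, 3, 4} and the loop
     below leaves rpo_numbers[5] == 0, rpo_numbers[3] == 1 and
     rpo_numbers[4] == 2.  */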
  for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;

  XDELETE (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
        {
          VN_INFO_GET (name)->valnum = VN_TOP;
          VN_INFO (name)->expr = NULL_TREE;
          VN_INFO (name)->value_id = 0;
        }
    }

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}

/* Free the data structures allocated by init_scc_vn, releasing any
   SSA names that were inserted but turned out not to be needed.  */

void
free_scc_vn (void)
{
  size_t i;

  constant_to_value_id.dispose ();
  BITMAP_FREE (constant_value_ids);
  shared_lookup_phiargs.release ();
  shared_lookup_references.release ();
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->needs_insertion)
        release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  vn_ssa_aux_table.release ();

  sccstack.release ();
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}

/* Set *ID according to RESULT.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result && TREE_CODE (result) == SSA_NAME)
    *id = VN_INFO (result)->value_id;
  else if (result && is_gimple_min_invariant (result))
    *id = get_or_alloc_constant_value_id (result);
  else
    *id = get_next_value_id ();
}

/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HASH_TABLE_ELEMENT (valid_info->nary, vno, vn_nary_op_t, hin)
    set_value_id_for_result (vno->result, &vno->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (valid_info->phis, vp, vn_phi_t, hip)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (valid_info->references, vr, vn_reference_t, hir)
    set_value_id_for_result (vr->result, &vr->value_id);
}

/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies how
   the alias oracle is used when walking memory statements during
   the VN process.  */
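
/* A minimal sketch of the expected calling sequence, loosely modeled
   on what the FRE and PRE passes do (the choice of walk kind and the
   handling of a bail-out are up to the caller):

     if (!run_scc_vn (VN_WALK))
       return 0;
     ... consult SSA_VAL and the vn_*_lookup routines ...
     free_scc_vn ();

   VN_WALK permits lookups to walk memory statements using the alias
   oracle, while VN_NOWALK disables that walking.  */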

bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  size_t i;
  tree param;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();
  current_info = valid_info;

  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    {
      tree def = ssa_default_def (cfun, param);
      if (def)
        VN_INFO (def)->valnum = def;
    }

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->visited == false
          && !has_zero_uses (name))
        if (!DFS (name))
          {
            free_scc_vn ();
            return false;
          }
    }

  /* Initialize the value ids.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
        continue;
      info = VN_INFO (name);
      if (info->valnum == name
          || info->valnum == VN_TOP)
        info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
        info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate.  */
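  /* For example (hypothetical names), if b_2 value-numbers to a_1 and
     a_1 got a fresh value id above because it values to itself, the
     loop below copies a_1's value id to b_2, so that names with the
     same value number share one value id.  */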
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
        continue;
      info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
          && info->valnum != name
          && info->value_id != VN_INFO (info->valnum)->value_id)
        info->value_id = VN_INFO (info->valnum)->value_id;
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          if (name
              && VN_INFO (name)->visited
              && SSA_VAL (name) != name)
            {
              print_generic_expr (dump_file, name, 0);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, SSA_VAL (name), 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  return true;
}

/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}

/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}

/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */
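
/* For example, a TRUNC_DIV_EXPR whose divisor is not known to be
   nonzero may trap, as may a floating-point comparison when NaNs must
   be honored; such an expression must not be hoisted to a point where
   it would be evaluated unconditionally.  */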

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (type)
               && TYPE_OVERFLOW_TRAPS (type))
        honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
                                       honor_trapv,
                                       honor_nans, honor_snans, rhs2,
                                       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}