gcc/tree-ssa-sccvn.c
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2013 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "basic-block.h"
27 #include "gimple-pretty-print.h"
28 #include "tree-inline.h"
29 #include "tree-flow.h"
30 #include "gimple.h"
31 #include "dumpfile.h"
32 #include "hash-table.h"
33 #include "alloc-pool.h"
34 #include "flags.h"
35 #include "bitmap.h"
36 #include "cfgloop.h"
37 #include "params.h"
38 #include "tree-ssa-propagate.h"
39 #include "tree-ssa-sccvn.h"
40 #include "gimple-fold.h"
42 /* This algorithm is based on the SCC algorithm presented by Keith
43 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
44 (http://citeseer.ist.psu.edu/41805.html). In
45 straight line code, it is equivalent to a regular hash based value
46 numbering that is performed in reverse postorder.
48 For code with cycles, there are two alternatives, both of which
49 require keeping the hashtables separate from the actual list of
50 value numbers for SSA names.
52 1. Iterate value numbering in an RPO walk of the blocks, removing
53 all the entries from the hashtable after each iteration (but
54 keeping the SSA name->value number mapping between iterations).
55 Iterate until it does not change.
57 2. Perform value numbering as part of an SCC walk on the SSA graph,
58 iterating only the cycles in the SSA graph until they do not change
59 (using a separate, optimistic hashtable for value numbering the SCC
60 operands).
62 The second is not just faster in practice (because most SSA graph
63 cycles do not involve all the variables in the graph), it also has
64 some nice properties.
66 One of these nice properties is that when we pop an SCC off the
67 stack, we are guaranteed to have processed all the operands coming from
68 *outside of that SCC*, so we do not need to do anything special to
69 ensure they have value numbers.
71 Another nice property is that the SCC walk is done as part of a DFS
72 of the SSA graph, which makes it easy to perform combining and
73 simplifying operations at the same time.
75 The code below is deliberately written in a way that makes it easy
76 to separate the SCC walk from the other work it does.
78 In order to propagate constants through the code, we track which
79 expressions contain constants, and use those while folding. In
80 theory, we could also track expressions whose value numbers are
81 replaced, in case we end up folding based on expression
82 identities.
84 In order to value number memory, we assign value numbers to vuses.
85 This enables us to note that, for example, stores to the same
86 address of the same value from the same starting memory states are
87 equivalent.
88 TODO:
90 1. We can iterate only the changing portions of the SCC's, but
91 I have not seen an SCC big enough for this to be a win.
92 2. If you differentiate between phi nodes for loops and phi nodes
93 for if-then-else, you can properly consider phi nodes in different
94 blocks for equivalence.
95 3. We could value number vuses in more cases, particularly, whole
 96    structure copies.  */
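/* An illustrative sketch (not part of the implementation below):
   in straight-line code such as

     a_1 = x_2 + y_3;
     b_4 = x_2 + y_3;
     c_5 = a_1 + 1;
     d_6 = b_4 + 1;

   a_1 and b_4 hash to the same nary operation and get the same value
   number, which in turn makes c_5 and d_6 equal as well.  For a cycle
   such as

     # i_1 = PHI <0(preheader), i_3(latch)>
     i_3 = i_1 + 1;

   the names i_1 and i_3 form an SCC in the SSA graph and are value
   numbered together using the optimistic table, iterating until their
   value numbers no longer change.  */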
100 /* vn_nary_op hashtable helpers. */
102 struct vn_nary_op_hasher : typed_noop_remove <vn_nary_op_s>
104 typedef vn_nary_op_s value_type;
105 typedef vn_nary_op_s compare_type;
106 static inline hashval_t hash (const value_type *);
107 static inline bool equal (const value_type *, const compare_type *);
 110 /* Return the computed hashcode for nary operation VNO1.  */
112 inline hashval_t
113 vn_nary_op_hasher::hash (const value_type *vno1)
115 return vno1->hashcode;
 118 /* Compare nary operations VNO1 and VNO2 and return true if they are
 119    equivalent.  */
121 inline bool
122 vn_nary_op_hasher::equal (const value_type *vno1, const compare_type *vno2)
124 return vn_nary_op_eq (vno1, vno2);
127 typedef hash_table <vn_nary_op_hasher> vn_nary_op_table_type;
128 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
131 /* vn_phi hashtable helpers. */
133 static int
134 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
136 struct vn_phi_hasher
138 typedef vn_phi_s value_type;
139 typedef vn_phi_s compare_type;
140 static inline hashval_t hash (const value_type *);
141 static inline bool equal (const value_type *, const compare_type *);
142 static inline void remove (value_type *);
 145 /* Return the computed hashcode for phi operation VP1.  */
147 inline hashval_t
148 vn_phi_hasher::hash (const value_type *vp1)
150 return vp1->hashcode;
153 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
155 inline bool
156 vn_phi_hasher::equal (const value_type *vp1, const compare_type *vp2)
158 return vn_phi_eq (vp1, vp2);
 161 /* Free the phi operation structure PHI.  */
163 inline void
164 vn_phi_hasher::remove (value_type *phi)
166 phi->phiargs.release ();
169 typedef hash_table <vn_phi_hasher> vn_phi_table_type;
170 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
173 /* Compare two reference operands P1 and P2 for equality. Return true if
174 they are equal, and false otherwise. */
176 static int
177 vn_reference_op_eq (const void *p1, const void *p2)
179 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
180 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
182 return (vro1->opcode == vro2->opcode
183 /* We do not care for differences in type qualification. */
184 && (vro1->type == vro2->type
185 || (vro1->type && vro2->type
186 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
187 TYPE_MAIN_VARIANT (vro2->type))))
188 && expressions_equal_p (vro1->op0, vro2->op0)
189 && expressions_equal_p (vro1->op1, vro2->op1)
190 && expressions_equal_p (vro1->op2, vro2->op2));
 193 /* Free the reference operation structure VR.  */
195 static inline void
196 free_reference (vn_reference_s *vr)
198 vr->operands.release ();
202 /* vn_reference hashtable helpers. */
204 struct vn_reference_hasher
206 typedef vn_reference_s value_type;
207 typedef vn_reference_s compare_type;
208 static inline hashval_t hash (const value_type *);
209 static inline bool equal (const value_type *, const compare_type *);
210 static inline void remove (value_type *);
 213 /* Return the hashcode for a given reference operation VR1.  */
215 inline hashval_t
216 vn_reference_hasher::hash (const value_type *vr1)
218 return vr1->hashcode;
221 inline bool
222 vn_reference_hasher::equal (const value_type *v, const compare_type *c)
224 return vn_reference_eq (v, c);
227 inline void
228 vn_reference_hasher::remove (value_type *v)
230 free_reference (v);
233 typedef hash_table <vn_reference_hasher> vn_reference_table_type;
234 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
237 /* The set of hashtables and alloc_pool's for their items. */
239 typedef struct vn_tables_s
241 vn_nary_op_table_type nary;
242 vn_phi_table_type phis;
243 vn_reference_table_type references;
244 struct obstack nary_obstack;
245 alloc_pool phis_pool;
246 alloc_pool references_pool;
247 } *vn_tables_t;
250 /* vn_constant hashtable helpers. */
252 struct vn_constant_hasher : typed_free_remove <vn_constant_s>
254 typedef vn_constant_s value_type;
255 typedef vn_constant_s compare_type;
256 static inline hashval_t hash (const value_type *);
257 static inline bool equal (const value_type *, const compare_type *);
260 /* Hash table hash function for vn_constant_t. */
262 inline hashval_t
263 vn_constant_hasher::hash (const value_type *vc1)
265 return vc1->hashcode;
268 /* Hash table equality function for vn_constant_t. */
270 inline bool
271 vn_constant_hasher::equal (const value_type *vc1, const compare_type *vc2)
273 if (vc1->hashcode != vc2->hashcode)
274 return false;
276 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
279 static hash_table <vn_constant_hasher> constant_to_value_id;
280 static bitmap constant_value_ids;
283 /* Valid hashtables storing information we have proven to be
284 correct. */
286 static vn_tables_t valid_info;
288 /* Optimistic hashtables storing information we are making assumptions about
289 during iterations. */
291 static vn_tables_t optimistic_info;
293 /* Pointer to the set of hashtables that is currently being used.
294 Should always point to either the optimistic_info, or the
295 valid_info. */
297 static vn_tables_t current_info;
300 /* Reverse post order index for each basic block. */
302 static int *rpo_numbers;
304 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
306 /* This represents the top of the VN lattice, which is the universal
307 value. */
309 tree VN_TOP;
311 /* Unique counter for our value ids. */
313 static unsigned int next_value_id;
315 /* Next DFS number and the stack for strongly connected component
316 detection. */
318 static unsigned int next_dfs_num;
319 static vec<tree> sccstack;
323 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
324 are allocated on an obstack for locality reasons, and to free them
325 without looping over the vec. */
327 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
328 static struct obstack vn_ssa_aux_obstack;
330 /* Return the value numbering information for a given SSA name. */
332 vn_ssa_aux_t
333 VN_INFO (tree name)
335 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
336 gcc_checking_assert (res);
337 return res;
340 /* Set the value numbering info for a given SSA name to a given
341 value. */
343 static inline void
344 VN_INFO_SET (tree name, vn_ssa_aux_t value)
346 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
349 /* Initialize the value numbering info for a given SSA name.
350 This should be called just once for every SSA name. */
352 vn_ssa_aux_t
353 VN_INFO_GET (tree name)
355 vn_ssa_aux_t newinfo;
357 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
358 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
359 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
360 vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
361 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
362 return newinfo;
366 /* Get the representative expression for the SSA_NAME NAME. Returns
367 the representative SSA_NAME if there is no expression associated with it. */
369 tree
370 vn_get_expr_for (tree name)
372 vn_ssa_aux_t vn = VN_INFO (name);
373 gimple def_stmt;
374 tree expr = NULL_TREE;
375 enum tree_code code;
377 if (vn->valnum == VN_TOP)
378 return name;
380 /* If the value-number is a constant it is the representative
381 expression. */
382 if (TREE_CODE (vn->valnum) != SSA_NAME)
383 return vn->valnum;
385 /* Get to the information of the value of this SSA_NAME. */
386 vn = VN_INFO (vn->valnum);
388 /* If the value-number is a constant it is the representative
389 expression. */
390 if (TREE_CODE (vn->valnum) != SSA_NAME)
391 return vn->valnum;
393 /* Else if we have an expression, return it. */
394 if (vn->expr != NULL_TREE)
395 return vn->expr;
397 /* Otherwise use the defining statement to build the expression. */
398 def_stmt = SSA_NAME_DEF_STMT (vn->valnum);
400 /* If the value number is not an assignment use it directly. */
401 if (!is_gimple_assign (def_stmt))
402 return vn->valnum;
404 /* FIXME tuples. This is incomplete and likely will miss some
405 simplifications. */
406 code = gimple_assign_rhs_code (def_stmt);
407 switch (TREE_CODE_CLASS (code))
409 case tcc_reference:
410 if ((code == REALPART_EXPR
411 || code == IMAGPART_EXPR
412 || code == VIEW_CONVERT_EXPR)
413 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
414 0)) == SSA_NAME)
415 expr = fold_build1 (code,
416 gimple_expr_type (def_stmt),
417 TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
418 break;
420 case tcc_unary:
421 expr = fold_build1 (code,
422 gimple_expr_type (def_stmt),
423 gimple_assign_rhs1 (def_stmt));
424 break;
426 case tcc_binary:
427 expr = fold_build2 (code,
428 gimple_expr_type (def_stmt),
429 gimple_assign_rhs1 (def_stmt),
430 gimple_assign_rhs2 (def_stmt));
431 break;
433 case tcc_exceptional:
434 if (code == CONSTRUCTOR
435 && TREE_CODE
436 (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
437 expr = gimple_assign_rhs1 (def_stmt);
438 break;
440 default:;
442 if (expr == NULL_TREE)
443 return vn->valnum;
445 /* Cache the expression. */
446 vn->expr = expr;
448 return expr;
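/* For example (hypothetical SSA names): if a_1 has value number b_2
   and b_2 is defined by b_2 = c_3 + d_4, vn_get_expr_for (a_1)
   rebuilds and caches the expression c_3 + d_4.  If the defining
   statement of the value is not a GIMPLE assignment, the value
   number itself is returned.  */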
451 /* Return the vn_kind the expression computed by the stmt should be
452 associated with. */
454 enum vn_kind
455 vn_get_stmt_kind (gimple stmt)
457 switch (gimple_code (stmt))
459 case GIMPLE_CALL:
460 return VN_REFERENCE;
461 case GIMPLE_PHI:
462 return VN_PHI;
463 case GIMPLE_ASSIGN:
465 enum tree_code code = gimple_assign_rhs_code (stmt);
466 tree rhs1 = gimple_assign_rhs1 (stmt);
467 switch (get_gimple_rhs_class (code))
469 case GIMPLE_UNARY_RHS:
470 case GIMPLE_BINARY_RHS:
471 case GIMPLE_TERNARY_RHS:
472 return VN_NARY;
473 case GIMPLE_SINGLE_RHS:
474 switch (TREE_CODE_CLASS (code))
476 case tcc_reference:
477 /* VOP-less references can go through unary case. */
478 if ((code == REALPART_EXPR
479 || code == IMAGPART_EXPR
480 || code == VIEW_CONVERT_EXPR
481 || code == BIT_FIELD_REF)
482 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
483 return VN_NARY;
485 /* Fallthrough. */
486 case tcc_declaration:
487 return VN_REFERENCE;
489 case tcc_constant:
490 return VN_CONSTANT;
492 default:
493 if (code == ADDR_EXPR)
494 return (is_gimple_min_invariant (rhs1)
495 ? VN_CONSTANT : VN_REFERENCE);
496 else if (code == CONSTRUCTOR)
497 return VN_NARY;
498 return VN_NONE;
500 default:
501 return VN_NONE;
504 default:
505 return VN_NONE;
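/* A few examples of the mapping above (illustration only):
   x_1 = a_2 + b_3 is VN_NARY, x_1 = MEM[p_4] is VN_REFERENCE,
   x_1 = PHI <a_2, b_3> is VN_PHI, a call is VN_REFERENCE and
   x_1 = &global is VN_CONSTANT when the address is invariant.  */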
 509 /* Lookup a value id for CONSTANT and return it.  If it does not
 510    exist, return 0.  */
512 unsigned int
513 get_constant_value_id (tree constant)
515 vn_constant_s **slot;
516 struct vn_constant_s vc;
518 vc.hashcode = vn_hash_constant_with_type (constant);
519 vc.constant = constant;
520 slot = constant_to_value_id.find_slot_with_hash (&vc, vc.hashcode, NO_INSERT);
521 if (slot)
522 return (*slot)->value_id;
523 return 0;
526 /* Lookup a value id for CONSTANT, and if it does not exist, create a
527 new one and return it. If it does exist, return it. */
529 unsigned int
530 get_or_alloc_constant_value_id (tree constant)
532 vn_constant_s **slot;
533 struct vn_constant_s vc;
534 vn_constant_t vcp;
536 vc.hashcode = vn_hash_constant_with_type (constant);
537 vc.constant = constant;
538 slot = constant_to_value_id.find_slot_with_hash (&vc, vc.hashcode, INSERT);
539 if (*slot)
540 return (*slot)->value_id;
542 vcp = XNEW (struct vn_constant_s);
543 vcp->hashcode = vc.hashcode;
544 vcp->constant = constant;
545 vcp->value_id = get_next_value_id ();
546 *slot = vcp;
547 bitmap_set_bit (constant_value_ids, vcp->value_id);
548 return vcp->value_id;
551 /* Return true if V is a value id for a constant. */
553 bool
554 value_id_constant_p (unsigned int v)
556 return bitmap_bit_p (constant_value_ids, v);
559 /* Compute the hash for a reference operand VRO1. */
561 static hashval_t
562 vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
564 result = iterative_hash_hashval_t (vro1->opcode, result);
565 if (vro1->op0)
566 result = iterative_hash_expr (vro1->op0, result);
567 if (vro1->op1)
568 result = iterative_hash_expr (vro1->op1, result);
569 if (vro1->op2)
570 result = iterative_hash_expr (vro1->op2, result);
571 return result;
574 /* Compute a hash for the reference operation VR1 and return it. */
576 hashval_t
577 vn_reference_compute_hash (const vn_reference_t vr1)
579 hashval_t result = 0;
580 int i;
581 vn_reference_op_t vro;
582 HOST_WIDE_INT off = -1;
583 bool deref = false;
585 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
587 if (vro->opcode == MEM_REF)
588 deref = true;
589 else if (vro->opcode != ADDR_EXPR)
590 deref = false;
591 if (vro->off != -1)
593 if (off == -1)
594 off = 0;
595 off += vro->off;
597 else
599 if (off != -1
600 && off != 0)
601 result = iterative_hash_hashval_t (off, result);
602 off = -1;
603 if (deref
604 && vro->opcode == ADDR_EXPR)
606 if (vro->op0)
608 tree op = TREE_OPERAND (vro->op0, 0);
609 result = iterative_hash_hashval_t (TREE_CODE (op), result);
610 result = iterative_hash_expr (op, result);
613 else
614 result = vn_reference_op_compute_hash (vro, result);
617 if (vr1->vuse)
618 result += SSA_NAME_VERSION (vr1->vuse);
620 return result;
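/* As an illustration (field offsets assumed): for a.b.c with .b at
   byte 0 and .c at byte 4 the operand vector is roughly

     COMPONENT_REF <c>   off 4
     COMPONENT_REF <b>   off 0
     MEM_REF <0>         off 0
     ADDR_EXPR <&a>      off -1

   The known constant offsets 4 + 0 + 0 are summed and mixed into the
   hash as a single value once the ADDR_EXPR with unknown offset is
   reached, and that ADDR_EXPR is hashed as the decl A itself.  Thus
   references reaching the same decl at the same constant offset hash
   the same regardless of how the offset is split over components.  */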
623 /* Return true if reference operations VR1 and VR2 are equivalent. This
624 means they have the same set of operands and vuses. */
626 bool
627 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
629 unsigned i, j;
634 /* Early out if this is not a hash collision. */
635 if (vr1->hashcode != vr2->hashcode)
636 return false;
638 /* The VOP needs to be the same. */
639 if (vr1->vuse != vr2->vuse)
640 return false;
642 /* If the operands are the same we are done. */
643 if (vr1->operands == vr2->operands)
644 return true;
646 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
647 return false;
649 if (INTEGRAL_TYPE_P (vr1->type)
650 && INTEGRAL_TYPE_P (vr2->type))
652 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
653 return false;
655 else if (INTEGRAL_TYPE_P (vr1->type)
656 && (TYPE_PRECISION (vr1->type)
657 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
658 return false;
659 else if (INTEGRAL_TYPE_P (vr2->type)
660 && (TYPE_PRECISION (vr2->type)
661 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
662 return false;
664 i = 0;
665 j = 0;
668 HOST_WIDE_INT off1 = 0, off2 = 0;
669 vn_reference_op_t vro1, vro2;
670 vn_reference_op_s tem1, tem2;
671 bool deref1 = false, deref2 = false;
672 for (; vr1->operands.iterate (i, &vro1); i++)
674 if (vro1->opcode == MEM_REF)
675 deref1 = true;
676 if (vro1->off == -1)
677 break;
678 off1 += vro1->off;
680 for (; vr2->operands.iterate (j, &vro2); j++)
682 if (vro2->opcode == MEM_REF)
683 deref2 = true;
684 if (vro2->off == -1)
685 break;
686 off2 += vro2->off;
688 if (off1 != off2)
689 return false;
690 if (deref1 && vro1->opcode == ADDR_EXPR)
692 memset (&tem1, 0, sizeof (tem1));
693 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
694 tem1.type = TREE_TYPE (tem1.op0);
695 tem1.opcode = TREE_CODE (tem1.op0);
696 vro1 = &tem1;
697 deref1 = false;
699 if (deref2 && vro2->opcode == ADDR_EXPR)
701 memset (&tem2, 0, sizeof (tem2));
702 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
703 tem2.type = TREE_TYPE (tem2.op0);
704 tem2.opcode = TREE_CODE (tem2.op0);
705 vro2 = &tem2;
706 deref2 = false;
708 if (deref1 != deref2)
709 return false;
710 if (!vn_reference_op_eq (vro1, vro2))
711 return false;
712 ++j;
713 ++i;
715 while (vr1->operands.length () != i
716 || vr2->operands.length () != j);
718 return true;
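/* For example (offsets assumed): the loop above walks both operand
   vectors in parallel, summing known constant offsets on each side
   until an operand with unknown offset is reached.  Given the same
   type and VUSE, a.b.c with .b.c at byte offset 4 and a MEM_REF of
   &a with constant offset 4 therefore compare equal: the accumulated
   offsets match and the trailing ADDR_EXPR operands are rewritten to
   the underlying decl A before being compared.  */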
721 /* Copy the operations present in load/store REF into RESULT, a vector of
722 vn_reference_op_s's. */
724 void
725 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
727 if (TREE_CODE (ref) == TARGET_MEM_REF)
729 vn_reference_op_s temp;
731 result->reserve (3);
733 memset (&temp, 0, sizeof (temp));
734 temp.type = TREE_TYPE (ref);
735 temp.opcode = TREE_CODE (ref);
736 temp.op0 = TMR_INDEX (ref);
737 temp.op1 = TMR_STEP (ref);
738 temp.op2 = TMR_OFFSET (ref);
739 temp.off = -1;
740 result->quick_push (temp);
742 memset (&temp, 0, sizeof (temp));
743 temp.type = NULL_TREE;
744 temp.opcode = ERROR_MARK;
745 temp.op0 = TMR_INDEX2 (ref);
746 temp.off = -1;
747 result->quick_push (temp);
749 memset (&temp, 0, sizeof (temp));
750 temp.type = NULL_TREE;
751 temp.opcode = TREE_CODE (TMR_BASE (ref));
752 temp.op0 = TMR_BASE (ref);
753 temp.off = -1;
754 result->quick_push (temp);
755 return;
758 /* For non-calls, store the information that makes up the address. */
760 while (ref)
762 vn_reference_op_s temp;
764 memset (&temp, 0, sizeof (temp));
765 temp.type = TREE_TYPE (ref);
766 temp.opcode = TREE_CODE (ref);
767 temp.off = -1;
769 switch (temp.opcode)
771 case MODIFY_EXPR:
772 temp.op0 = TREE_OPERAND (ref, 1);
773 break;
774 case WITH_SIZE_EXPR:
775 temp.op0 = TREE_OPERAND (ref, 1);
776 temp.off = 0;
777 break;
778 case MEM_REF:
779 /* The base address gets its own vn_reference_op_s structure. */
780 temp.op0 = TREE_OPERAND (ref, 1);
781 if (host_integerp (TREE_OPERAND (ref, 1), 0))
782 temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
783 break;
784 case BIT_FIELD_REF:
785 /* Record bits and position. */
786 temp.op0 = TREE_OPERAND (ref, 1);
787 temp.op1 = TREE_OPERAND (ref, 2);
788 break;
789 case COMPONENT_REF:
790 /* The field decl is enough to unambiguously specify the field,
791 a matching type is not necessary and a mismatching type
792 is always a spurious difference. */
793 temp.type = NULL_TREE;
794 temp.op0 = TREE_OPERAND (ref, 1);
795 temp.op1 = TREE_OPERAND (ref, 2);
797 tree this_offset = component_ref_field_offset (ref);
798 if (this_offset
799 && TREE_CODE (this_offset) == INTEGER_CST)
801 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
802 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
804 double_int off
805 = tree_to_double_int (this_offset)
806 + tree_to_double_int (bit_offset)
807 .rshift (BITS_PER_UNIT == 8
808 ? 3 : exact_log2 (BITS_PER_UNIT));
809 if (off.fits_shwi ())
810 temp.off = off.low;
814 break;
815 case ARRAY_RANGE_REF:
816 case ARRAY_REF:
817 /* Record index as operand. */
818 temp.op0 = TREE_OPERAND (ref, 1);
819 /* Always record lower bounds and element size. */
820 temp.op1 = array_ref_low_bound (ref);
821 temp.op2 = array_ref_element_size (ref);
822 if (TREE_CODE (temp.op0) == INTEGER_CST
823 && TREE_CODE (temp.op1) == INTEGER_CST
824 && TREE_CODE (temp.op2) == INTEGER_CST)
826 double_int off = tree_to_double_int (temp.op0);
827 off += -tree_to_double_int (temp.op1);
828 off *= tree_to_double_int (temp.op2);
829 if (off.fits_shwi ())
830 temp.off = off.low;
832 break;
833 case VAR_DECL:
834 if (DECL_HARD_REGISTER (ref))
836 temp.op0 = ref;
837 break;
839 /* Fallthru. */
840 case PARM_DECL:
841 case CONST_DECL:
842 case RESULT_DECL:
843 /* Canonicalize decls to MEM[&decl] which is what we end up with
844 when valueizing MEM[ptr] with ptr = &decl. */
845 temp.opcode = MEM_REF;
846 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
847 temp.off = 0;
848 result->safe_push (temp);
849 temp.opcode = ADDR_EXPR;
850 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
851 temp.type = TREE_TYPE (temp.op0);
852 temp.off = -1;
853 break;
854 case STRING_CST:
855 case INTEGER_CST:
856 case COMPLEX_CST:
857 case VECTOR_CST:
858 case REAL_CST:
859 case FIXED_CST:
860 case CONSTRUCTOR:
861 case SSA_NAME:
862 temp.op0 = ref;
863 break;
864 case ADDR_EXPR:
865 if (is_gimple_min_invariant (ref))
867 temp.op0 = ref;
868 break;
870 /* Fallthrough. */
871 /* These are only interesting for their operands, their
872 existence, and their type. They will never be the last
 873    ref in the chain of references (i.e. they require an
 874    operand), so we don't have to put anything
 875    for op* as it will be handled by the iteration.  */
876 case REALPART_EXPR:
877 case VIEW_CONVERT_EXPR:
878 temp.off = 0;
879 break;
880 case IMAGPART_EXPR:
881 /* This is only interesting for its constant offset. */
882 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
883 break;
884 default:
885 gcc_unreachable ();
887 result->safe_push (temp);
889 if (REFERENCE_CLASS_P (ref)
890 || TREE_CODE (ref) == MODIFY_EXPR
891 || TREE_CODE (ref) == WITH_SIZE_EXPR
892 || (TREE_CODE (ref) == ADDR_EXPR
893 && !is_gimple_min_invariant (ref)))
894 ref = TREE_OPERAND (ref, 0);
895 else
896 ref = NULL_TREE;
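/* For instance (layout assumed): for the reference x.f, where X is a
   VAR_DECL and F sits at byte offset 4, the resulting vector is

     { COMPONENT_REF, op0 = f,       off = 4 }
     { MEM_REF,       op0 = (ptr) 0, off = 0 }
     { ADDR_EXPR,     op0 = &x,      off = -1 }

   i.e. the decl is canonicalized to MEM[&x] so that the same operands
   result when MEM[p_1] with p_1 = &x is looked up.  */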
 900 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
901 operands in *OPS, the reference alias set SET and the reference type TYPE.
902 Return true if something useful was produced. */
904 bool
905 ao_ref_init_from_vn_reference (ao_ref *ref,
906 alias_set_type set, tree type,
907 vec<vn_reference_op_s> ops)
909 vn_reference_op_t op;
910 unsigned i;
911 tree base = NULL_TREE;
912 tree *op0_p = &base;
913 HOST_WIDE_INT offset = 0;
914 HOST_WIDE_INT max_size;
915 HOST_WIDE_INT size = -1;
916 tree size_tree = NULL_TREE;
917 alias_set_type base_alias_set = -1;
919 /* First get the final access size from just the outermost expression. */
920 op = &ops[0];
921 if (op->opcode == COMPONENT_REF)
922 size_tree = DECL_SIZE (op->op0);
923 else if (op->opcode == BIT_FIELD_REF)
924 size_tree = op->op0;
925 else
927 enum machine_mode mode = TYPE_MODE (type);
928 if (mode == BLKmode)
929 size_tree = TYPE_SIZE (type);
930 else
931 size = GET_MODE_BITSIZE (mode);
933 if (size_tree != NULL_TREE)
935 if (!host_integerp (size_tree, 1))
936 size = -1;
937 else
938 size = TREE_INT_CST_LOW (size_tree);
941 /* Initially, maxsize is the same as the accessed element size.
942 In the following it will only grow (or become -1). */
943 max_size = size;
945 /* Compute cumulative bit-offset for nested component-refs and array-refs,
946 and find the ultimate containing object. */
947 FOR_EACH_VEC_ELT (ops, i, op)
949 switch (op->opcode)
951 /* These may be in the reference ops, but we cannot do anything
952 sensible with them here. */
953 case ADDR_EXPR:
954 /* Apart from ADDR_EXPR arguments to MEM_REF. */
955 if (base != NULL_TREE
956 && TREE_CODE (base) == MEM_REF
957 && op->op0
958 && DECL_P (TREE_OPERAND (op->op0, 0)))
960 vn_reference_op_t pop = &ops[i-1];
961 base = TREE_OPERAND (op->op0, 0);
962 if (pop->off == -1)
964 max_size = -1;
965 offset = 0;
967 else
968 offset += pop->off * BITS_PER_UNIT;
969 op0_p = NULL;
970 break;
972 /* Fallthru. */
973 case CALL_EXPR:
974 return false;
976 /* Record the base objects. */
977 case MEM_REF:
978 base_alias_set = get_deref_alias_set (op->op0);
979 *op0_p = build2 (MEM_REF, op->type,
980 NULL_TREE, op->op0);
981 op0_p = &TREE_OPERAND (*op0_p, 0);
982 break;
984 case VAR_DECL:
985 case PARM_DECL:
986 case RESULT_DECL:
987 case SSA_NAME:
988 *op0_p = op->op0;
989 op0_p = NULL;
990 break;
992 /* And now the usual component-reference style ops. */
993 case BIT_FIELD_REF:
994 offset += tree_low_cst (op->op1, 0);
995 break;
997 case COMPONENT_REF:
999 tree field = op->op0;
1000 /* We do not have a complete COMPONENT_REF tree here so we
1001 cannot use component_ref_field_offset. Do the interesting
1002 parts manually. */
1004 if (op->op1
1005 || !host_integerp (DECL_FIELD_OFFSET (field), 1))
1006 max_size = -1;
1007 else
1009 offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1010 * BITS_PER_UNIT);
1011 offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1013 break;
1016 case ARRAY_RANGE_REF:
1017 case ARRAY_REF:
1018 /* We recorded the lower bound and the element size. */
1019 if (!host_integerp (op->op0, 0)
1020 || !host_integerp (op->op1, 0)
1021 || !host_integerp (op->op2, 0))
1022 max_size = -1;
1023 else
1025 HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
1026 hindex -= TREE_INT_CST_LOW (op->op1);
1027 hindex *= TREE_INT_CST_LOW (op->op2);
1028 hindex *= BITS_PER_UNIT;
1029 offset += hindex;
1031 break;
1033 case REALPART_EXPR:
1034 break;
1036 case IMAGPART_EXPR:
1037 offset += size;
1038 break;
1040 case VIEW_CONVERT_EXPR:
1041 break;
1043 case STRING_CST:
1044 case INTEGER_CST:
1045 case COMPLEX_CST:
1046 case VECTOR_CST:
1047 case REAL_CST:
1048 case CONSTRUCTOR:
1049 case CONST_DECL:
1050 return false;
1052 default:
1053 return false;
1057 if (base == NULL_TREE)
1058 return false;
1060 ref->ref = NULL_TREE;
1061 ref->base = base;
1062 ref->offset = offset;
1063 ref->size = size;
1064 ref->max_size = max_size;
1065 ref->ref_alias_set = set;
1066 if (base_alias_set != -1)
1067 ref->base_alias_set = base_alias_set;
1068 else
1069 ref->base_alias_set = get_alias_set (base);
1070 /* We discount volatiles from value-numbering elsewhere. */
1071 ref->volatile_p = false;
1073 return true;
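/* Continuing the x.f example above (layout assumed, F a 32-bit field
   at byte offset 4): the loop produces base = X, offset = 32 bits and
   size = max_size = 32 bits.  The COMPONENT_REF contributes the field
   offset, the MEM_REF builds the base tree, and the ADDR_EXPR <&x>
   with a DECL operand replaces that MEM_REF base by the decl.  */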
1076 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1077 vn_reference_op_s's. */
1079 void
1080 copy_reference_ops_from_call (gimple call,
1081 vec<vn_reference_op_s> *result)
1083 vn_reference_op_s temp;
1084 unsigned i;
1085 tree lhs = gimple_call_lhs (call);
1087 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1088 different. By adding the lhs here in the vector, we ensure that the
1089 hashcode is different, guaranteeing a different value number. */
1090 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1092 memset (&temp, 0, sizeof (temp));
1093 temp.opcode = MODIFY_EXPR;
1094 temp.type = TREE_TYPE (lhs);
1095 temp.op0 = lhs;
1096 temp.off = -1;
1097 result->safe_push (temp);
1100 /* Copy the type, opcode, function being called and static chain. */
1101 memset (&temp, 0, sizeof (temp));
1102 temp.type = gimple_call_return_type (call);
1103 temp.opcode = CALL_EXPR;
1104 temp.op0 = gimple_call_fn (call);
1105 temp.op1 = gimple_call_chain (call);
1106 temp.off = -1;
1107 result->safe_push (temp);
1109 /* Copy the call arguments. As they can be references as well,
1110 just chain them together. */
1111 for (i = 0; i < gimple_call_num_args (call); ++i)
1113 tree callarg = gimple_call_arg (call, i);
1114 copy_reference_ops_from_ref (callarg, result);
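/* So for a call like s.x = foo (a_2, b_3) with a non-SSA lhs (names
   hypothetical), the vector starts with a MODIFY_EXPR operand for
   s.x, followed by a CALL_EXPR operand carrying the callee and static
   chain, followed by the reference operands of each argument chained
   one after the other.  */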
1118 /* Create a vector of vn_reference_op_s structures from CALL, a
1119 call statement. The vector is not shared. */
1121 static vec<vn_reference_op_s>
1122 create_reference_ops_from_call (gimple call)
1124 vec<vn_reference_op_s> result = vNULL;
1126 copy_reference_ops_from_call (call, &result);
1127 return result;
1130 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1131 *I_P to point to the last element of the replacement. */
1132 void
1133 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1134 unsigned int *i_p)
1136 unsigned int i = *i_p;
1137 vn_reference_op_t op = &(*ops)[i];
1138 vn_reference_op_t mem_op = &(*ops)[i - 1];
1139 tree addr_base;
1140 HOST_WIDE_INT addr_offset = 0;
1142 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1143 from .foo.bar to the preceding MEM_REF offset and replace the
1144 address with &OBJ. */
1145 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1146 &addr_offset);
1147 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1148 if (addr_base != TREE_OPERAND (op->op0, 0))
1150 double_int off = tree_to_double_int (mem_op->op0);
1151 off = off.sext (TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
1152 off += double_int::from_shwi (addr_offset);
1153 mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
1154 op->op0 = build_fold_addr_expr (addr_base);
1155 if (host_integerp (mem_op->op0, 0))
1156 mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
1157 else
1158 mem_op->off = -1;
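/* For example (offsets assumed): with the operand pair

     ..., { MEM_REF, op0 = 0 }, { ADDR_EXPR, op0 = &s.f }

   and .f at byte offset 4, the ADDR_EXPR is rewritten to &s and the
   MEM_REF offset becomes 4, yielding the canonical MEM[&s + 4B].  */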
1162 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1163 *I_P to point to the last element of the replacement. */
1164 static void
1165 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1166 unsigned int *i_p)
1168 unsigned int i = *i_p;
1169 vn_reference_op_t op = &(*ops)[i];
1170 vn_reference_op_t mem_op = &(*ops)[i - 1];
1171 gimple def_stmt;
1172 enum tree_code code;
1173 double_int off;
1175 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1176 if (!is_gimple_assign (def_stmt))
1177 return;
1179 code = gimple_assign_rhs_code (def_stmt);
1180 if (code != ADDR_EXPR
1181 && code != POINTER_PLUS_EXPR)
1182 return;
1184 off = tree_to_double_int (mem_op->op0);
1185 off = off.sext (TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
1187 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1188 from .foo.bar to the preceding MEM_REF offset and replace the
1189 address with &OBJ. */
1190 if (code == ADDR_EXPR)
1192 tree addr, addr_base;
1193 HOST_WIDE_INT addr_offset;
1195 addr = gimple_assign_rhs1 (def_stmt);
1196 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1197 &addr_offset);
1198 if (!addr_base
1199 || TREE_CODE (addr_base) != MEM_REF)
1200 return;
1202 off += double_int::from_shwi (addr_offset);
1203 off += mem_ref_offset (addr_base);
1204 op->op0 = TREE_OPERAND (addr_base, 0);
1206 else
1208 tree ptr, ptroff;
1209 ptr = gimple_assign_rhs1 (def_stmt);
1210 ptroff = gimple_assign_rhs2 (def_stmt);
1211 if (TREE_CODE (ptr) != SSA_NAME
1212 || TREE_CODE (ptroff) != INTEGER_CST)
1213 return;
1215 off += tree_to_double_int (ptroff);
1216 op->op0 = ptr;
1219 mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
1220 if (host_integerp (mem_op->op0, 0))
1221 mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
1222 else
1223 mem_op->off = -1;
1224 if (TREE_CODE (op->op0) == SSA_NAME)
1225 op->op0 = SSA_VAL (op->op0);
1226 if (TREE_CODE (op->op0) != SSA_NAME)
1227 op->opcode = TREE_CODE (op->op0);
1229 /* And recurse. */
1230 if (TREE_CODE (op->op0) == SSA_NAME)
1231 vn_reference_maybe_forwprop_address (ops, i_p);
1232 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1233 vn_reference_fold_indirect (ops, i_p);
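/* For example (hypothetical names): if the address operand is p_1 and
   p_1 is defined as p_1 = q_2 + 4 (a POINTER_PLUS_EXPR with constant
   offset), the operand pair MEM[p_1, 0] becomes MEM[q_2, 4] and the
   routine recurses on q_2 in case it can be forwarded further.  */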
1236 /* Optimize the reference REF to a constant if possible or return
1237 NULL_TREE if not. */
1239 tree
1240 fully_constant_vn_reference_p (vn_reference_t ref)
1242 vec<vn_reference_op_s> operands = ref->operands;
1243 vn_reference_op_t op;
1245 /* Try to simplify the translated expression if it is
1246 a call to a builtin function with at most two arguments. */
1247 op = &operands[0];
1248 if (op->opcode == CALL_EXPR
1249 && TREE_CODE (op->op0) == ADDR_EXPR
1250 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1251 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1252 && operands.length () >= 2
1253 && operands.length () <= 3)
1255 vn_reference_op_t arg0, arg1 = NULL;
1256 bool anyconst = false;
1257 arg0 = &operands[1];
1258 if (operands.length () > 2)
1259 arg1 = &operands[2];
1260 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1261 || (arg0->opcode == ADDR_EXPR
1262 && is_gimple_min_invariant (arg0->op0)))
1263 anyconst = true;
1264 if (arg1
1265 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1266 || (arg1->opcode == ADDR_EXPR
1267 && is_gimple_min_invariant (arg1->op0))))
1268 anyconst = true;
1269 if (anyconst)
1271 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1272 arg1 ? 2 : 1,
1273 arg0->op0,
1274 arg1 ? arg1->op0 : NULL);
1275 if (folded
1276 && TREE_CODE (folded) == NOP_EXPR)
1277 folded = TREE_OPERAND (folded, 0);
1278 if (folded
1279 && is_gimple_min_invariant (folded))
1280 return folded;
1284 /* Simplify reads from constant strings. */
1285 else if (op->opcode == ARRAY_REF
1286 && TREE_CODE (op->op0) == INTEGER_CST
1287 && integer_zerop (op->op1)
1288 && operands.length () == 2)
1290 vn_reference_op_t arg0;
1291 arg0 = &operands[1];
1292 if (arg0->opcode == STRING_CST
1293 && (TYPE_MODE (op->type)
1294 == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
1295 && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
1296 && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
1297 && tree_int_cst_sgn (op->op0) >= 0
1298 && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
1299 return build_int_cst_type (op->type,
1300 (TREE_STRING_POINTER (arg0->op0)
1301 [TREE_INT_CST_LOW (op->op0)]));
1304 return NULL_TREE;
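/* As an example of the second case, a one-byte read such as "abc"[1]
   from a constant string folds to the character constant 'b' when the
   index is a constant inside the string bounds and the access has the
   string's element mode.  In the first case, a builtin call whose
   valueized arguments turn out constant may likewise fold, e.g. a
   strlen of a string literal.  */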
1307 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1308 structures into their value numbers. This is done in-place, and
1309 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1310 whether any operands were valueized. */
1312 static vec<vn_reference_op_s>
1313 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1315 vn_reference_op_t vro;
1316 unsigned int i;
1318 *valueized_anything = false;
1320 FOR_EACH_VEC_ELT (orig, i, vro)
1322 if (vro->opcode == SSA_NAME
1323 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1325 tree tem = SSA_VAL (vro->op0);
1326 if (tem != vro->op0)
1328 *valueized_anything = true;
1329 vro->op0 = tem;
1331 /* If it transforms from an SSA_NAME to a constant, update
1332 the opcode. */
1333 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1334 vro->opcode = TREE_CODE (vro->op0);
1336 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1338 tree tem = SSA_VAL (vro->op1);
1339 if (tem != vro->op1)
1341 *valueized_anything = true;
1342 vro->op1 = tem;
1345 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1347 tree tem = SSA_VAL (vro->op2);
1348 if (tem != vro->op2)
1350 *valueized_anything = true;
1351 vro->op2 = tem;
1354 /* If it transforms from an SSA_NAME to an address, fold with
1355 a preceding indirect reference. */
1356 if (i > 0
1357 && vro->op0
1358 && TREE_CODE (vro->op0) == ADDR_EXPR
1359 && orig[i - 1].opcode == MEM_REF)
1360 vn_reference_fold_indirect (&orig, &i);
1361 else if (i > 0
1362 && vro->opcode == SSA_NAME
1363 && orig[i - 1].opcode == MEM_REF)
1364 vn_reference_maybe_forwprop_address (&orig, &i);
1365 /* If it transforms a non-constant ARRAY_REF into a constant
1366 one, adjust the constant offset. */
1367 else if (vro->opcode == ARRAY_REF
1368 && vro->off == -1
1369 && TREE_CODE (vro->op0) == INTEGER_CST
1370 && TREE_CODE (vro->op1) == INTEGER_CST
1371 && TREE_CODE (vro->op2) == INTEGER_CST)
1373 double_int off = tree_to_double_int (vro->op0);
1374 off += -tree_to_double_int (vro->op1);
1375 off *= tree_to_double_int (vro->op2);
1376 if (off.fits_shwi ())
1377 vro->off = off.low;
1381 return orig;
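/* For instance (hypothetical names): if an operand holds p_1 and
   SSA_VAL (p_1) is &a.f, the operand is rewritten to &a.f and then
   folded with the preceding MEM_REF by vn_reference_fold_indirect,
   moving the offset of .f into the MEM_REF offset.  If the operand
   remains an SSA name, its defining statement is inspected by
   vn_reference_maybe_forwprop_address instead.  */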
1384 static vec<vn_reference_op_s>
1385 valueize_refs (vec<vn_reference_op_s> orig)
1387 bool tem;
1388 return valueize_refs_1 (orig, &tem);
1391 static vec<vn_reference_op_s> shared_lookup_references;
1393 /* Create a vector of vn_reference_op_s structures from REF, a
1394 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1395 this function. *VALUEIZED_ANYTHING will specify whether any
1396 operands were valueized. */
1398 static vec<vn_reference_op_s>
1399 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1401 if (!ref)
1402 return vNULL;
1403 shared_lookup_references.truncate (0);
1404 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1405 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1406 valueized_anything);
1407 return shared_lookup_references;
1410 /* Create a vector of vn_reference_op_s structures from CALL, a
1411 call statement. The vector is shared among all callers of
1412 this function. */
1414 static vec<vn_reference_op_s>
1415 valueize_shared_reference_ops_from_call (gimple call)
1417 if (!call)
1418 return vNULL;
1419 shared_lookup_references.truncate (0);
1420 copy_reference_ops_from_call (call, &shared_lookup_references);
1421 shared_lookup_references = valueize_refs (shared_lookup_references);
1422 return shared_lookup_references;
1425 /* Lookup a SCCVN reference operation VR in the current hash table.
1426 Returns the resulting value number if it exists in the hash table,
1427 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1428 vn_reference_t stored in the hashtable if something is found. */
1430 static tree
1431 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1433 vn_reference_s **slot;
1434 hashval_t hash;
1436 hash = vr->hashcode;
1437 slot = current_info->references.find_slot_with_hash (vr, hash, NO_INSERT);
1438 if (!slot && current_info == optimistic_info)
1439 slot = valid_info->references.find_slot_with_hash (vr, hash, NO_INSERT);
1440 if (slot)
1442 if (vnresult)
1443 *vnresult = (vn_reference_t)*slot;
1444 return ((vn_reference_t)*slot)->result;
1447 return NULL_TREE;
1450 static tree *last_vuse_ptr;
1451 static vn_lookup_kind vn_walk_kind;
1452 static vn_lookup_kind default_vn_walk_kind;
1454 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1455 with the current VUSE and performs the expression lookup. */
1457 static void *
1458 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1459 unsigned int cnt, void *vr_)
1461 vn_reference_t vr = (vn_reference_t)vr_;
1462 vn_reference_s **slot;
1463 hashval_t hash;
1465 /* This bounds the stmt walks we perform on reference lookups
1466 to O(1) instead of O(N) where N is the number of dominating
1467 stores. */
1468 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1469 return (void *)-1;
1471 if (last_vuse_ptr)
1472 *last_vuse_ptr = vuse;
1474 /* Fixup vuse and hash. */
1475 if (vr->vuse)
1476 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1477 vr->vuse = SSA_VAL (vuse);
1478 if (vr->vuse)
1479 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1481 hash = vr->hashcode;
1482 slot = current_info->references.find_slot_with_hash (vr, hash, NO_INSERT);
1483 if (!slot && current_info == optimistic_info)
1484 slot = valid_info->references.find_slot_with_hash (vr, hash, NO_INSERT);
1485 if (slot)
1486 return *slot;
1488 return NULL;
1491 /* Lookup an existing or insert a new vn_reference entry into the
 1492    value table for the VUSE, SET, TYPE, OPERANDS reference that
 1493    has the value VALUE, which is either a constant or an SSA name.  */
1495 static vn_reference_t
1496 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1497 alias_set_type set,
1498 tree type,
1499 vec<vn_reference_op_s,
1500 va_heap> operands,
1501 tree value)
1503 struct vn_reference_s vr1;
1504 vn_reference_t result;
1505 unsigned value_id;
1506 vr1.vuse = vuse;
1507 vr1.operands = operands;
1508 vr1.type = type;
1509 vr1.set = set;
1510 vr1.hashcode = vn_reference_compute_hash (&vr1);
1511 if (vn_reference_lookup_1 (&vr1, &result))
1512 return result;
1513 if (TREE_CODE (value) == SSA_NAME)
1514 value_id = VN_INFO (value)->value_id;
1515 else
1516 value_id = get_or_alloc_constant_value_id (value);
1517 return vn_reference_insert_pieces (vuse, set, type,
1518 operands.copy (), value, value_id);
1521 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1522 from the statement defining VUSE and if not successful tries to
1523 translate *REFP and VR_ through an aggregate copy at the definition
1524 of VUSE. */
1526 static void *
1527 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
1529 vn_reference_t vr = (vn_reference_t)vr_;
1530 gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
1531 tree base;
1532 HOST_WIDE_INT offset, maxsize;
1533 static vec<vn_reference_op_s>
1534 lhs_ops = vNULL;
1535 ao_ref lhs_ref;
1536 bool lhs_ref_ok = false;
1538 /* First try to disambiguate after value-replacing in the definitions LHS. */
1539 if (is_gimple_assign (def_stmt))
1541 vec<vn_reference_op_s> tem;
1542 tree lhs = gimple_assign_lhs (def_stmt);
1543 bool valueized_anything = false;
1544 /* Avoid re-allocation overhead. */
1545 lhs_ops.truncate (0);
1546 copy_reference_ops_from_ref (lhs, &lhs_ops);
1547 tem = lhs_ops;
1548 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1549 gcc_assert (lhs_ops == tem);
1550 if (valueized_anything)
1552 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1553 get_alias_set (lhs),
1554 TREE_TYPE (lhs), lhs_ops);
1555 if (lhs_ref_ok
1556 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1557 return NULL;
1559 else
1561 ao_ref_init (&lhs_ref, lhs);
1562 lhs_ref_ok = true;
1566 base = ao_ref_base (ref);
1567 offset = ref->offset;
1568 maxsize = ref->max_size;
1570 /* If we cannot constrain the size of the reference we cannot
1571 test if anything kills it. */
1572 if (maxsize == -1)
1573 return (void *)-1;
1575 /* We can't deduce anything useful from clobbers. */
1576 if (gimple_clobber_p (def_stmt))
1577 return (void *)-1;
1579 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1580 from that definition.
1581 1) Memset. */
1582 if (is_gimple_reg_type (vr->type)
1583 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1584 && integer_zerop (gimple_call_arg (def_stmt, 1))
1585 && host_integerp (gimple_call_arg (def_stmt, 2), 1)
1586 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1588 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1589 tree base2;
1590 HOST_WIDE_INT offset2, size2, maxsize2;
1591 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
1592 size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
1593 if ((unsigned HOST_WIDE_INT)size2 / 8
1594 == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
1595 && maxsize2 != -1
1596 && operand_equal_p (base, base2, 0)
1597 && offset2 <= offset
1598 && offset2 + size2 >= offset + maxsize)
1600 tree val = build_zero_cst (vr->type);
1601 return vn_reference_lookup_or_insert_for_pieces
1602 (vuse, vr->set, vr->type, vr->operands, val);
1606 /* 2) Assignment from an empty CONSTRUCTOR. */
1607 else if (is_gimple_reg_type (vr->type)
1608 && gimple_assign_single_p (def_stmt)
1609 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1610 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1612 tree base2;
1613 HOST_WIDE_INT offset2, size2, maxsize2;
1614 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1615 &offset2, &size2, &maxsize2);
1616 if (maxsize2 != -1
1617 && operand_equal_p (base, base2, 0)
1618 && offset2 <= offset
1619 && offset2 + size2 >= offset + maxsize)
1621 tree val = build_zero_cst (vr->type);
1622 return vn_reference_lookup_or_insert_for_pieces
1623 (vuse, vr->set, vr->type, vr->operands, val);
 1627   /* 3) Assignment from a constant.  We can use fold's native encode/interpret
1628 routines to extract the assigned bits. */
1629 else if (vn_walk_kind == VN_WALKREWRITE
1630 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1631 && ref->size == maxsize
1632 && maxsize % BITS_PER_UNIT == 0
1633 && offset % BITS_PER_UNIT == 0
1634 && is_gimple_reg_type (vr->type)
1635 && gimple_assign_single_p (def_stmt)
1636 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
1638 tree base2;
1639 HOST_WIDE_INT offset2, size2, maxsize2;
1640 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1641 &offset2, &size2, &maxsize2);
1642 if (maxsize2 != -1
1643 && maxsize2 == size2
1644 && size2 % BITS_PER_UNIT == 0
1645 && offset2 % BITS_PER_UNIT == 0
1646 && operand_equal_p (base, base2, 0)
1647 && offset2 <= offset
1648 && offset2 + size2 >= offset + maxsize)
1650 /* We support up to 512-bit values (for V8DFmode). */
1651 unsigned char buffer[64];
1652 int len;
1654 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1655 buffer, sizeof (buffer));
1656 if (len > 0)
1658 tree val = native_interpret_expr (vr->type,
1659 buffer
1660 + ((offset - offset2)
1661 / BITS_PER_UNIT),
1662 ref->size / BITS_PER_UNIT);
1663 if (val)
1664 return vn_reference_lookup_or_insert_for_pieces
1665 (vuse, vr->set, vr->type, vr->operands, val);
 1670   /* 4) Assignment from an SSA name whose definition we may be able
1671 to access pieces from. */
1672 else if (ref->size == maxsize
1673 && is_gimple_reg_type (vr->type)
1674 && gimple_assign_single_p (def_stmt)
1675 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1677 tree rhs1 = gimple_assign_rhs1 (def_stmt);
1678 gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
1679 if (is_gimple_assign (def_stmt2)
1680 && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1681 || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1682 && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1684 tree base2;
1685 HOST_WIDE_INT offset2, size2, maxsize2, off;
1686 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1687 &offset2, &size2, &maxsize2);
1688 off = offset - offset2;
1689 if (maxsize2 != -1
1690 && maxsize2 == size2
1691 && operand_equal_p (base, base2, 0)
1692 && offset2 <= offset
1693 && offset2 + size2 >= offset + maxsize)
1695 tree val = NULL_TREE;
1696 HOST_WIDE_INT elsz
1697 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
1698 if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
1700 if (off == 0)
1701 val = gimple_assign_rhs1 (def_stmt2);
1702 else if (off == elsz)
1703 val = gimple_assign_rhs2 (def_stmt2);
1705 else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1706 && off % elsz == 0)
1708 tree ctor = gimple_assign_rhs1 (def_stmt2);
1709 unsigned i = off / elsz;
1710 if (i < CONSTRUCTOR_NELTS (ctor))
1712 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
1713 if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
1715 if (TREE_CODE (TREE_TYPE (elt->value))
1716 != VECTOR_TYPE)
1717 val = elt->value;
1721 if (val)
1722 return vn_reference_lookup_or_insert_for_pieces
1723 (vuse, vr->set, vr->type, vr->operands, val);
1728 /* 5) For aggregate copies translate the reference through them if
1729 the copy kills ref. */
1730 else if (vn_walk_kind == VN_WALKREWRITE
1731 && gimple_assign_single_p (def_stmt)
1732 && (DECL_P (gimple_assign_rhs1 (def_stmt))
1733 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
1734 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1736 tree base2;
1737 HOST_WIDE_INT offset2, size2, maxsize2;
1738 int i, j;
1739 vec<vn_reference_op_s>
1740 rhs = vNULL;
1741 vn_reference_op_t vro;
1742 ao_ref r;
1744 if (!lhs_ref_ok)
1745 return (void *)-1;
1747 /* See if the assignment kills REF. */
1748 base2 = ao_ref_base (&lhs_ref);
1749 offset2 = lhs_ref.offset;
1750 size2 = lhs_ref.size;
1751 maxsize2 = lhs_ref.max_size;
1752 if (maxsize2 == -1
1753 || (base != base2 && !operand_equal_p (base, base2, 0))
1754 || offset2 > offset
1755 || offset2 + size2 < offset + maxsize)
1756 return (void *)-1;
1758 /* Find the common base of ref and the lhs. lhs_ops already
1759 contains valueized operands for the lhs. */
1760 i = vr->operands.length () - 1;
1761 j = lhs_ops.length () - 1;
1762 while (j >= 0 && i >= 0
1763 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
1765 i--;
1766 j--;
1769 /* ??? The innermost op should always be a MEM_REF and we already
1770 checked that the assignment to the lhs kills vr. Thus for
1771 aggregate copies using char[] types the vn_reference_op_eq
1772 may fail when comparing types for compatibility. But we really
1773 don't care here - further lookups with the rewritten operands
1774 will simply fail if we messed up types too badly. */
1775 if (j == 0 && i >= 0
1776 && lhs_ops[0].opcode == MEM_REF
1777 && lhs_ops[0].off != -1
1778 && (lhs_ops[0].off == vr->operands[i].off))
1779 i--, j--;
1781 /* i now points to the first additional op.
1782 ??? LHS may not be completely contained in VR, one or more
1783 VIEW_CONVERT_EXPRs could be in its way. We could at least
1784 try handling outermost VIEW_CONVERT_EXPRs. */
1785 if (j != -1)
1786 return (void *)-1;
1788 /* Now re-write REF to be based on the rhs of the assignment. */
1789 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1790 /* We need to pre-pend vr->operands[0..i] to rhs. */
1791 if (i + 1 + rhs.length () > vr->operands.length ())
1793 vec<vn_reference_op_s> old = vr->operands;
1794 vr->operands.safe_grow (i + 1 + rhs.length ());
1795 if (old == shared_lookup_references
1796 && vr->operands != old)
1797 shared_lookup_references = vNULL;
1799 else
1800 vr->operands.truncate (i + 1 + rhs.length ());
1801 FOR_EACH_VEC_ELT (rhs, j, vro)
1802 vr->operands[i + 1 + j] = *vro;
1803 rhs.release ();
1804 vr->operands = valueize_refs (vr->operands);
1805 vr->hashcode = vn_reference_compute_hash (vr);
1807 /* Adjust *ref from the new operands. */
1808 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1809 return (void *)-1;
1810 /* This can happen with bitfields. */
1811 if (ref->size != r.size)
1812 return (void *)-1;
1813 *ref = r;
1815 /* Do not update last seen VUSE after translating. */
1816 last_vuse_ptr = NULL;
1818 /* Keep looking for the adjusted *REF / VR pair. */
1819 return NULL;
1822 /* 6) For memcpy copies translate the reference through them if
1823 the copy kills ref. */
1824 else if (vn_walk_kind == VN_WALKREWRITE
1825 && is_gimple_reg_type (vr->type)
1826 /* ??? Handle BCOPY as well. */
1827 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
1828 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
1829 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
1830 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
1831 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
1832 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
1833 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
1834 && host_integerp (gimple_call_arg (def_stmt, 2), 1))
1836 tree lhs, rhs;
1837 ao_ref r;
1838 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
1839 vn_reference_op_s op;
1840 HOST_WIDE_INT at;
1843 /* Only handle non-variable, addressable refs. */
1844 if (ref->size != maxsize
1845 || offset % BITS_PER_UNIT != 0
1846 || ref->size % BITS_PER_UNIT != 0)
1847 return (void *)-1;
1849 /* Extract a pointer base and an offset for the destination. */
1850 lhs = gimple_call_arg (def_stmt, 0);
1851 lhs_offset = 0;
1852 if (TREE_CODE (lhs) == SSA_NAME)
1853 lhs = SSA_VAL (lhs);
1854 if (TREE_CODE (lhs) == ADDR_EXPR)
1856 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
1857 &lhs_offset);
1858 if (!tem)
1859 return (void *)-1;
1860 if (TREE_CODE (tem) == MEM_REF
1861 && host_integerp (TREE_OPERAND (tem, 1), 1))
1863 lhs = TREE_OPERAND (tem, 0);
1864 lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
1866 else if (DECL_P (tem))
1867 lhs = build_fold_addr_expr (tem);
1868 else
1869 return (void *)-1;
1871 if (TREE_CODE (lhs) != SSA_NAME
1872 && TREE_CODE (lhs) != ADDR_EXPR)
1873 return (void *)-1;
1875 /* Extract a pointer base and an offset for the source. */
1876 rhs = gimple_call_arg (def_stmt, 1);
1877 rhs_offset = 0;
1878 if (TREE_CODE (rhs) == SSA_NAME)
1879 rhs = SSA_VAL (rhs);
1880 if (TREE_CODE (rhs) == ADDR_EXPR)
1882 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
1883 &rhs_offset);
1884 if (!tem)
1885 return (void *)-1;
1886 if (TREE_CODE (tem) == MEM_REF
1887 && host_integerp (TREE_OPERAND (tem, 1), 1))
1889 rhs = TREE_OPERAND (tem, 0);
1890 rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
1892 else if (DECL_P (tem))
1893 rhs = build_fold_addr_expr (tem);
1894 else
1895 return (void *)-1;
1897 if (TREE_CODE (rhs) != SSA_NAME
1898 && TREE_CODE (rhs) != ADDR_EXPR)
1899 return (void *)-1;
1901 copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));
1903 /* The bases of the destination and the references have to agree. */
1904 if ((TREE_CODE (base) != MEM_REF
1905 && !DECL_P (base))
1906 || (TREE_CODE (base) == MEM_REF
1907 && (TREE_OPERAND (base, 0) != lhs
1908 || !host_integerp (TREE_OPERAND (base, 1), 1)))
1909 || (DECL_P (base)
1910 && (TREE_CODE (lhs) != ADDR_EXPR
1911 || TREE_OPERAND (lhs, 0) != base)))
1912 return (void *)-1;
1914 /* And the access has to be contained within the memcpy destination. */
1915 at = offset / BITS_PER_UNIT;
1916 if (TREE_CODE (base) == MEM_REF)
1917 at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
1918 if (lhs_offset > at
1919 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
1920 return (void *)-1;
1922 /* Make room for 2 operands in the new reference. */
1923 if (vr->operands.length () < 2)
1925 vec<vn_reference_op_s> old = vr->operands;
1926 vr->operands.safe_grow_cleared (2);
1927 if (old == shared_lookup_references
1928 && vr->operands != old)
1929 shared_lookup_references.create (0);
1931 else
1932 vr->operands.truncate (2);
1934 /* The looked-through reference is a simple MEM_REF. */
1935 memset (&op, 0, sizeof (op));
1936 op.type = vr->type;
1937 op.opcode = MEM_REF;
 1938       op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
1939 op.off = at - lhs_offset + rhs_offset;
1940 vr->operands[0] = op;
1941 op.type = TREE_TYPE (rhs);
1942 op.opcode = TREE_CODE (rhs);
1943 op.op0 = rhs;
1944 op.off = -1;
1945 vr->operands[1] = op;
1946 vr->hashcode = vn_reference_compute_hash (vr);
1948 /* Adjust *ref from the new operands. */
1949 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1950 return (void *)-1;
1951 /* This can happen with bitfields. */
1952 if (ref->size != r.size)
1953 return (void *)-1;
1954 *ref = r;
1956 /* Do not update last seen VUSE after translating. */
1957 last_vuse_ptr = NULL;
1959 /* Keep looking for the adjusted *REF / VR pair. */
1960 return NULL;
1963 /* Bail out and stop walking. */
1964 return (void *)-1;
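/* Two illustrations of the translations above (hypothetical code):
   given

     memset (&a, 0, sizeof (a));
     ...
     x_1 = a.f;

   case 1 records the value 0 for the load when the memset covers a.f
   entirely.  For an aggregate copy

     b = a;
     ...
     x_1 = b.f;

   case 5 rewrites the lookup of b.f into a lookup of a.f and the walk
   continues from there.  */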
 1967 /* Lookup a reference operation by its parts, in the current hash table.
1968 Returns the resulting value number if it exists in the hash table,
1969 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1970 vn_reference_t stored in the hashtable if something is found. */
1972 tree
1973 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
1974 vec<vn_reference_op_s> operands,
1975 vn_reference_t *vnresult, vn_lookup_kind kind)
1977 struct vn_reference_s vr1;
1978 vn_reference_t tmp;
1979 tree cst;
1981 if (!vnresult)
1982 vnresult = &tmp;
1983 *vnresult = NULL;
1985 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1986 shared_lookup_references.truncate (0);
1987 shared_lookup_references.safe_grow (operands.length ());
1988 memcpy (shared_lookup_references.address (),
1989 operands.address (),
1990 sizeof (vn_reference_op_s)
1991 * operands.length ());
1992 vr1.operands = operands = shared_lookup_references
1993 = valueize_refs (shared_lookup_references);
1994 vr1.type = type;
1995 vr1.set = set;
1996 vr1.hashcode = vn_reference_compute_hash (&vr1);
1997 if ((cst = fully_constant_vn_reference_p (&vr1)))
1998 return cst;
2000 vn_reference_lookup_1 (&vr1, vnresult);
2001 if (!*vnresult
2002 && kind != VN_NOWALK
2003 && vr1.vuse)
2005 ao_ref r;
2006 vn_walk_kind = kind;
2007 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2008 *vnresult =
2009 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2010 vn_reference_lookup_2,
2011 vn_reference_lookup_3, &vr1);
2012 if (vr1.operands != operands)
2013 vr1.operands.release ();
2016 if (*vnresult)
2017 return (*vnresult)->result;
2019 return NULL_TREE;
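/* Usage sketch (illustrative only; OPS, TYPE, SET and VUSE are
   hypothetical locals of a caller that already has the operand
   pieces of a reference):

     vn_reference_t res = NULL;
     tree val = vn_reference_lookup_pieces (vuse, set, type, ops,
					    &res, default_vn_walk_kind);

   VAL is the recorded value number or NULL_TREE; passing VN_NOWALK
   instead of a walking kind restricts the lookup to the hash table
   and skips the alias-oracle walk over the virtual use-def chain.  */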
2022 /* Lookup OP in the current hash table, and return the resulting value
2023 number if it exists in the hash table. Return NULL_TREE if it does
2024 not exist in the hash table or if the result field of the structure
2025 was NULL. VNRESULT will be filled in with the vn_reference_t
2026 stored in the hashtable if one exists. */
2028 tree
2029 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2030 vn_reference_t *vnresult)
2032 vec<vn_reference_op_s> operands;
2033 struct vn_reference_s vr1;
2034 tree cst;
2035 bool valuezied_anything;
2037 if (vnresult)
2038 *vnresult = NULL;
2040 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2041 vr1.operands = operands
2042 = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
2043 vr1.type = TREE_TYPE (op);
2044 vr1.set = get_alias_set (op);
2045 vr1.hashcode = vn_reference_compute_hash (&vr1);
2046 if ((cst = fully_constant_vn_reference_p (&vr1)))
2047 return cst;
2049 if (kind != VN_NOWALK
2050 && vr1.vuse)
2052 vn_reference_t wvnresult;
2053 ao_ref r;
2054 /* Make sure to use a valueized reference if we valueized anything.
2055 Otherwise preserve the full reference for advanced TBAA. */
2056 if (!valuezied_anything
2057 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2058 vr1.operands))
2059 ao_ref_init (&r, op);
2060 vn_walk_kind = kind;
2061 wvnresult =
2062 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2063 vn_reference_lookup_2,
2064 vn_reference_lookup_3, &vr1);
2065 if (vr1.operands != operands)
2066 vr1.operands.release ();
2067 if (wvnresult)
2069 if (vnresult)
2070 *vnresult = wvnresult;
2071 return wvnresult->result;
2074 return NULL_TREE;
2077 return vn_reference_lookup_1 (&vr1, vnresult);
2081 /* Insert OP into the current hash table with a value number of
2082 RESULT, and return the resulting reference structure we created. */
2084 vn_reference_t
2085 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2087 vn_reference_s **slot;
2088 vn_reference_t vr1;
2089 bool tem;
2091 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2092 if (TREE_CODE (result) == SSA_NAME)
2093 vr1->value_id = VN_INFO (result)->value_id;
2094 else
2095 vr1->value_id = get_or_alloc_constant_value_id (result);
2096 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2097 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2098 vr1->type = TREE_TYPE (op);
2099 vr1->set = get_alias_set (op);
2100 vr1->hashcode = vn_reference_compute_hash (vr1);
2101 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2102 vr1->result_vdef = vdef;
2104 slot = current_info->references.find_slot_with_hash (vr1, vr1->hashcode,
2105 INSERT);
2107 /* Because we look up stores using vuses, and value number failures
2108 using the vdefs (see visit_reference_op_store for how and why),
2109 it's possible that on failure we may try to insert an already
2110 inserted store. This is not wrong; there is no SSA name for a
2111 store that we could use as a differentiator anyway. Thus, unlike
2112 the other lookup functions, you cannot gcc_assert (!*slot)
2113 here. */
2115 /* But free the old slot in case of a collision. */
2116 if (*slot)
2117 free_reference (*slot);
2119 *slot = vr1;
2120 return vr1;
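/* Usage sketch (illustrative only; OP, LHS and VUSE are hypothetical):
   the common pattern pairs a failed lookup with an insertion so that
   the next equivalent load reuses LHS:

     if (!vn_reference_lookup (op, vuse, default_vn_walk_kind, NULL))
       vn_reference_insert (op, lhs, vuse, NULL_TREE);

   which is essentially what visit_reference_op_load below does.  */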
2123 /* Insert a reference by its pieces into the current hash table with
2124 a value number of RESULT. Return the resulting reference
2125 structure we created. */
2127 vn_reference_t
2128 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2129 vec<vn_reference_op_s> operands,
2130 tree result, unsigned int value_id)
2133 vn_reference_s **slot;
2134 vn_reference_t vr1;
2136 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2137 vr1->value_id = value_id;
2138 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2139 vr1->operands = valueize_refs (operands);
2140 vr1->type = type;
2141 vr1->set = set;
2142 vr1->hashcode = vn_reference_compute_hash (vr1);
2143 if (result && TREE_CODE (result) == SSA_NAME)
2144 result = SSA_VAL (result);
2145 vr1->result = result;
2147 slot = current_info->references.find_slot_with_hash (vr1, vr1->hashcode,
2148 INSERT);
2150 /* At this point we should have all the things inserted that we have
2151 seen before, and we should never try inserting something that
2152 already exists. */
2153 gcc_assert (!*slot);
2154 if (*slot)
2155 free_reference (*slot);
2157 *slot = vr1;
2158 return vr1;
2161 /* Compute and return the hash value for nary operation VNO1. */
2163 hashval_t
2164 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2166 hashval_t hash;
2167 unsigned i;
2169 for (i = 0; i < vno1->length; ++i)
2170 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2171 vno1->op[i] = SSA_VAL (vno1->op[i]);
2173 if (vno1->length == 2
2174 && commutative_tree_code (vno1->opcode)
2175 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2177 tree temp = vno1->op[0];
2178 vno1->op[0] = vno1->op[1];
2179 vno1->op[1] = temp;
2182 hash = iterative_hash_hashval_t (vno1->opcode, 0);
2183 for (i = 0; i < vno1->length; ++i)
2184 hash = iterative_hash_expr (vno1->op[i], hash);
2186 return hash;
2189 /* Compare nary operations VNO1 and VNO2 and return true if they are
2190 equivalent. */
2192 bool
2193 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2195 unsigned i;
2197 if (vno1->hashcode != vno2->hashcode)
2198 return false;
2200 if (vno1->length != vno2->length)
2201 return false;
2203 if (vno1->opcode != vno2->opcode
2204 || !types_compatible_p (vno1->type, vno2->type))
2205 return false;
2207 for (i = 0; i < vno1->length; ++i)
2208 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2209 return false;
2211 return true;
2214 /* Initialize VNO from the pieces provided. */
2216 static void
2217 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2218 enum tree_code code, tree type, tree *ops)
2220 vno->opcode = code;
2221 vno->length = length;
2222 vno->type = type;
2223 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2226 /* Initialize VNO from OP. */
2228 static void
2229 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2231 unsigned i;
2233 vno->opcode = TREE_CODE (op);
2234 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2235 vno->type = TREE_TYPE (op);
2236 for (i = 0; i < vno->length; ++i)
2237 vno->op[i] = TREE_OPERAND (op, i);
2240 /* Return the number of operands for a vn_nary ops structure from STMT. */
2242 static unsigned int
2243 vn_nary_length_from_stmt (gimple stmt)
2245 switch (gimple_assign_rhs_code (stmt))
2247 case REALPART_EXPR:
2248 case IMAGPART_EXPR:
2249 case VIEW_CONVERT_EXPR:
2250 return 1;
2252 case BIT_FIELD_REF:
2253 return 3;
2255 case CONSTRUCTOR:
2256 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2258 default:
2259 return gimple_num_ops (stmt) - 1;
2263 /* Initialize VNO from STMT. */
2265 static void
2266 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2268 unsigned i;
2270 vno->opcode = gimple_assign_rhs_code (stmt);
2271 vno->type = gimple_expr_type (stmt);
2272 switch (vno->opcode)
2274 case REALPART_EXPR:
2275 case IMAGPART_EXPR:
2276 case VIEW_CONVERT_EXPR:
2277 vno->length = 1;
2278 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2279 break;
2281 case BIT_FIELD_REF:
2282 vno->length = 3;
2283 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2284 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2285 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2286 break;
2288 case CONSTRUCTOR:
2289 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2290 for (i = 0; i < vno->length; ++i)
2291 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2292 break;
2294 default:
2295 gcc_checking_assert (!gimple_assign_single_p (stmt));
2296 vno->length = gimple_num_ops (stmt) - 1;
2297 for (i = 0; i < vno->length; ++i)
2298 vno->op[i] = gimple_op (stmt, i + 1);
2302 /* Compute the hashcode for VNO and look for it in the hash table;
2303 return the resulting value number if it exists in the hash table.
2304 Return NULL_TREE if it does not exist in the hash table or if the
2305 result field of the operation is NULL. VNRESULT will contain the
2306 vn_nary_op_t from the hashtable if it exists. */
2308 static tree
2309 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2311 vn_nary_op_s **slot;
2313 if (vnresult)
2314 *vnresult = NULL;
2316 vno->hashcode = vn_nary_op_compute_hash (vno);
2317 slot = current_info->nary.find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
2318 if (!slot && current_info == optimistic_info)
2319 slot = valid_info->nary.find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
2320 if (!slot)
2321 return NULL_TREE;
2322 if (vnresult)
2323 *vnresult = *slot;
2324 return (*slot)->result;
2327 /* Lookup an n-ary operation by its pieces and return the resulting value
2328 number if it exists in the hash table. Return NULL_TREE if it does
2329 not exist in the hash table or if the result field of the operation
2330 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2331 if it exists. */
2333 tree
2334 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2335 tree type, tree *ops, vn_nary_op_t *vnresult)
2337 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2338 sizeof_vn_nary_op (length));
2339 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2340 return vn_nary_op_lookup_1 (vno1, vnresult);
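/* Usage sketch (illustrative only; A_1 and B_2 are hypothetical SSA
   names):

     tree ops[2] = { a_1, b_2 };
     tree val = vn_nary_op_lookup_pieces (2, PLUS_EXPR, TREE_TYPE (a_1),
					  ops, NULL);

   returns the value number previously recorded for A_1 + B_2, or
   NULL_TREE if no equivalent n-ary operation has been inserted yet.  */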
2343 /* Lookup OP in the current hash table, and return the resulting value
2344 number if it exists in the hash table. Return NULL_TREE if it does
2345 not exist in the hash table or if the result field of the operation
2346 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2347 if it exists. */
2349 tree
2350 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2352 vn_nary_op_t vno1
2353 = XALLOCAVAR (struct vn_nary_op_s,
2354 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2355 init_vn_nary_op_from_op (vno1, op);
2356 return vn_nary_op_lookup_1 (vno1, vnresult);
2359 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2360 value number if it exists in the hash table. Return NULL_TREE if
2361 it does not exist in the hash table. VNRESULT will contain the
2362 vn_nary_op_t from the hashtable if it exists. */
2364 tree
2365 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2367 vn_nary_op_t vno1
2368 = XALLOCAVAR (struct vn_nary_op_s,
2369 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2370 init_vn_nary_op_from_stmt (vno1, stmt);
2371 return vn_nary_op_lookup_1 (vno1, vnresult);
2374 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2376 static vn_nary_op_t
2377 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2379 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2382 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2383 obstack. */
2385 static vn_nary_op_t
2386 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2388 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2389 &current_info->nary_obstack);
2391 vno1->value_id = value_id;
2392 vno1->length = length;
2393 vno1->result = result;
2395 return vno1;
2398 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2399 VNO->HASHCODE first. */
2401 static vn_nary_op_t
2402 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type table,
2403 bool compute_hash)
2405 vn_nary_op_s **slot;
2407 if (compute_hash)
2408 vno->hashcode = vn_nary_op_compute_hash (vno);
2410 slot = table.find_slot_with_hash (vno, vno->hashcode, INSERT);
2411 gcc_assert (!*slot);
2413 *slot = vno;
2414 return vno;
2417 /* Insert an n-ary operation into the current hash table using its
2418 pieces. Return the vn_nary_op_t structure we created and put in
2419 the hashtable. */
2421 vn_nary_op_t
2422 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2423 tree type, tree *ops,
2424 tree result, unsigned int value_id)
2426 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2427 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2428 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2431 /* Insert OP into the current hash table with a value number of
2432 RESULT. Return the vn_nary_op_t structure we created and put in
2433 the hashtable. */
2435 vn_nary_op_t
2436 vn_nary_op_insert (tree op, tree result)
2438 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2439 vn_nary_op_t vno1;
2441 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2442 init_vn_nary_op_from_op (vno1, op);
2443 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2446 /* Insert the rhs of STMT into the current hash table with a value number of
2447 RESULT. */
2449 vn_nary_op_t
2450 vn_nary_op_insert_stmt (gimple stmt, tree result)
2452 vn_nary_op_t vno1
2453 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2454 result, VN_INFO (result)->value_id);
2455 init_vn_nary_op_from_stmt (vno1, stmt);
2456 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2459 /* Compute a hashcode for PHI operation VP1 and return it. */
2461 static inline hashval_t
2462 vn_phi_compute_hash (vn_phi_t vp1)
2464 hashval_t result;
2465 int i;
2466 tree phi1op;
2467 tree type;
2469 result = vp1->block->index;
2471 /* If all PHI arguments are constants we need to distinguish
2472 the PHI node via its type. */
2473 type = vp1->type;
2474 result += vn_hash_type (type);
2476 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2478 if (phi1op == VN_TOP)
2479 continue;
2480 result = iterative_hash_expr (phi1op, result);
2483 return result;
2486 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2488 static int
2489 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2491 if (vp1->hashcode != vp2->hashcode)
2492 return false;
2494 if (vp1->block == vp2->block)
2496 int i;
2497 tree phi1op;
2499 /* If the PHI nodes do not have compatible types
2500 they are not the same. */
2501 if (!types_compatible_p (vp1->type, vp2->type))
2502 return false;
2504 /* Any phi in the same block will have its arguments in the
2505 same edge order, because of how we store phi nodes. */
2506 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2508 tree phi2op = vp2->phiargs[i];
2509 if (phi1op == VN_TOP || phi2op == VN_TOP)
2510 continue;
2511 if (!expressions_equal_p (phi1op, phi2op))
2512 return false;
2514 return true;
2516 return false;
2519 static vec<tree> shared_lookup_phiargs;
2521 /* Lookup PHI in the current hash table, and return the resulting
2522 value number if it exists in the hash table. Return NULL_TREE if
2523 it does not exist in the hash table. */
2525 static tree
2526 vn_phi_lookup (gimple phi)
2528 vn_phi_s **slot;
2529 struct vn_phi_s vp1;
2530 unsigned i;
2532 shared_lookup_phiargs.truncate (0);
2534 /* Canonicalize the SSA_NAME's to their value number. */
2535 for (i = 0; i < gimple_phi_num_args (phi); i++)
2537 tree def = PHI_ARG_DEF (phi, i);
2538 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2539 shared_lookup_phiargs.safe_push (def);
2541 vp1.type = TREE_TYPE (gimple_phi_result (phi));
2542 vp1.phiargs = shared_lookup_phiargs;
2543 vp1.block = gimple_bb (phi);
2544 vp1.hashcode = vn_phi_compute_hash (&vp1);
2545 slot = current_info->phis.find_slot_with_hash (&vp1, vp1.hashcode, NO_INSERT);
2546 if (!slot && current_info == optimistic_info)
2547 slot = valid_info->phis.find_slot_with_hash (&vp1, vp1.hashcode, NO_INSERT);
2548 if (!slot)
2549 return NULL_TREE;
2550 return (*slot)->result;
2553 /* Insert PHI into the current hash table with a value number of
2554 RESULT. */
2556 static vn_phi_t
2557 vn_phi_insert (gimple phi, tree result)
2559 vn_phi_s **slot;
2560 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2561 unsigned i;
2562 vec<tree> args = vNULL;
2564 /* Canonicalize the SSA_NAME's to their value number. */
2565 for (i = 0; i < gimple_phi_num_args (phi); i++)
2567 tree def = PHI_ARG_DEF (phi, i);
2568 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2569 args.safe_push (def);
2571 vp1->value_id = VN_INFO (result)->value_id;
2572 vp1->type = TREE_TYPE (gimple_phi_result (phi));
2573 vp1->phiargs = args;
2574 vp1->block = gimple_bb (phi);
2575 vp1->result = result;
2576 vp1->hashcode = vn_phi_compute_hash (vp1);
2578 slot = current_info->phis.find_slot_with_hash (vp1, vp1->hashcode, INSERT);
2580 /* Because we iterate over phi operations more than once, it's
2581 possible the slot might already exist here, hence no assert.  */
2582 *slot = vp1;
2583 return vp1;
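/* Illustrative example (hypothetical SSA names): given two PHIs in the
   same basic block

     x_3 = PHI <a_1(2), b_2(3)>
     y_4 = PHI <a_1(2), b_2(3)>

   inserting the first and then looking up the second hashes and
   compares equal (same block, compatible type, equal valueized
   arguments), so y_4 can be value numbered to x_3.  */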
2587 /* Print set of components in strongly connected component SCC to OUT. */
2589 static void
2590 print_scc (FILE *out, vec<tree> scc)
2592 tree var;
2593 unsigned int i;
2595 fprintf (out, "SCC consists of:");
2596 FOR_EACH_VEC_ELT (scc, i, var)
2598 fprintf (out, " ");
2599 print_generic_expr (out, var, 0);
2601 fprintf (out, "\n");
2604 /* Set the value number of FROM to TO, return true if it has changed
2605 as a result. */
2607 static inline bool
2608 set_ssa_val_to (tree from, tree to)
2610 tree currval = SSA_VAL (from);
2611 HOST_WIDE_INT toff, coff;
2613 if (from != to)
2615 if (currval == from)
2617 if (dump_file && (dump_flags & TDF_DETAILS))
2619 fprintf (dump_file, "Not changing value number of ");
2620 print_generic_expr (dump_file, from, 0);
2621 fprintf (dump_file, " from VARYING to ");
2622 print_generic_expr (dump_file, to, 0);
2623 fprintf (dump_file, "\n");
2625 return false;
2627 else if (TREE_CODE (to) == SSA_NAME
2628 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2629 to = from;
2632 /* The only things we allow as value numbers are VN_TOP, SSA names
2633 and invariants. So assert that here. */
2634 gcc_assert (to != NULL_TREE
2635 && (to == VN_TOP
2636 || TREE_CODE (to) == SSA_NAME
2637 || is_gimple_min_invariant (to)));
2639 if (dump_file && (dump_flags & TDF_DETAILS))
2641 fprintf (dump_file, "Setting value number of ");
2642 print_generic_expr (dump_file, from, 0);
2643 fprintf (dump_file, " to ");
2644 print_generic_expr (dump_file, to, 0);
2647 if (currval != to
2648 && !operand_equal_p (currval, to, 0)
2649 /* ??? For addresses involving volatile objects or types operand_equal_p
2650 does not reliably detect ADDR_EXPRs as equal. We know we are only
2651 getting invariant gimple addresses here, so can use
2652 get_addr_base_and_unit_offset to do this comparison. */
2653 && !(TREE_CODE (currval) == ADDR_EXPR
2654 && TREE_CODE (to) == ADDR_EXPR
2655 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
2656 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
2657 && coff == toff))
2659 VN_INFO (from)->valnum = to;
2660 if (dump_file && (dump_flags & TDF_DETAILS))
2661 fprintf (dump_file, " (changed)\n");
2662 return true;
2664 if (dump_file && (dump_flags & TDF_DETAILS))
2665 fprintf (dump_file, "\n");
2666 return false;
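/* Illustrative sequence for a hypothetical SSA name x_1:

     set_ssa_val_to (x_1, y_2)   returns true  (value changed)
     set_ssa_val_to (x_1, y_2)   returns false (no change)
     set_ssa_val_to (x_1, x_1)   returns true  (x_1 is now VARYING)
     set_ssa_val_to (x_1, y_2)   returns false (the currval == from
				  case above keeps VARYING sticky)  */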
2669 /* Mark as processed all the definitions in the defining stmt of USE, or
2670 the USE itself. */
2672 static void
2673 mark_use_processed (tree use)
2675 ssa_op_iter iter;
2676 def_operand_p defp;
2677 gimple stmt = SSA_NAME_DEF_STMT (use);
2679 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
2681 VN_INFO (use)->use_processed = true;
2682 return;
2685 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2687 tree def = DEF_FROM_PTR (defp);
2689 VN_INFO (def)->use_processed = true;
2693 /* Set all definitions in STMT to value number to themselves.
2694 Return true if a value number changed. */
2696 static bool
2697 defs_to_varying (gimple stmt)
2699 bool changed = false;
2700 ssa_op_iter iter;
2701 def_operand_p defp;
2703 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2705 tree def = DEF_FROM_PTR (defp);
2706 changed |= set_ssa_val_to (def, def);
2708 return changed;
2711 static bool expr_has_constants (tree expr);
2712 static tree valueize_expr (tree expr);
2714 /* Visit a copy between LHS and RHS, return true if the value number
2715 changed. */
2717 static bool
2718 visit_copy (tree lhs, tree rhs)
2720 /* The copy may have a more interesting constant-filled expression
2721 (we don't, since we know our RHS is just an SSA name). */
2722 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2723 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2725 /* And finally valueize. */
2726 rhs = SSA_VAL (rhs);
2728 return set_ssa_val_to (lhs, rhs);
2731 /* Visit a nary operator RHS, value number it, and return true if the
2732 value number of LHS has changed as a result. */
2734 static bool
2735 visit_nary_op (tree lhs, gimple stmt)
2737 bool changed = false;
2738 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2740 if (result)
2741 changed = set_ssa_val_to (lhs, result);
2742 else
2744 changed = set_ssa_val_to (lhs, lhs);
2745 vn_nary_op_insert_stmt (stmt, lhs);
2748 return changed;
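/* Illustrative effect (hypothetical SSA names): for

     x_3 = a_1 + b_2;
     y_4 = a_1 + b_2;

   visiting the first statement finds no entry, so x_3 is valued as
   itself and <PLUS_EXPR, a_1, b_2> -> x_3 is inserted; visiting the
   second statement finds that entry and values y_4 as x_3.  */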
2751 /* Visit a call STMT storing into LHS. Return true if the value number
2752 of the LHS has changed as a result. */
2754 static bool
2755 visit_reference_op_call (tree lhs, gimple stmt)
2757 bool changed = false;
2758 struct vn_reference_s vr1;
2759 vn_reference_t vnresult = NULL;
2760 tree vuse = gimple_vuse (stmt);
2761 tree vdef = gimple_vdef (stmt);
2763 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
2764 if (lhs && TREE_CODE (lhs) != SSA_NAME)
2765 lhs = NULL_TREE;
2767 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2768 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2769 vr1.type = gimple_expr_type (stmt);
2770 vr1.set = 0;
2771 vr1.hashcode = vn_reference_compute_hash (&vr1);
2772 vn_reference_lookup_1 (&vr1, &vnresult);
2774 if (vnresult)
2776 if (vnresult->result_vdef)
2777 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
2779 if (!vnresult->result && lhs)
2780 vnresult->result = lhs;
2782 if (vnresult->result && lhs)
2784 changed |= set_ssa_val_to (lhs, vnresult->result);
2786 if (VN_INFO (vnresult->result)->has_constants)
2787 VN_INFO (lhs)->has_constants = true;
2790 else
2792 vn_reference_s **slot;
2793 vn_reference_t vr2;
2794 if (vdef)
2795 changed |= set_ssa_val_to (vdef, vdef);
2796 if (lhs)
2797 changed |= set_ssa_val_to (lhs, lhs);
2798 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2799 vr2->vuse = vr1.vuse;
2800 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2801 vr2->type = vr1.type;
2802 vr2->set = vr1.set;
2803 vr2->hashcode = vr1.hashcode;
2804 vr2->result = lhs;
2805 vr2->result_vdef = vdef;
2806 slot = current_info->references.find_slot_with_hash (vr2, vr2->hashcode,
2807 INSERT);
2808 if (*slot)
2809 free_reference (*slot);
2810 *slot = vr2;
2813 return changed;
2816 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2817 and return true if the value number of the LHS has changed as a result. */
2819 static bool
2820 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2822 bool changed = false;
2823 tree last_vuse;
2824 tree result;
2826 last_vuse = gimple_vuse (stmt);
2827 last_vuse_ptr = &last_vuse;
2828 result = vn_reference_lookup (op, gimple_vuse (stmt),
2829 default_vn_walk_kind, NULL);
2830 last_vuse_ptr = NULL;
2832 /* If we have a VCE, try looking up its operand as it might be stored in
2833 a different type. */
2834 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2835 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2836 default_vn_walk_kind, NULL);
2838 /* We handle type-punning through unions by value-numbering based
2839 on offset and size of the access. Be prepared to handle a
2840 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
2841 if (result
2842 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2844 /* We will be setting the value number of lhs to the value number
2845 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2846 So first simplify and lookup this expression to see if it
2847 is already available. */
2848 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2849 if ((CONVERT_EXPR_P (val)
2850 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2851 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2853 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2854 if ((CONVERT_EXPR_P (tem)
2855 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2856 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2857 TREE_TYPE (val), tem)))
2858 val = tem;
2860 result = val;
2861 if (!is_gimple_min_invariant (val)
2862 && TREE_CODE (val) != SSA_NAME)
2863 result = vn_nary_op_lookup (val, NULL);
2864 /* If the expression is not yet available, value-number lhs to
2865 a new SSA_NAME we create. */
2866 if (!result)
2868 result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (),
2869 "vntemp");
2870 /* Initialize value-number information properly. */
2871 VN_INFO_GET (result)->valnum = result;
2872 VN_INFO (result)->value_id = get_next_value_id ();
2873 VN_INFO (result)->expr = val;
2874 VN_INFO (result)->has_constants = expr_has_constants (val);
2875 VN_INFO (result)->needs_insertion = true;
2876 /* As all "inserted" statements are singleton SCCs, insert
2877 to the valid table. This is strictly needed to
2878 avoid re-generating new value SSA_NAMEs for the same
2879 expression during SCC iteration over and over (the
2880 optimistic table gets cleared after each iteration).
2881 We do not need to insert into the optimistic table, as
2882 lookups there will fall back to the valid table. */
2883 if (current_info == optimistic_info)
2885 current_info = valid_info;
2886 vn_nary_op_insert (val, result);
2887 current_info = optimistic_info;
2889 else
2890 vn_nary_op_insert (val, result);
2891 if (dump_file && (dump_flags & TDF_DETAILS))
2893 fprintf (dump_file, "Inserting name ");
2894 print_generic_expr (dump_file, result, 0);
2895 fprintf (dump_file, " for expression ");
2896 print_generic_expr (dump_file, val, 0);
2897 fprintf (dump_file, "\n");
2902 if (result)
2904 changed = set_ssa_val_to (lhs, result);
2905 if (TREE_CODE (result) == SSA_NAME
2906 && VN_INFO (result)->has_constants)
2908 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2909 VN_INFO (lhs)->has_constants = true;
2912 else
2914 changed = set_ssa_val_to (lhs, lhs);
2915 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
2918 return changed;
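/* Illustrative effect (hypothetical names): for

     *p_1 = a_2;
     x_3 = *p_1;

   the load is looked up with the vuse of its statement, typically
   finds the stored value, and x_3 is value numbered to a_2.  If the
   bits are read back in a different type, the type-punning path above
   wraps the result in a VIEW_CONVERT_EXPR and may create a fresh
   SSA name for it.  */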
2922 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2923 and return true if the value number of the LHS has changed as a result. */
2925 static bool
2926 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2928 bool changed = false;
2929 vn_reference_t vnresult = NULL;
2930 tree result, assign;
2931 bool resultsame = false;
2932 tree vuse = gimple_vuse (stmt);
2933 tree vdef = gimple_vdef (stmt);
2935 /* First we want to look up using the *vuses* from the store and see
2936 if the last store to this location with the same address
2937 had the same value.
2939 The vuses represent the memory state before the store. If the
2940 memory state, address, and value of the store are the same as the
2941 last store to this location, then this store will produce the
2942 same memory state as that store.
2944 In this case the vdef versions for this store are value numbered to those
2945 vuse versions, since they represent the same memory state after
2946 this store.
2948 Otherwise, the vdefs for the store are used when inserting into
2949 the table, since the store generates a new memory state. */
2951 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);
2953 if (result)
2955 if (TREE_CODE (result) == SSA_NAME)
2956 result = SSA_VAL (result);
2957 if (TREE_CODE (op) == SSA_NAME)
2958 op = SSA_VAL (op);
2959 resultsame = expressions_equal_p (result, op);
2962 if (!result || !resultsame)
2964 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
2965 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
2966 if (vnresult)
2968 VN_INFO (vdef)->use_processed = true;
2969 return set_ssa_val_to (vdef, vnresult->result_vdef);
2973 if (!result || !resultsame)
2975 if (dump_file && (dump_flags & TDF_DETAILS))
2977 fprintf (dump_file, "No store match\n");
2978 fprintf (dump_file, "Value numbering store ");
2979 print_generic_expr (dump_file, lhs, 0);
2980 fprintf (dump_file, " to ");
2981 print_generic_expr (dump_file, op, 0);
2982 fprintf (dump_file, "\n");
2984 /* Have to set value numbers before insert, since insert is
2985 going to valueize the references in-place. */
2986 if (vdef)
2988 changed |= set_ssa_val_to (vdef, vdef);
2991 /* Do not insert structure copies into the tables. */
2992 if (is_gimple_min_invariant (op)
2993 || is_gimple_reg (op))
2994 vn_reference_insert (lhs, op, vdef, NULL);
2996 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
2997 vn_reference_insert (assign, lhs, vuse, vdef);
2999 else
3001 /* We had a match, so value number the vdef to have the value
3002 number of the vuse it came from. */
3004 if (dump_file && (dump_flags & TDF_DETAILS))
3005 fprintf (dump_file, "Store matched earlier value, "
3006 "value numbering store vdefs to matching vuses.\n");
3008 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3011 return changed;
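/* Illustrative effect (hypothetical names): if the store

     # .MEM_3 = VDEF <.MEM_2>
     a = x_1;

   finds (via the vuse .MEM_2) that A already holds x_1, the store
   does not change the memory state and .MEM_3 is value numbered to
   .MEM_2; otherwise the vdef gets a fresh value number and the store
   is recorded in the tables as above.  */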
3014 /* Visit and value number PHI, return true if the value number
3015 changed. */
3017 static bool
3018 visit_phi (gimple phi)
3020 bool changed = false;
3021 tree result;
3022 tree sameval = VN_TOP;
3023 bool allsame = true;
3024 unsigned i;
3026 /* TODO: We could check for this in init_sccvn, and replace this
3027 with a gcc_assert. */
3028 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3029 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3031 /* See if all non-TOP arguments have the same value. TOP is
3032 equivalent to everything, so we can ignore it. */
3033 for (i = 0; i < gimple_phi_num_args (phi); i++)
3035 tree def = PHI_ARG_DEF (phi, i);
3037 if (TREE_CODE (def) == SSA_NAME)
3038 def = SSA_VAL (def);
3039 if (def == VN_TOP)
3040 continue;
3041 if (sameval == VN_TOP)
3043 sameval = def;
3045 else
3047 if (!expressions_equal_p (def, sameval))
3049 allsame = false;
3050 break;
3055 /* If all arguments value numbered to the same value, the phi node has that
3056 value. */
3057 if (allsame)
3059 if (is_gimple_min_invariant (sameval))
3061 VN_INFO (PHI_RESULT (phi))->has_constants = true;
3062 VN_INFO (PHI_RESULT (phi))->expr = sameval;
3064 else
3066 VN_INFO (PHI_RESULT (phi))->has_constants = false;
3067 VN_INFO (PHI_RESULT (phi))->expr = sameval;
3070 if (TREE_CODE (sameval) == SSA_NAME)
3071 return visit_copy (PHI_RESULT (phi), sameval);
3073 return set_ssa_val_to (PHI_RESULT (phi), sameval);
3076 /* Otherwise, see if it is equivalent to a phi node in this block. */
3077 result = vn_phi_lookup (phi);
3078 if (result)
3080 if (TREE_CODE (result) == SSA_NAME)
3081 changed = visit_copy (PHI_RESULT (phi), result);
3082 else
3083 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3085 else
3087 vn_phi_insert (phi, PHI_RESULT (phi));
3088 VN_INFO (PHI_RESULT (phi))->has_constants = false;
3089 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
3090 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3093 return changed;
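/* Illustrative effect (hypothetical SSA names): for the loop PHI

     x_4 = PHI <7(2), x_4(3)>

   the self-referential argument valueizes to VN_TOP on the first
   optimistic iteration and is ignored, the remaining argument is 7,
   so x_4 is value numbered to the constant 7.  */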
3096 /* Return true if EXPR contains constants. */
3098 static bool
3099 expr_has_constants (tree expr)
3101 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3103 case tcc_unary:
3104 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
3106 case tcc_binary:
3107 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
3108 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
3109 /* Constants inside reference ops are rarely interesting, but
3110 it can take a lot of looking to find them. */
3111 case tcc_reference:
3112 case tcc_declaration:
3113 return false;
3114 default:
3115 return is_gimple_min_invariant (expr);
3117 return false;
3120 /* Return true if STMT contains constants. */
3122 static bool
3123 stmt_has_constants (gimple stmt)
3125 tree tem;
3127 if (gimple_code (stmt) != GIMPLE_ASSIGN)
3128 return false;
3130 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
3132 case GIMPLE_TERNARY_RHS:
3133 tem = gimple_assign_rhs3 (stmt);
3134 if (TREE_CODE (tem) == SSA_NAME)
3135 tem = SSA_VAL (tem);
3136 if (is_gimple_min_invariant (tem))
3137 return true;
3138 /* Fallthru. */
3140 case GIMPLE_BINARY_RHS:
3141 tem = gimple_assign_rhs2 (stmt);
3142 if (TREE_CODE (tem) == SSA_NAME)
3143 tem = SSA_VAL (tem);
3144 if (is_gimple_min_invariant (tem))
3145 return true;
3146 /* Fallthru. */
3148 case GIMPLE_SINGLE_RHS:
3149 /* Constants inside reference ops are rarely interesting, but
3150 it can take a lot of looking to find them. */
3151 case GIMPLE_UNARY_RHS:
3152 tem = gimple_assign_rhs1 (stmt);
3153 if (TREE_CODE (tem) == SSA_NAME)
3154 tem = SSA_VAL (tem);
3155 return is_gimple_min_invariant (tem);
3157 default:
3158 gcc_unreachable ();
3160 return false;
3163 /* Replace SSA_NAMES in expr with their value numbers, and return the
3164 result.
3165 This is performed in place. */
3167 static tree
3168 valueize_expr (tree expr)
3170 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3172 case tcc_binary:
3173 TREE_OPERAND (expr, 1) = vn_valueize (TREE_OPERAND (expr, 1));
3174 /* Fallthru. */
3175 case tcc_unary:
3176 TREE_OPERAND (expr, 0) = vn_valueize (TREE_OPERAND (expr, 0));
3177 break;
3178 default:;
3180 return expr;
3183 /* Simplify the binary expression RHS, and return the result if
3184 simplified. */
3186 static tree
3187 simplify_binary_expression (gimple stmt)
3189 tree result = NULL_TREE;
3190 tree op0 = gimple_assign_rhs1 (stmt);
3191 tree op1 = gimple_assign_rhs2 (stmt);
3192 enum tree_code code = gimple_assign_rhs_code (stmt);
3194 /* This will not catch every single case we could combine, but will
3195 catch those with constants. The goal here is to simultaneously
3196 combine constants between expressions, but avoid infinite
3197 expansion of expressions during simplification. */
3198 if (TREE_CODE (op0) == SSA_NAME)
3200 if (VN_INFO (op0)->has_constants
3201 || TREE_CODE_CLASS (code) == tcc_comparison
3202 || code == COMPLEX_EXPR)
3203 op0 = valueize_expr (vn_get_expr_for (op0));
3204 else
3205 op0 = vn_valueize (op0);
3208 if (TREE_CODE (op1) == SSA_NAME)
3210 if (VN_INFO (op1)->has_constants
3211 || code == COMPLEX_EXPR)
3212 op1 = valueize_expr (vn_get_expr_for (op1));
3213 else
3214 op1 = vn_valueize (op1);
3217 /* Pointer plus constant can be represented as invariant address.
3218 Do so to allow further propagation; see also tree forwprop. */
3219 if (code == POINTER_PLUS_EXPR
3220 && host_integerp (op1, 1)
3221 && TREE_CODE (op0) == ADDR_EXPR
3222 && is_gimple_min_invariant (op0))
3223 return build_invariant_address (TREE_TYPE (op0),
3224 TREE_OPERAND (op0, 0),
3225 TREE_INT_CST_LOW (op1));
3227 /* Avoid folding if nothing changed. */
3228 if (op0 == gimple_assign_rhs1 (stmt)
3229 && op1 == gimple_assign_rhs2 (stmt))
3230 return NULL_TREE;
3232 fold_defer_overflow_warnings ();
3234 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3235 if (result)
3236 STRIP_USELESS_TYPE_CONVERSION (result);
3238 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3239 stmt, 0);
3241 /* Make sure result is not a complex expression consisting
3242 of operators applied to operators (i.e. (a + b) + (a + c)).
3243 Otherwise, we will end up with unbounded expressions if
3244 fold does anything at all. */
3245 if (result && valid_gimple_rhs_p (result))
3246 return result;
3248 return NULL_TREE;
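/* Illustrative effect (hypothetical SSA names): for

     x_2 = a_1 + 1;
     y_3 = x_2 + 2;

   visiting the second statement substitutes the recorded expression
   a_1 + 1 for x_2 (it has constants) and fold_binary combines the
   constants, so the simplified result is a_1 + 3.  */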
3251 /* Simplify the unary expression RHS, and return the result if
3252 simplified. */
3254 static tree
3255 simplify_unary_expression (gimple stmt)
3257 tree result = NULL_TREE;
3258 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3259 enum tree_code code = gimple_assign_rhs_code (stmt);
3261 /* We handle some tcc_reference codes here that are all
3262 GIMPLE_ASSIGN_SINGLE codes. */
3263 if (code == REALPART_EXPR
3264 || code == IMAGPART_EXPR
3265 || code == VIEW_CONVERT_EXPR
3266 || code == BIT_FIELD_REF)
3267 op0 = TREE_OPERAND (op0, 0);
3269 if (TREE_CODE (op0) != SSA_NAME)
3270 return NULL_TREE;
3272 orig_op0 = op0;
3273 if (VN_INFO (op0)->has_constants)
3274 op0 = valueize_expr (vn_get_expr_for (op0));
3275 else if (CONVERT_EXPR_CODE_P (code)
3276 || code == REALPART_EXPR
3277 || code == IMAGPART_EXPR
3278 || code == VIEW_CONVERT_EXPR
3279 || code == BIT_FIELD_REF)
3281 /* We want to do tree-combining on conversion-like expressions.
3282 Make sure we feed only SSA_NAMEs or constants to fold though. */
3283 tree tem = valueize_expr (vn_get_expr_for (op0));
3284 if (UNARY_CLASS_P (tem)
3285 || BINARY_CLASS_P (tem)
3286 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3287 || TREE_CODE (tem) == SSA_NAME
3288 || TREE_CODE (tem) == CONSTRUCTOR
3289 || is_gimple_min_invariant (tem))
3290 op0 = tem;
3293 /* Avoid folding if nothing changed, but remember the expression. */
3294 if (op0 == orig_op0)
3295 return NULL_TREE;
3297 if (code == BIT_FIELD_REF)
3299 tree rhs = gimple_assign_rhs1 (stmt);
3300 result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3301 op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3303 else
3304 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3305 if (result)
3307 STRIP_USELESS_TYPE_CONVERSION (result);
3308 if (valid_gimple_rhs_p (result))
3309 return result;
3312 return NULL_TREE;
3315 /* Try to simplify RHS using equivalences and constant folding. */
3317 static tree
3318 try_to_simplify (gimple stmt)
3320 enum tree_code code = gimple_assign_rhs_code (stmt);
3321 tree tem;
3323 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
3324 in this case; there is no point in doing extra work. */
3325 if (code == SSA_NAME)
3326 return NULL_TREE;
3328 /* First try constant folding based on our current lattice. */
3329 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize);
3330 if (tem
3331 && (TREE_CODE (tem) == SSA_NAME
3332 || is_gimple_min_invariant (tem)))
3333 return tem;
3335 /* If that didn't work try combining multiple statements. */
3336 switch (TREE_CODE_CLASS (code))
3338 case tcc_reference:
3339 /* Fallthrough for some unary codes that can operate on registers. */
3340 if (!(code == REALPART_EXPR
3341 || code == IMAGPART_EXPR
3342 || code == VIEW_CONVERT_EXPR
3343 || code == BIT_FIELD_REF))
3344 break;
3345 /* We could do a little more with unary ops, if they expand
3346 into binary ops, but it's debatable whether it is worth it. */
3347 case tcc_unary:
3348 return simplify_unary_expression (stmt);
3350 case tcc_comparison:
3351 case tcc_binary:
3352 return simplify_binary_expression (stmt);
3354 default:
3355 break;
3358 return NULL_TREE;
3361 /* Visit and value number USE, return true if the value number
3362 changed. */
3364 static bool
3365 visit_use (tree use)
3367 bool changed = false;
3368 gimple stmt = SSA_NAME_DEF_STMT (use);
3370 mark_use_processed (use);
3372 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3373 if (dump_file && (dump_flags & TDF_DETAILS)
3374 && !SSA_NAME_IS_DEFAULT_DEF (use))
3376 fprintf (dump_file, "Value numbering ");
3377 print_generic_expr (dump_file, use, 0);
3378 fprintf (dump_file, " stmt = ");
3379 print_gimple_stmt (dump_file, stmt, 0, 0);
3382 /* Handle uninitialized uses. */
3383 if (SSA_NAME_IS_DEFAULT_DEF (use))
3384 changed = set_ssa_val_to (use, use);
3385 else
3387 if (gimple_code (stmt) == GIMPLE_PHI)
3388 changed = visit_phi (stmt);
3389 else if (gimple_has_volatile_ops (stmt))
3390 changed = defs_to_varying (stmt);
3391 else if (is_gimple_assign (stmt))
3393 enum tree_code code = gimple_assign_rhs_code (stmt);
3394 tree lhs = gimple_assign_lhs (stmt);
3395 tree rhs1 = gimple_assign_rhs1 (stmt);
3396 tree simplified;
3398 /* Shortcut for copies. Simplifying copies is pointless,
3399 since we copy the expression and value they represent. */
3400 if (code == SSA_NAME
3401 && TREE_CODE (lhs) == SSA_NAME)
3403 changed = visit_copy (lhs, rhs1);
3404 goto done;
3406 simplified = try_to_simplify (stmt);
3407 if (simplified)
3409 if (dump_file && (dump_flags & TDF_DETAILS))
3411 fprintf (dump_file, "RHS ");
3412 print_gimple_expr (dump_file, stmt, 0, 0);
3413 fprintf (dump_file, " simplified to ");
3414 print_generic_expr (dump_file, simplified, 0);
3415 if (TREE_CODE (lhs) == SSA_NAME)
3416 fprintf (dump_file, " has constants %d\n",
3417 expr_has_constants (simplified));
3418 else
3419 fprintf (dump_file, "\n");
3422 /* Setting value numbers to constants will occasionally
3423 screw up phi congruence because constants are not
3424 uniquely associated with a single ssa name that can be
3425 looked up. */
3426 if (simplified
3427 && is_gimple_min_invariant (simplified)
3428 && TREE_CODE (lhs) == SSA_NAME)
3430 VN_INFO (lhs)->expr = simplified;
3431 VN_INFO (lhs)->has_constants = true;
3432 changed = set_ssa_val_to (lhs, simplified);
3433 goto done;
3435 else if (simplified
3436 && TREE_CODE (simplified) == SSA_NAME
3437 && TREE_CODE (lhs) == SSA_NAME)
3439 changed = visit_copy (lhs, simplified);
3440 goto done;
3442 else if (simplified)
3444 if (TREE_CODE (lhs) == SSA_NAME)
3446 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3447 /* We have to unshare the expression or else
3448 valueizing may change the IL stream. */
3449 VN_INFO (lhs)->expr = unshare_expr (simplified);
3452 else if (stmt_has_constants (stmt)
3453 && TREE_CODE (lhs) == SSA_NAME)
3454 VN_INFO (lhs)->has_constants = true;
3455 else if (TREE_CODE (lhs) == SSA_NAME)
3457 /* We reset expr and constantness here because we may
3458 have been value numbering optimistically, and
3459 iterating. They may become non-constant in this case,
3460 even if they were optimistically constant. */
3462 VN_INFO (lhs)->has_constants = false;
3463 VN_INFO (lhs)->expr = NULL_TREE;
3466 if ((TREE_CODE (lhs) == SSA_NAME
3467 /* We can substitute SSA_NAMEs that are live over
3468 abnormal edges with their constant value. */
3469 && !(gimple_assign_copy_p (stmt)
3470 && is_gimple_min_invariant (rhs1))
3471 && !(simplified
3472 && is_gimple_min_invariant (simplified))
3473 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3474 /* Stores or copies from SSA_NAMEs that are live over
3475 abnormal edges are a problem. */
3476 || (code == SSA_NAME
3477 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3478 changed = defs_to_varying (stmt);
3479 else if (REFERENCE_CLASS_P (lhs)
3480 || DECL_P (lhs))
3481 changed = visit_reference_op_store (lhs, rhs1, stmt);
3482 else if (TREE_CODE (lhs) == SSA_NAME)
3484 if ((gimple_assign_copy_p (stmt)
3485 && is_gimple_min_invariant (rhs1))
3486 || (simplified
3487 && is_gimple_min_invariant (simplified)))
3489 VN_INFO (lhs)->has_constants = true;
3490 if (simplified)
3491 changed = set_ssa_val_to (lhs, simplified);
3492 else
3493 changed = set_ssa_val_to (lhs, rhs1);
3495 else
3497 /* First try to lookup the simplified expression. */
3498 if (simplified)
3500 enum gimple_rhs_class rhs_class;
3503 rhs_class = get_gimple_rhs_class (TREE_CODE (simplified));
3504 if ((rhs_class == GIMPLE_UNARY_RHS
3505 || rhs_class == GIMPLE_BINARY_RHS
3506 || rhs_class == GIMPLE_TERNARY_RHS)
3507 && valid_gimple_rhs_p (simplified))
3509 tree result = vn_nary_op_lookup (simplified, NULL);
3510 if (result)
3512 changed = set_ssa_val_to (lhs, result);
3513 goto done;
3518 /* Otherwise visit the original statement. */
3519 switch (vn_get_stmt_kind (stmt))
3521 case VN_NARY:
3522 changed = visit_nary_op (lhs, stmt);
3523 break;
3524 case VN_REFERENCE:
3525 changed = visit_reference_op_load (lhs, rhs1, stmt);
3526 break;
3527 default:
3528 changed = defs_to_varying (stmt);
3529 break;
3533 else
3534 changed = defs_to_varying (stmt);
3536 else if (is_gimple_call (stmt))
3538 tree lhs = gimple_call_lhs (stmt);
3540 /* ??? We could try to simplify calls. */
3542 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3544 if (stmt_has_constants (stmt))
3545 VN_INFO (lhs)->has_constants = true;
3546 else
3548 /* We reset expr and constantness here because we may
3549 have been value numbering optimistically, and
3550 iterating. They may become non-constant in this case,
3551 even if they were optimistically constant. */
3552 VN_INFO (lhs)->has_constants = false;
3553 VN_INFO (lhs)->expr = NULL_TREE;
3556 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3558 changed = defs_to_varying (stmt);
3559 goto done;
3563 if (!gimple_call_internal_p (stmt)
3564 && (/* Calls to the same function with the same vuse
3565 and the same operands do not necessarily return the same
3566 value, unless they're pure or const. */
3567 gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
3568 /* If calls have a vdef, subsequent calls won't have
3569 the same incoming vuse. So, if 2 calls with vdef have the
3570 same vuse, we know they're not subsequent.
3571 We can value number 2 calls to the same function with the
3572 same vuse and the same operands which are not subsequent
3573 the same, because there is no code in the program that can
3574 compare the 2 values... */
3575 || (gimple_vdef (stmt)
3576 /* ... unless the call returns a pointer which does
3577 not alias with anything else. In which case the
3578 information that the values are distinct are encoded
3579 in the IL. */
3580 && !(gimple_call_return_flags (stmt) & ERF_NOALIAS))))
3581 changed = visit_reference_op_call (lhs, stmt);
3582 else
3583 changed = defs_to_varying (stmt);
3585 else
3586 changed = defs_to_varying (stmt);
3588 done:
3589 return changed;
3592 /* Compare two operands by reverse postorder index. */
3594 static int
3595 compare_ops (const void *pa, const void *pb)
3597 const tree opa = *((const tree *)pa);
3598 const tree opb = *((const tree *)pb);
3599 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3600 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3601 basic_block bba;
3602 basic_block bbb;
3604 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3605 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3606 else if (gimple_nop_p (opstmta))
3607 return -1;
3608 else if (gimple_nop_p (opstmtb))
3609 return 1;
3611 bba = gimple_bb (opstmta);
3612 bbb = gimple_bb (opstmtb);
3614 if (!bba && !bbb)
3615 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3616 else if (!bba)
3617 return -1;
3618 else if (!bbb)
3619 return 1;
3621 if (bba == bbb)
3623 if (gimple_code (opstmta) == GIMPLE_PHI
3624 && gimple_code (opstmtb) == GIMPLE_PHI)
3625 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3626 else if (gimple_code (opstmta) == GIMPLE_PHI)
3627 return -1;
3628 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3629 return 1;
3630 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3631 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3632 else
3633 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3635 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3638 /* Sort an array containing members of a strongly connected component
3639 SCC so that the members are ordered by RPO number.
3640 This means that when the sort is complete, iterating through the
3641 array will give you the members in RPO order. */
3643 static void
3644 sort_scc (vec<tree> scc)
3646 scc.qsort (compare_ops);
3651 /* Insert the no longer used nary ONARY into the hash table INFO. */
3651 static void
3652 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3654 size_t size = sizeof_vn_nary_op (onary->length);
3655 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3656 &info->nary_obstack);
3657 memcpy (nary, onary, size);
3658 vn_nary_op_insert_into (nary, info->nary, false);
3661 /* Insert the no longer used phi OPHI into the hash table INFO. */
3663 static void
3664 copy_phi (vn_phi_t ophi, vn_tables_t info)
3666 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3667 vn_phi_s **slot;
3668 memcpy (phi, ophi, sizeof (*phi));
3669 ophi->phiargs.create (0);
3670 slot = info->phis.find_slot_with_hash (phi, phi->hashcode, INSERT);
3671 gcc_assert (!*slot);
3672 *slot = phi;
3675 /* Insert the no longer used reference OREF into the hash table INFO. */
3677 static void
3678 copy_reference (vn_reference_t oref, vn_tables_t info)
3680 vn_reference_t ref;
3681 vn_reference_s **slot;
3682 ref = (vn_reference_t) pool_alloc (info->references_pool);
3683 memcpy (ref, oref, sizeof (*ref));
3684 oref->operands.create (0);
3685 slot = info->references.find_slot_with_hash (ref, ref->hashcode, INSERT);
3686 if (*slot)
3687 free_reference (*slot);
3688 *slot = ref;
3691 /* Process a strongly connected component in the SSA graph. */
3693 static void
3694 process_scc (vec<tree> scc)
3696 tree var;
3697 unsigned int i;
3698 unsigned int iterations = 0;
3699 bool changed = true;
3700 vn_nary_op_iterator_type hin;
3701 vn_phi_iterator_type hip;
3702 vn_reference_iterator_type hir;
3703 vn_nary_op_t nary;
3704 vn_phi_t phi;
3705 vn_reference_t ref;
3707 /* If the SCC has a single member, just visit it. */
3708 if (scc.length () == 1)
3710 tree use = scc[0];
3711 if (VN_INFO (use)->use_processed)
3712 return;
3713 /* We need to make sure it doesn't form a cycle itself, which can
3714 happen for self-referential PHI nodes. In that case we would
3715 end up inserting an expression with VN_TOP operands into the
3716 valid table which makes us derive bogus equivalences later.
3717 The cheapest way to check this is to assume it for all PHI nodes. */
3718 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3719 /* Fallthru to iteration. */ ;
3720 else
3722 visit_use (use);
3723 return;
3727 /* Iterate over the SCC with the optimistic table until it stops
3728 changing. */
3729 current_info = optimistic_info;
3730 while (changed)
3732 changed = false;
3733 iterations++;
3734 if (dump_file && (dump_flags & TDF_DETAILS))
3735 fprintf (dump_file, "Starting iteration %d\n", iterations);
3736 /* As we are value-numbering optimistically we have to
3737 clear the expression tables and the simplified expressions
3738 in each iteration until we converge. */
3739 optimistic_info->nary.empty ();
3740 optimistic_info->phis.empty ();
3741 optimistic_info->references.empty ();
3742 obstack_free (&optimistic_info->nary_obstack, NULL);
3743 gcc_obstack_init (&optimistic_info->nary_obstack);
3744 empty_alloc_pool (optimistic_info->phis_pool);
3745 empty_alloc_pool (optimistic_info->references_pool);
3746 FOR_EACH_VEC_ELT (scc, i, var)
3747 VN_INFO (var)->expr = NULL_TREE;
3748 FOR_EACH_VEC_ELT (scc, i, var)
3749 changed |= visit_use (var);
3752 statistics_histogram_event (cfun, "SCC iterations", iterations);
3754 /* Finally, copy the contents of the no longer used optimistic
3755 table to the valid table. */
3756 FOR_EACH_HASH_TABLE_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hin)
3757 copy_nary (nary, valid_info);
3758 FOR_EACH_HASH_TABLE_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hip)
3759 copy_phi (phi, valid_info);
3760 FOR_EACH_HASH_TABLE_ELEMENT (optimistic_info->references,
3761 ref, vn_reference_t, hir)
3762 copy_reference (ref, valid_info);
3764 current_info = valid_info;
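/* Illustrative run of the optimistic iteration (hypothetical names):
   for the SSA cycle

     i_1 = PHI <0(preheader), i_2(latch)>
     i_2 = i_1 + 0;

   the first iteration sees SSA_VAL (i_2) == VN_TOP, so the PHI values
   i_1 to 0; the addition then simplifies to 0 and i_2 is valued 0 as
   well.  The second iteration changes nothing, the loop stops, and the
   optimistic entries are copied into the valid table above.  */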
3768 /* Pop the components of the found SCC for NAME off the SCC stack
3769 and process them. Returns true if all went well, false if
3770 we run into resource limits. */
3772 static bool
3773 extract_and_process_scc_for_name (tree name)
3775 vec<tree> scc = vNULL;
3776 tree x;
3778 /* Found an SCC, pop the components off the SCC stack and
3779 process them. */
3782 x = sccstack.pop ();
3784 VN_INFO (x)->on_sccstack = false;
3785 scc.safe_push (x);
3786 } while (x != name);
3788 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
3789 if (scc.length ()
3790 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3792 if (dump_file)
3793 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3794 "SCC size %u exceeding %u\n", scc.length (),
3795 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3797 scc.release ();
3798 return false;
3801 if (scc.length () > 1)
3802 sort_scc (scc);
3804 if (dump_file && (dump_flags & TDF_DETAILS))
3805 print_scc (dump_file, scc);
3807 process_scc (scc);
3809 scc.release ();
3811 return true;
3814 /* Depth first search on NAME to discover and process SCC's in the SSA
3815 graph.
3816 Execution of this algorithm relies on the fact that the SCC's are
3817 popped off the stack in topological order.
3818 Returns true if successful, false if we stopped processing SCC's due
3819 to resource constraints. */
3821 static bool
3822 DFS (tree name)
3824 vec<ssa_op_iter> itervec = vNULL;
3825 vec<tree> namevec = vNULL;
3826 use_operand_p usep = NULL;
3827 gimple defstmt;
3828 tree use;
3829 ssa_op_iter iter;
3831 start_over:
3832 /* SCC info */
3833 VN_INFO (name)->dfsnum = next_dfs_num++;
3834 VN_INFO (name)->visited = true;
3835 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3837 sccstack.safe_push (name);
3838 VN_INFO (name)->on_sccstack = true;
3839 defstmt = SSA_NAME_DEF_STMT (name);
3841 /* Recursively DFS on our operands, looking for SCC's. */
3842 if (!gimple_nop_p (defstmt))
3844 /* Push a new iterator. */
3845 if (gimple_code (defstmt) == GIMPLE_PHI)
3846 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3847 else
3848 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3850 else
3851 clear_and_done_ssa_iter (&iter);
3853 while (1)
3855 /* If we are done processing uses of a name, go up the stack
3856 of iterators and process SCCs as we found them. */
3857 if (op_iter_done (&iter))
3859 /* See if we found an SCC. */
3860 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3861 if (!extract_and_process_scc_for_name (name))
3863 namevec.release ();
3864 itervec.release ();
3865 return false;
3868 /* Check if we are done. */
3869 if (namevec.is_empty ())
3871 namevec.release ();
3872 itervec.release ();
3873 return true;
3876 /* Restore the last use walker and continue walking there. */
3877 use = name;
3878 name = namevec.pop ();
3879 memcpy (&iter, &itervec.last (),
3880 sizeof (ssa_op_iter));
3881 itervec.pop ();
3882 goto continue_walking;
3885 use = USE_FROM_PTR (usep);
3887 /* Since we handle phi nodes, we will sometimes get
3888 invariants in the use expression. */
3889 if (TREE_CODE (use) == SSA_NAME)
3891 if (! (VN_INFO (use)->visited))
3893 /* Recurse by pushing the current use walking state on
3894 the stack and starting over. */
3895 itervec.safe_push (iter);
3896 namevec.safe_push (name);
3897 name = use;
3898 goto start_over;
3900 continue_walking:
3901 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3902 VN_INFO (use)->low);
3904 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3905 && VN_INFO (use)->on_sccstack)
3907 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3908 VN_INFO (name)->low);
3912 usep = op_iter_next_use (&iter);
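/* Illustrative walk (hypothetical SSA names): starting the DFS at i_2
   with

     i_1 = PHI <0(2), i_2(3)>
     i_2 = i_1 + 1;

   the walk descends from i_2 to i_1 and finds i_2 again already on the
   SCC stack, so the low numbers propagate back and {i_1, i_2} is
   popped and processed as a single SCC.  */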
3916 /* Allocate a value number table. */
3918 static void
3919 allocate_vn_table (vn_tables_t table)
3921 table->phis.create (23);
3922 table->nary.create (23);
3923 table->references.create (23);
3925 gcc_obstack_init (&table->nary_obstack);
3926 table->phis_pool = create_alloc_pool ("VN phis",
3927 sizeof (struct vn_phi_s),
3928 30);
3929 table->references_pool = create_alloc_pool ("VN references",
3930 sizeof (struct vn_reference_s),
3931 30);
3934 /* Free a value number table. */
3936 static void
3937 free_vn_table (vn_tables_t table)
3939 table->phis.dispose ();
3940 table->nary.dispose ();
3941 table->references.dispose ();
3942 obstack_free (&table->nary_obstack, NULL);
3943 free_alloc_pool (table->phis_pool);
3944 free_alloc_pool (table->references_pool);
3947 static void
3948 init_scc_vn (void)
3950 size_t i;
3951 int j;
3952 int *rpo_numbers_temp;
3954 calculate_dominance_info (CDI_DOMINATORS);
3955 sccstack.create (0);
3956 constant_to_value_id.create (23);
3958 constant_value_ids = BITMAP_ALLOC (NULL);
3960 next_dfs_num = 1;
3961 next_value_id = 1;
3963 vn_ssa_aux_table.create (num_ssa_names + 1);
3964 /* Creating the vector only preallocates the space; it doesn't
3965 actually grow it to the right size, so do that explicitly. */
3966 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
3967 gcc_obstack_init (&vn_ssa_aux_obstack);
3969 shared_lookup_phiargs.create (0);
3970 shared_lookup_references.create (0);
3971 rpo_numbers = XNEWVEC (int, last_basic_block);
3972 rpo_numbers_temp = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
3973 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3975 /* rpo_numbers_temp holds the RPO ordering itself: rpo[i] = bb means
3976 that the i'th block in RPO order is bb. We want to map blocks to
3977 their RPO numbers, so the array needs to be inverted. */
3978 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3979 rpo_numbers[rpo_numbers_temp[j]] = j;
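/* For instance (hypothetical numbers): if rpo_numbers_temp is { 5, 3, 4 },
   block 5 comes first in RPO order, so the loop above sets
   rpo_numbers[5] = 0, rpo_numbers[3] = 1 and rpo_numbers[4] = 2.  */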
3981 XDELETE (rpo_numbers_temp);
3983 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3985 /* Create the VN_INFO structures, and initialize value numbers to
3986 TOP. */
3987 for (i = 0; i < num_ssa_names; i++)
3989 tree name = ssa_name (i);
3990 if (name)
3992 VN_INFO_GET (name)->valnum = VN_TOP;
3993 VN_INFO (name)->expr = NULL_TREE;
3994 VN_INFO (name)->value_id = 0;
3998 renumber_gimple_stmt_uids ();
4000 /* Create the valid and optimistic value numbering tables. */
4001 valid_info = XCNEW (struct vn_tables_s);
4002 allocate_vn_table (valid_info);
4003 optimistic_info = XCNEW (struct vn_tables_s);
4004 allocate_vn_table (optimistic_info);
4007 void
4008 free_scc_vn (void)
4010 size_t i;
4012 constant_to_value_id.dispose ();
4013 BITMAP_FREE (constant_value_ids);
4014 shared_lookup_phiargs.release ();
4015 shared_lookup_references.release ();
4016 XDELETEVEC (rpo_numbers);
4018 for (i = 0; i < num_ssa_names; i++)
4020 tree name = ssa_name (i);
4021 if (name
4022 && VN_INFO (name)->needs_insertion)
4023 release_ssa_name (name);
4025 obstack_free (&vn_ssa_aux_obstack, NULL);
4026 vn_ssa_aux_table.release ();
4028 sccstack.release ();
4029 free_vn_table (valid_info);
4030 XDELETE (valid_info);
4031 free_vn_table (optimistic_info);
4032 XDELETE (optimistic_info);
4035 /* Set *ID according to RESULT. */
4037 static void
4038 set_value_id_for_result (tree result, unsigned int *id)
4040 if (result && TREE_CODE (result) == SSA_NAME)
4041 *id = VN_INFO (result)->value_id;
4042 else if (result && is_gimple_min_invariant (result))
4043 *id = get_or_alloc_constant_value_id (result);
4044 else
4045 *id = get_next_value_id ();
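
/* For example (hypothetical values): an SSA_NAME result takes the value id
   already recorded for it, a constant such as 42 takes the shared id kept
   for that constant, and any other (or missing) result gets a fresh id.  */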
4048 /* Set the value ids in the valid hash tables. */
4050 static void
4051 set_hashtable_value_ids (void)
4053 vn_nary_op_iterator_type hin;
4054 vn_phi_iterator_type hip;
4055 vn_reference_iterator_type hir;
4056 vn_nary_op_t vno;
4057 vn_reference_t vr;
4058 vn_phi_t vp;
4060 /* Now set the value ids of the entries we had put in the hash
4061 tables. */
4063 FOR_EACH_HASH_TABLE_ELEMENT (valid_info->nary, vno, vn_nary_op_t, hin)
4064 set_value_id_for_result (vno->result, &vno->value_id);
4066 FOR_EACH_HASH_TABLE_ELEMENT (valid_info->phis, vp, vn_phi_t, hip)
4067 set_value_id_for_result (vp->result, &vp->value_id);
4069 FOR_EACH_HASH_TABLE_ELEMENT (valid_info->references, vr, vn_reference_t, hir)
4070 set_value_id_for_result (vr->result, &vr->value_id);
4073 /* Do SCCVN. Returns true if it finished, false if we bailed out
4074 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
4075 how the alias-oracle walk is used during the VN process. */
4077 bool
4078 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4080 size_t i;
4081 tree param;
4083 default_vn_walk_kind = default_vn_walk_kind_;
4085 init_scc_vn ();
4086 current_info = valid_info;
4088 for (param = DECL_ARGUMENTS (current_function_decl);
4089 param;
4090 param = DECL_CHAIN (param))
4092 tree def = ssa_default_def (cfun, param);
4093 if (def)
4094 VN_INFO (def)->valnum = def;
4097 for (i = 1; i < num_ssa_names; ++i)
4099 tree name = ssa_name (i);
4100 if (name
4101 && VN_INFO (name)->visited == false
4102 && !has_zero_uses (name))
4103 if (!DFS (name))
4105 free_scc_vn ();
4106 return false;
4110 /* Initialize the value ids: names that are their own value number (or still VN_TOP) get fresh ids, constants get constant value ids. */
4112 for (i = 1; i < num_ssa_names; ++i)
4114 tree name = ssa_name (i);
4115 vn_ssa_aux_t info;
4116 if (!name)
4117 continue;
4118 info = VN_INFO (name);
4119 if (info->valnum == name
4120 || info->valnum == VN_TOP)
4121 info->value_id = get_next_value_id ();
4122 else if (is_gimple_min_invariant (info->valnum))
4123 info->value_id = get_or_alloc_constant_value_id (info->valnum);
4126 /* Propagate value ids: a name value-numbered to another SSA name inherits that name's value id. */
4127 for (i = 1; i < num_ssa_names; ++i)
4129 tree name = ssa_name (i);
4130 vn_ssa_aux_t info;
4131 if (!name)
4132 continue;
4133 info = VN_INFO (name);
4134 if (TREE_CODE (info->valnum) == SSA_NAME
4135 && info->valnum != name
4136 && info->value_id != VN_INFO (info->valnum)->value_id)
4137 info->value_id = VN_INFO (info->valnum)->value_id;
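/* E.g. (hypothetical names): if b_2 was value-numbered to a_1, b_2 now
   shares a_1's value id, so the two are treated as the same value from
   here on.  */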
4140 set_hashtable_value_ids ();
4142 if (dump_file && (dump_flags & TDF_DETAILS))
4144 fprintf (dump_file, "Value numbers:\n");
4145 for (i = 0; i < num_ssa_names; i++)
4147 tree name = ssa_name (i);
4148 if (name
4149 && VN_INFO (name)->visited
4150 && SSA_VAL (name) != name)
4152 print_generic_expr (dump_file, name, 0);
4153 fprintf (dump_file, " = ");
4154 print_generic_expr (dump_file, SSA_VAL (name), 0);
4155 fprintf (dump_file, "\n");
4160 return true;
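
/* A minimal caller sketch (simplified from how a client pass such as
   FRE/PRE typically drives these entry points; not the actual pass code):

     if (!run_scc_vn (VN_WALKREWRITE))
       return 0;            // bailed out; the tables were already freed

     // ... query SSA_VAL () / the vn_*_lookup routines to find and
     // eliminate redundancies ...

     free_scc_vn ();
*/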
4163 /* Return the maximum value id we have ever seen. */
4165 unsigned int
4166 get_max_value_id (void)
4168 return next_value_id;
4171 /* Return the next unique value id. */
4173 unsigned int
4174 get_next_value_id (void)
4176 return next_value_id++;
4180 /* Compare two expressions E1 and E2 and return true if they are equal. */
4182 bool
4183 expressions_equal_p (tree e1, tree e2)
4185 /* The obvious case. */
4186 if (e1 == e2)
4187 return true;
4189 /* If only one of them is null, they cannot be equal. */
4190 if (!e1 || !e2)
4191 return false;
4193 /* Now perform the actual comparison. */
4194 if (TREE_CODE (e1) == TREE_CODE (e2)
4195 && operand_equal_p (e1, e2, OEP_PURE_SAME))
4196 return true;
4198 return false;
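
/* So, for instance, two distinct INTEGER_CST nodes with the same type and
   value compare equal here through operand_equal_p, even though the
   pointer test at the top fails for them.  */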
4202 /* Return true if the nary operation NARY may trap. This is a copy
4203 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
4205 bool
4206 vn_nary_may_trap (vn_nary_op_t nary)
4208 tree type;
4209 tree rhs2 = NULL_TREE;
4210 bool honor_nans = false;
4211 bool honor_snans = false;
4212 bool fp_operation = false;
4213 bool honor_trapv = false;
4214 bool handled, ret;
4215 unsigned i;
4217 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4218 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4219 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4221 type = nary->type;
4222 fp_operation = FLOAT_TYPE_P (type);
4223 if (fp_operation)
4225 honor_nans = flag_trapping_math && !flag_finite_math_only;
4226 honor_snans = flag_signaling_nans != 0;
4228 else if (INTEGRAL_TYPE_P (type)
4229 && TYPE_OVERFLOW_TRAPS (type))
4230 honor_trapv = true;
4232 if (nary->length >= 2)
4233 rhs2 = nary->op[1];
4234 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4235 honor_trapv,
4236 honor_nans, honor_snans, rhs2,
4237 &handled);
4238 if (handled
4239 && ret)
4240 return true;
4242 for (i = 0; i < nary->length; ++i)
4243 if (tree_could_trap_p (nary->op[i]))
4244 return true;
4246 return false;
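
/* For example (hypothetical operands): an integer TRUNC_DIV_EXPR whose
   divisor is not a known-nonzero constant is flagged as possibly trapping
   by operation_could_trap_helper_p, and a signed PLUS_EXPR is flagged when
   -ftrapv makes signed overflow trap (honor_trapv above).  */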