PR middle-end/59175
gcc/tree-ssa-sccvn.c (official-gcc.git)
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2013 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "basic-block.h"
27 #include "gimple-pretty-print.h"
28 #include "tree-inline.h"
29 #include "gimple.h"
30 #include "gimplify.h"
31 #include "gimple-ssa.h"
32 #include "tree-phinodes.h"
33 #include "ssa-iterators.h"
34 #include "tree-ssanames.h"
35 #include "tree-dfa.h"
36 #include "tree-ssa.h"
37 #include "dumpfile.h"
38 #include "hash-table.h"
39 #include "alloc-pool.h"
40 #include "flags.h"
41 #include "cfgloop.h"
42 #include "params.h"
43 #include "tree-ssa-propagate.h"
44 #include "tree-ssa-sccvn.h"
46 /* This algorithm is based on the SCC algorithm presented by Keith
47 Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
48 (http://citeseer.ist.psu.edu/41805.html). In
49 straight-line code, it is equivalent to a regular hash-based value
50 numbering performed in reverse postorder.
52 For code with cycles, there are two alternatives, both of which
53 require keeping the hashtables separate from the actual list of
54 value numbers for SSA names.
56 1. Iterate value numbering in an RPO walk of the blocks, removing
57 all the entries from the hashtable after each iteration (but
58 keeping the SSA name->value number mapping between iterations).
59 Iterate until it does not change.
61 2. Perform value numbering as part of an SCC walk on the SSA graph,
62 iterating only the cycles in the SSA graph until they do not change
63 (using a separate, optimistic hashtable for value numbering the SCC
64 operands).
66 The second is not just faster in practice (because most SSA graph
67 cycles do not involve all the variables in the graph), it also has
68 some nice properties.
70 One of these nice properties is that when we pop an SCC off the
71 stack, we are guaranteed to have processed all the operands coming from
72 *outside of that SCC*, so we do not need to do anything special to
73 ensure they have value numbers.
75 Another nice property is that the SCC walk is done as part of a DFS
76 of the SSA graph, which makes it easy to perform combining and
77 simplifying operations at the same time.
79 The code below is deliberately written in a way that makes it easy
80 to separate the SCC walk from the other work it does.
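
   For illustration, consider a minimal SSA cycle (schematic GIMPLE,
   not actual dump output):

     x_1 = PHI <0(2), x_2(3)>
     x_2 = x_1 + 0;

   x_1 and x_2 form an SCC in the SSA graph.  The optimistic iteration
   starts both at VN_TOP; the PHI then value-numbers to 0 (VN_TOP
   arguments are ignored), x_2 = x_1 + 0 folds to 0, and the next
   iteration confirms the fixpoint, so both names end up with value
   number 0.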
82 In order to propagate constants through the code, we track which
83 expressions contain constants, and use those while folding. In
84 theory, we could also track expressions whose value numbers are
85 replaced, in case we end up folding based on expression
86 identities.
88 In order to value number memory, we assign value numbers to vuses.
89 This enables us to note that, for example, stores to the same
90 address of the same value from the same starting memory states are
91 equivalent.
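
   Schematically (not actual dump output), two stores on the arms of a
   conditional,

     # .MEM_5 = VDEF <.MEM_3>              # .MEM_6 = VDEF <.MEM_3>
     a = 1;                                a = 1;

   write the same value to the same address from the same starting
   memory state .MEM_3, so .MEM_5 and .MEM_6 receive the same value
   number.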
92 TODO:
94 1. We can iterate only the changing portions of the SCC's, but
95 I have not seen an SCC big enough for this to be a win.
96 2. If you differentiate between phi nodes for loops and phi nodes
97 for if-then-else, you can properly consider phi nodes in different
98 blocks for equivalence.
99 3. We could value number vuses in more cases, particularly whole
100 structure copies.
104 /* vn_nary_op hashtable helpers. */
106 struct vn_nary_op_hasher : typed_noop_remove <vn_nary_op_s>
108 typedef vn_nary_op_s value_type;
109 typedef vn_nary_op_s compare_type;
110 static inline hashval_t hash (const value_type *);
111 static inline bool equal (const value_type *, const compare_type *);
114 /* Return the computed hashcode for nary operation P1. */
116 inline hashval_t
117 vn_nary_op_hasher::hash (const value_type *vno1)
119 return vno1->hashcode;
122 /* Compare nary operations P1 and P2 and return true if they are
123 equivalent. */
125 inline bool
126 vn_nary_op_hasher::equal (const value_type *vno1, const compare_type *vno2)
128 return vn_nary_op_eq (vno1, vno2);
131 typedef hash_table <vn_nary_op_hasher> vn_nary_op_table_type;
132 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
135 /* vn_phi hashtable helpers. */
137 static int
138 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
140 struct vn_phi_hasher
142 typedef vn_phi_s value_type;
143 typedef vn_phi_s compare_type;
144 static inline hashval_t hash (const value_type *);
145 static inline bool equal (const value_type *, const compare_type *);
146 static inline void remove (value_type *);
149 /* Return the computed hashcode for phi operation P1. */
151 inline hashval_t
152 vn_phi_hasher::hash (const value_type *vp1)
154 return vp1->hashcode;
157 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
159 inline bool
160 vn_phi_hasher::equal (const value_type *vp1, const compare_type *vp2)
162 return vn_phi_eq (vp1, vp2);
165 /* Free a phi operation structure PHI. */
167 inline void
168 vn_phi_hasher::remove (value_type *phi)
170 phi->phiargs.release ();
173 typedef hash_table <vn_phi_hasher> vn_phi_table_type;
174 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
177 /* Compare two reference operands P1 and P2 for equality. Return true if
178 they are equal, and false otherwise. */
180 static int
181 vn_reference_op_eq (const void *p1, const void *p2)
183 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
184 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
186 return (vro1->opcode == vro2->opcode
187 /* We do not care for differences in type qualification. */
188 && (vro1->type == vro2->type
189 || (vro1->type && vro2->type
190 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
191 TYPE_MAIN_VARIANT (vro2->type))))
192 && expressions_equal_p (vro1->op0, vro2->op0)
193 && expressions_equal_p (vro1->op1, vro2->op1)
194 && expressions_equal_p (vro1->op2, vro2->op2));
197 /* Free a reference operation structure VR. */
199 static inline void
200 free_reference (vn_reference_s *vr)
202 vr->operands.release ();
206 /* vn_reference hashtable helpers. */
208 struct vn_reference_hasher
210 typedef vn_reference_s value_type;
211 typedef vn_reference_s compare_type;
212 static inline hashval_t hash (const value_type *);
213 static inline bool equal (const value_type *, const compare_type *);
214 static inline void remove (value_type *);
217 /* Return the hashcode for a given reference operation P1. */
219 inline hashval_t
220 vn_reference_hasher::hash (const value_type *vr1)
222 return vr1->hashcode;
225 inline bool
226 vn_reference_hasher::equal (const value_type *v, const compare_type *c)
228 return vn_reference_eq (v, c);
231 inline void
232 vn_reference_hasher::remove (value_type *v)
234 free_reference (v);
237 typedef hash_table <vn_reference_hasher> vn_reference_table_type;
238 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
241 /* The set of hashtables and alloc_pool's for their items. */
243 typedef struct vn_tables_s
245 vn_nary_op_table_type nary;
246 vn_phi_table_type phis;
247 vn_reference_table_type references;
248 struct obstack nary_obstack;
249 alloc_pool phis_pool;
250 alloc_pool references_pool;
251 } *vn_tables_t;
254 /* vn_constant hashtable helpers. */
256 struct vn_constant_hasher : typed_free_remove <vn_constant_s>
258 typedef vn_constant_s value_type;
259 typedef vn_constant_s compare_type;
260 static inline hashval_t hash (const value_type *);
261 static inline bool equal (const value_type *, const compare_type *);
264 /* Hash table hash function for vn_constant_t. */
266 inline hashval_t
267 vn_constant_hasher::hash (const value_type *vc1)
269 return vc1->hashcode;
272 /* Hash table equality function for vn_constant_t. */
274 inline bool
275 vn_constant_hasher::equal (const value_type *vc1, const compare_type *vc2)
277 if (vc1->hashcode != vc2->hashcode)
278 return false;
280 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
283 static hash_table <vn_constant_hasher> constant_to_value_id;
284 static bitmap constant_value_ids;
287 /* Valid hashtables storing information we have proven to be
288 correct. */
290 static vn_tables_t valid_info;
292 /* Optimistic hashtables storing information we are making assumptions about
293 during iterations. */
295 static vn_tables_t optimistic_info;
297 /* Pointer to the set of hashtables that is currently being used.
298 Should always point to either the optimistic_info, or the
299 valid_info. */
301 static vn_tables_t current_info;
304 /* Reverse post order index for each basic block. */
306 static int *rpo_numbers;
308 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
310 /* This represents the top of the VN lattice, which is the universal
311 value. */
313 tree VN_TOP;
315 /* Unique counter for our value ids. */
317 static unsigned int next_value_id;
319 /* Next DFS number and the stack for strongly connected component
320 detection. */
322 static unsigned int next_dfs_num;
323 static vec<tree> sccstack;
327 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
328 are allocated on an obstack for locality reasons, and to free them
329 without looping over the vec. */
331 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
332 static struct obstack vn_ssa_aux_obstack;
334 /* Return the value numbering information for a given SSA name. */
336 vn_ssa_aux_t
337 VN_INFO (tree name)
339 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
340 gcc_checking_assert (res);
341 return res;
344 /* Set the value numbering info for a given SSA name to a given
345 value. */
347 static inline void
348 VN_INFO_SET (tree name, vn_ssa_aux_t value)
350 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
353 /* Initialize the value numbering info for a given SSA name.
354 This should be called just once for every SSA name. */
356 vn_ssa_aux_t
357 VN_INFO_GET (tree name)
359 vn_ssa_aux_t newinfo;
361 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
362 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
363 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
364 vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
365 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
366 return newinfo;
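
/* A minimal usage sketch (hypothetical caller, for illustration):

     tree name = ...some SSA name...;
     vn_ssa_aux_t info = VN_INFO_GET (name);   // exactly once per name
     info->valnum = VN_TOP;                    // start at lattice top
     ...
     gcc_checking_assert (VN_INFO (name) == info);
*/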
370 /* Get the representative expression for the SSA_NAME NAME. Returns
371 the representative SSA_NAME if there is no expression associated with it. */
373 tree
374 vn_get_expr_for (tree name)
376 vn_ssa_aux_t vn = VN_INFO (name);
377 gimple def_stmt;
378 tree expr = NULL_TREE;
379 enum tree_code code;
381 if (vn->valnum == VN_TOP)
382 return name;
384 /* If the value-number is a constant it is the representative
385 expression. */
386 if (TREE_CODE (vn->valnum) != SSA_NAME)
387 return vn->valnum;
389 /* Get to the information of the value of this SSA_NAME. */
390 vn = VN_INFO (vn->valnum);
392 /* If the value-number is a constant it is the representative
393 expression. */
394 if (TREE_CODE (vn->valnum) != SSA_NAME)
395 return vn->valnum;
397 /* Else if we have an expression, return it. */
398 if (vn->expr != NULL_TREE)
399 return vn->expr;
401 /* Otherwise use the defining statement to build the expression. */
402 def_stmt = SSA_NAME_DEF_STMT (vn->valnum);
404 /* If the value number is not an assignment use it directly. */
405 if (!is_gimple_assign (def_stmt))
406 return vn->valnum;
408 /* FIXME tuples. This is incomplete and likely will miss some
409 simplifications. */
410 code = gimple_assign_rhs_code (def_stmt);
411 switch (TREE_CODE_CLASS (code))
413 case tcc_reference:
414 if ((code == REALPART_EXPR
415 || code == IMAGPART_EXPR
416 || code == VIEW_CONVERT_EXPR)
417 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
418 0)) == SSA_NAME)
419 expr = fold_build1 (code,
420 gimple_expr_type (def_stmt),
421 TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
422 break;
424 case tcc_unary:
425 expr = fold_build1 (code,
426 gimple_expr_type (def_stmt),
427 gimple_assign_rhs1 (def_stmt));
428 break;
430 case tcc_binary:
431 expr = fold_build2 (code,
432 gimple_expr_type (def_stmt),
433 gimple_assign_rhs1 (def_stmt),
434 gimple_assign_rhs2 (def_stmt));
435 break;
437 case tcc_exceptional:
438 if (code == CONSTRUCTOR
439 && TREE_CODE
440 (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
441 expr = gimple_assign_rhs1 (def_stmt);
442 break;
444 default:;
446 if (expr == NULL_TREE)
447 return vn->valnum;
449 /* Cache the expression. */
450 vn->expr = expr;
452 return expr;
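
/* For example (schematic): if a_1 value-numbers to b_2 and b_2 is
   defined by

     b_2 = c_3 + d_4;

   then vn_get_expr_for (a_1) builds and caches the expression
   c_3 + d_4 via fold_build2.  If b_2's definition is not an
   assignment (say a PHI), the value number b_2 itself is returned.  */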
455 /* Return the vn_kind the expression computed by the stmt should be
456 associated with. */
458 enum vn_kind
459 vn_get_stmt_kind (gimple stmt)
461 switch (gimple_code (stmt))
463 case GIMPLE_CALL:
464 return VN_REFERENCE;
465 case GIMPLE_PHI:
466 return VN_PHI;
467 case GIMPLE_ASSIGN:
469 enum tree_code code = gimple_assign_rhs_code (stmt);
470 tree rhs1 = gimple_assign_rhs1 (stmt);
471 switch (get_gimple_rhs_class (code))
473 case GIMPLE_UNARY_RHS:
474 case GIMPLE_BINARY_RHS:
475 case GIMPLE_TERNARY_RHS:
476 return VN_NARY;
477 case GIMPLE_SINGLE_RHS:
478 switch (TREE_CODE_CLASS (code))
480 case tcc_reference:
481 /* VOP-less references can go through unary case. */
482 if ((code == REALPART_EXPR
483 || code == IMAGPART_EXPR
484 || code == VIEW_CONVERT_EXPR
485 || code == BIT_FIELD_REF)
486 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
487 return VN_NARY;
489 /* Fallthrough. */
490 case tcc_declaration:
491 return VN_REFERENCE;
493 case tcc_constant:
494 return VN_CONSTANT;
496 default:
497 if (code == ADDR_EXPR)
498 return (is_gimple_min_invariant (rhs1)
499 ? VN_CONSTANT : VN_REFERENCE);
500 else if (code == CONSTRUCTOR)
501 return VN_NARY;
502 return VN_NONE;
504 default:
505 return VN_NONE;
508 default:
509 return VN_NONE;
513 /* Lookup a value id for CONSTANT and return it. If it does not
514 exist, returns 0. */
516 unsigned int
517 get_constant_value_id (tree constant)
519 vn_constant_s **slot;
520 struct vn_constant_s vc;
522 vc.hashcode = vn_hash_constant_with_type (constant);
523 vc.constant = constant;
524 slot = constant_to_value_id.find_slot_with_hash (&vc, vc.hashcode, NO_INSERT);
525 if (slot)
526 return (*slot)->value_id;
527 return 0;
530 /* Lookup a value id for CONSTANT; if none exists, create a
531 new one. Either way, return the value id. */
533 unsigned int
534 get_or_alloc_constant_value_id (tree constant)
536 vn_constant_s **slot;
537 struct vn_constant_s vc;
538 vn_constant_t vcp;
540 vc.hashcode = vn_hash_constant_with_type (constant);
541 vc.constant = constant;
542 slot = constant_to_value_id.find_slot_with_hash (&vc, vc.hashcode, INSERT);
543 if (*slot)
544 return (*slot)->value_id;
546 vcp = XNEW (struct vn_constant_s);
547 vcp->hashcode = vc.hashcode;
548 vcp->constant = constant;
549 vcp->value_id = get_next_value_id ();
550 *slot = vcp;
551 bitmap_set_bit (constant_value_ids, vcp->value_id);
552 return vcp->value_id;
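
/* For example (schematic): the first call for the constant 42
   allocates a fresh id via get_next_value_id and records it in
   constant_value_ids; subsequent calls, and get_constant_value_id,
   return that same id, and value_id_constant_p holds for it.  */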
555 /* Return true if V is a value id for a constant. */
557 bool
558 value_id_constant_p (unsigned int v)
560 return bitmap_bit_p (constant_value_ids, v);
563 /* Compute the hash for a reference operand VRO1. */
565 static hashval_t
566 vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
568 result = iterative_hash_hashval_t (vro1->opcode, result);
569 if (vro1->op0)
570 result = iterative_hash_expr (vro1->op0, result);
571 if (vro1->op1)
572 result = iterative_hash_expr (vro1->op1, result);
573 if (vro1->op2)
574 result = iterative_hash_expr (vro1->op2, result);
575 return result;
578 /* Compute a hash for the reference operation VR1 and return it. */
580 hashval_t
581 vn_reference_compute_hash (const vn_reference_t vr1)
583 hashval_t result = 0;
584 int i;
585 vn_reference_op_t vro;
586 HOST_WIDE_INT off = -1;
587 bool deref = false;
589 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
591 if (vro->opcode == MEM_REF)
592 deref = true;
593 else if (vro->opcode != ADDR_EXPR)
594 deref = false;
595 if (vro->off != -1)
597 if (off == -1)
598 off = 0;
599 off += vro->off;
601 else
603 if (off != -1
604 && off != 0)
605 result = iterative_hash_hashval_t (off, result);
606 off = -1;
607 if (deref
608 && vro->opcode == ADDR_EXPR)
610 if (vro->op0)
612 tree op = TREE_OPERAND (vro->op0, 0);
613 result = iterative_hash_hashval_t (TREE_CODE (op), result);
614 result = iterative_hash_expr (op, result);
617 else
618 result = vn_reference_op_compute_hash (vro, result);
621 if (vr1->vuse)
622 result += SSA_NAME_VERSION (vr1->vuse);
624 return result;
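
/* Note the ADDR_EXPR handling above: for a dereference chain ending
   in MEM_REF[&decl] we hash the pointed-to decl itself, so
   (schematically) the operand lists for MEM[&a] and for plain a hash
   identically.  vn_reference_eq below performs the matching
   canonicalization when comparing, keeping equality consistent with
   the hash.  */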
627 /* Return true if reference operations VR1 and VR2 are equivalent. This
628 means they have the same set of operands and vuses. */
630 bool
631 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
633 unsigned i, j;
638 /* Early out if this is not a hash collision. */
639 if (vr1->hashcode != vr2->hashcode)
640 return false;
642 /* The VOP needs to be the same. */
643 if (vr1->vuse != vr2->vuse)
644 return false;
646 /* If the operands are the same we are done. */
647 if (vr1->operands == vr2->operands)
648 return true;
650 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
651 return false;
653 if (INTEGRAL_TYPE_P (vr1->type)
654 && INTEGRAL_TYPE_P (vr2->type))
656 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
657 return false;
659 else if (INTEGRAL_TYPE_P (vr1->type)
660 && (TYPE_PRECISION (vr1->type)
661 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
662 return false;
663 else if (INTEGRAL_TYPE_P (vr2->type)
664 && (TYPE_PRECISION (vr2->type)
665 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
666 return false;
668 i = 0;
669 j = 0;
672 HOST_WIDE_INT off1 = 0, off2 = 0;
673 vn_reference_op_t vro1, vro2;
674 vn_reference_op_s tem1, tem2;
675 bool deref1 = false, deref2 = false;
676 for (; vr1->operands.iterate (i, &vro1); i++)
678 if (vro1->opcode == MEM_REF)
679 deref1 = true;
680 if (vro1->off == -1)
681 break;
682 off1 += vro1->off;
684 for (; vr2->operands.iterate (j, &vro2); j++)
686 if (vro2->opcode == MEM_REF)
687 deref2 = true;
688 if (vro2->off == -1)
689 break;
690 off2 += vro2->off;
692 if (off1 != off2)
693 return false;
694 if (deref1 && vro1->opcode == ADDR_EXPR)
696 memset (&tem1, 0, sizeof (tem1));
697 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
698 tem1.type = TREE_TYPE (tem1.op0);
699 tem1.opcode = TREE_CODE (tem1.op0);
700 vro1 = &tem1;
701 deref1 = false;
703 if (deref2 && vro2->opcode == ADDR_EXPR)
705 memset (&tem2, 0, sizeof (tem2));
706 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
707 tem2.type = TREE_TYPE (tem2.op0);
708 tem2.opcode = TREE_CODE (tem2.op0);
709 vro2 = &tem2;
710 deref2 = false;
712 if (deref1 != deref2)
713 return false;
714 if (!vn_reference_op_eq (vro1, vro2))
715 return false;
716 ++j;
717 ++i;
719 while (vr1->operands.length () != i
720 || vr2->operands.length () != j);
722 return true;
725 /* Copy the operations present in load/store REF into RESULT, a vector of
726 vn_reference_op_s's. */
728 void
729 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
731 if (TREE_CODE (ref) == TARGET_MEM_REF)
733 vn_reference_op_s temp;
735 result->reserve (3);
737 memset (&temp, 0, sizeof (temp));
738 temp.type = TREE_TYPE (ref);
739 temp.opcode = TREE_CODE (ref);
740 temp.op0 = TMR_INDEX (ref);
741 temp.op1 = TMR_STEP (ref);
742 temp.op2 = TMR_OFFSET (ref);
743 temp.off = -1;
744 result->quick_push (temp);
746 memset (&temp, 0, sizeof (temp));
747 temp.type = NULL_TREE;
748 temp.opcode = ERROR_MARK;
749 temp.op0 = TMR_INDEX2 (ref);
750 temp.off = -1;
751 result->quick_push (temp);
753 memset (&temp, 0, sizeof (temp));
754 temp.type = NULL_TREE;
755 temp.opcode = TREE_CODE (TMR_BASE (ref));
756 temp.op0 = TMR_BASE (ref);
757 temp.off = -1;
758 result->quick_push (temp);
759 return;
762 /* For non-calls, store the information that makes up the address. */
763 tree orig = ref;
764 while (ref)
766 vn_reference_op_s temp;
768 memset (&temp, 0, sizeof (temp));
769 temp.type = TREE_TYPE (ref);
770 temp.opcode = TREE_CODE (ref);
771 temp.off = -1;
773 switch (temp.opcode)
775 case MODIFY_EXPR:
776 temp.op0 = TREE_OPERAND (ref, 1);
777 break;
778 case WITH_SIZE_EXPR:
779 temp.op0 = TREE_OPERAND (ref, 1);
780 temp.off = 0;
781 break;
782 case MEM_REF:
783 /* The base address gets its own vn_reference_op_s structure. */
784 temp.op0 = TREE_OPERAND (ref, 1);
785 if (tree_fits_shwi_p (TREE_OPERAND (ref, 1)))
786 temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
787 break;
788 case BIT_FIELD_REF:
789 /* Record bits and position. */
790 temp.op0 = TREE_OPERAND (ref, 1);
791 temp.op1 = TREE_OPERAND (ref, 2);
792 break;
793 case COMPONENT_REF:
794 /* The field decl is enough to unambiguously specify the field;
795 a matching type is not necessary, and a mismatching type
796 is always a spurious difference. */
797 temp.type = NULL_TREE;
798 temp.op0 = TREE_OPERAND (ref, 1);
799 temp.op1 = TREE_OPERAND (ref, 2);
801 tree this_offset = component_ref_field_offset (ref);
802 if (this_offset
803 && TREE_CODE (this_offset) == INTEGER_CST)
805 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
806 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
808 double_int off
809 = tree_to_double_int (this_offset)
810 + tree_to_double_int (bit_offset)
811 .rshift (BITS_PER_UNIT == 8
812 ? 3 : exact_log2 (BITS_PER_UNIT));
813 if (off.fits_shwi ()
814 /* Prohibit value-numbering zero-offset components
815 of addresses the same before the pass folding
816 __builtin_object_size has had a chance to run
817 (checking cfun->after_inlining does the
818 trick here). */
819 && (TREE_CODE (orig) != ADDR_EXPR
820 || !off.is_zero ()
821 || cfun->after_inlining))
822 temp.off = off.low;
826 break;
827 case ARRAY_RANGE_REF:
828 case ARRAY_REF:
829 /* Record index as operand. */
830 temp.op0 = TREE_OPERAND (ref, 1);
831 /* Always record lower bounds and element size. */
832 temp.op1 = array_ref_low_bound (ref);
833 temp.op2 = array_ref_element_size (ref);
834 if (TREE_CODE (temp.op0) == INTEGER_CST
835 && TREE_CODE (temp.op1) == INTEGER_CST
836 && TREE_CODE (temp.op2) == INTEGER_CST)
838 double_int off = tree_to_double_int (temp.op0);
839 off += -tree_to_double_int (temp.op1);
840 off *= tree_to_double_int (temp.op2);
841 if (off.fits_shwi ())
842 temp.off = off.low;
844 break;
845 case VAR_DECL:
846 if (DECL_HARD_REGISTER (ref))
848 temp.op0 = ref;
849 break;
851 /* Fallthru. */
852 case PARM_DECL:
853 case CONST_DECL:
854 case RESULT_DECL:
855 /* Canonicalize decls to MEM[&decl] which is what we end up with
856 when valueizing MEM[ptr] with ptr = &decl. */
857 temp.opcode = MEM_REF;
858 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
859 temp.off = 0;
860 result->safe_push (temp);
861 temp.opcode = ADDR_EXPR;
862 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
863 temp.type = TREE_TYPE (temp.op0);
864 temp.off = -1;
865 break;
866 case STRING_CST:
867 case INTEGER_CST:
868 case COMPLEX_CST:
869 case VECTOR_CST:
870 case REAL_CST:
871 case FIXED_CST:
872 case CONSTRUCTOR:
873 case SSA_NAME:
874 temp.op0 = ref;
875 break;
876 case ADDR_EXPR:
877 if (is_gimple_min_invariant (ref))
879 temp.op0 = ref;
880 break;
882 /* Fallthrough. */
883 /* These are only interesting for their operands, their
884 existence, and their type. They will never be the last
885 ref in the chain of references (i.e. they require an
886 operand), so we don't have to put anything
887 for op* as it will be handled by the iteration. */
888 case REALPART_EXPR:
889 case VIEW_CONVERT_EXPR:
890 temp.off = 0;
891 break;
892 case IMAGPART_EXPR:
893 /* This is only interesting for its constant offset. */
894 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
895 break;
896 default:
897 gcc_unreachable ();
899 result->safe_push (temp);
901 if (REFERENCE_CLASS_P (ref)
902 || TREE_CODE (ref) == MODIFY_EXPR
903 || TREE_CODE (ref) == WITH_SIZE_EXPR
904 || (TREE_CODE (ref) == ADDR_EXPR
905 && !is_gimple_min_invariant (ref)))
906 ref = TREE_OPERAND (ref, 0);
907 else
908 ref = NULL_TREE;
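
/* For example (schematic): for the reference a.f[i_1], with a a plain
   VAR_DECL, the resulting operand vector is, outermost first:

     { ARRAY_REF (op0 = i_1, op1 = low bound, op2 = element size),
       COMPONENT_REF (op0 = FIELD_DECL f),
       MEM_REF (op0 = 0, off = 0),
       ADDR_EXPR (op0 = &a) }

   where the trailing MEM_REF/ADDR_EXPR pair is the decl
   canonicalization performed by the VAR_DECL case above.  */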
912 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
913 operands in *OPS, the reference alias set SET and the reference type TYPE.
914 Return true if something useful was produced. */
916 bool
917 ao_ref_init_from_vn_reference (ao_ref *ref,
918 alias_set_type set, tree type,
919 vec<vn_reference_op_s> ops)
921 vn_reference_op_t op;
922 unsigned i;
923 tree base = NULL_TREE;
924 tree *op0_p = &base;
925 HOST_WIDE_INT offset = 0;
926 HOST_WIDE_INT max_size;
927 HOST_WIDE_INT size = -1;
928 tree size_tree = NULL_TREE;
929 alias_set_type base_alias_set = -1;
931 /* First get the final access size from just the outermost expression. */
932 op = &ops[0];
933 if (op->opcode == COMPONENT_REF)
934 size_tree = DECL_SIZE (op->op0);
935 else if (op->opcode == BIT_FIELD_REF)
936 size_tree = op->op0;
937 else
939 enum machine_mode mode = TYPE_MODE (type);
940 if (mode == BLKmode)
941 size_tree = TYPE_SIZE (type);
942 else
943 size = GET_MODE_BITSIZE (mode);
945 if (size_tree != NULL_TREE)
947 if (!tree_fits_uhwi_p (size_tree))
948 size = -1;
949 else
950 size = TREE_INT_CST_LOW (size_tree);
953 /* Initially, maxsize is the same as the accessed element size.
954 In the following it will only grow (or become -1). */
955 max_size = size;
957 /* Compute cumulative bit-offset for nested component-refs and array-refs,
958 and find the ultimate containing object. */
959 FOR_EACH_VEC_ELT (ops, i, op)
961 switch (op->opcode)
963 /* These may be in the reference ops, but we cannot do anything
964 sensible with them here. */
965 case ADDR_EXPR:
966 /* Apart from ADDR_EXPR arguments to MEM_REF. */
967 if (base != NULL_TREE
968 && TREE_CODE (base) == MEM_REF
969 && op->op0
970 && DECL_P (TREE_OPERAND (op->op0, 0)))
972 vn_reference_op_t pop = &ops[i-1];
973 base = TREE_OPERAND (op->op0, 0);
974 if (pop->off == -1)
976 max_size = -1;
977 offset = 0;
979 else
980 offset += pop->off * BITS_PER_UNIT;
981 op0_p = NULL;
982 break;
984 /* Fallthru. */
985 case CALL_EXPR:
986 return false;
988 /* Record the base objects. */
989 case MEM_REF:
990 base_alias_set = get_deref_alias_set (op->op0);
991 *op0_p = build2 (MEM_REF, op->type,
992 NULL_TREE, op->op0);
993 op0_p = &TREE_OPERAND (*op0_p, 0);
994 break;
996 case VAR_DECL:
997 case PARM_DECL:
998 case RESULT_DECL:
999 case SSA_NAME:
1000 *op0_p = op->op0;
1001 op0_p = NULL;
1002 break;
1004 /* And now the usual component-reference style ops. */
1005 case BIT_FIELD_REF:
1006 offset += tree_to_shwi (op->op1);
1007 break;
1009 case COMPONENT_REF:
1011 tree field = op->op0;
1012 /* We do not have a complete COMPONENT_REF tree here so we
1013 cannot use component_ref_field_offset. Do the interesting
1014 parts manually. */
1016 if (op->op1
1017 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
1018 max_size = -1;
1019 else
1021 offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1022 * BITS_PER_UNIT);
1023 offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1025 break;
1028 case ARRAY_RANGE_REF:
1029 case ARRAY_REF:
1030 /* We recorded the lower bound and the element size. */
1031 if (!tree_fits_shwi_p (op->op0)
1032 || !tree_fits_shwi_p (op->op1)
1033 || !tree_fits_shwi_p (op->op2))
1034 max_size = -1;
1035 else
1037 HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
1038 hindex -= TREE_INT_CST_LOW (op->op1);
1039 hindex *= TREE_INT_CST_LOW (op->op2);
1040 hindex *= BITS_PER_UNIT;
1041 offset += hindex;
1043 break;
1045 case REALPART_EXPR:
1046 break;
1048 case IMAGPART_EXPR:
1049 offset += size;
1050 break;
1052 case VIEW_CONVERT_EXPR:
1053 break;
1055 case STRING_CST:
1056 case INTEGER_CST:
1057 case COMPLEX_CST:
1058 case VECTOR_CST:
1059 case REAL_CST:
1060 case CONSTRUCTOR:
1061 case CONST_DECL:
1062 return false;
1064 default:
1065 return false;
1069 if (base == NULL_TREE)
1070 return false;
1072 ref->ref = NULL_TREE;
1073 ref->base = base;
1074 ref->offset = offset;
1075 ref->size = size;
1076 ref->max_size = max_size;
1077 ref->ref_alias_set = set;
1078 if (base_alias_set != -1)
1079 ref->base_alias_set = base_alias_set;
1080 else
1081 ref->base_alias_set = get_alias_set (base);
1082 /* We discount volatiles from value-numbering elsewhere. */
1083 ref->volatile_p = false;
1085 return true;
1088 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1089 vn_reference_op_s's. */
1091 void
1092 copy_reference_ops_from_call (gimple call,
1093 vec<vn_reference_op_s> *result)
1095 vn_reference_op_s temp;
1096 unsigned i;
1097 tree lhs = gimple_call_lhs (call);
1099 /* If two calls have different non-SSA lhs, their vdef value numbers
1100 should differ. By adding the lhs to the vector here, we ensure that
1101 the hashcode differs, guaranteeing a different value number. */
1102 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1104 memset (&temp, 0, sizeof (temp));
1105 temp.opcode = MODIFY_EXPR;
1106 temp.type = TREE_TYPE (lhs);
1107 temp.op0 = lhs;
1108 temp.off = -1;
1109 result->safe_push (temp);
1112 /* Copy the type, opcode, function being called and static chain. */
1113 memset (&temp, 0, sizeof (temp));
1114 temp.type = gimple_call_return_type (call);
1115 temp.opcode = CALL_EXPR;
1116 temp.op0 = gimple_call_fn (call);
1117 temp.op1 = gimple_call_chain (call);
1118 temp.off = -1;
1119 result->safe_push (temp);
1121 /* Copy the call arguments. As they can be references as well,
1122 just chain them together. */
1123 for (i = 0; i < gimple_call_num_args (call); ++i)
1125 tree callarg = gimple_call_arg (call, i);
1126 copy_reference_ops_from_ref (callarg, result);
1130 /* Create a vector of vn_reference_op_s structures from CALL, a
1131 call statement. The vector is not shared. */
1133 static vec<vn_reference_op_s>
1134 create_reference_ops_from_call (gimple call)
1136 vec<vn_reference_op_s> result = vNULL;
1138 copy_reference_ops_from_call (call, &result);
1139 return result;
1142 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1143 *I_P to point to the last element of the replacement. */
1144 void
1145 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1146 unsigned int *i_p)
1148 unsigned int i = *i_p;
1149 vn_reference_op_t op = &(*ops)[i];
1150 vn_reference_op_t mem_op = &(*ops)[i - 1];
1151 tree addr_base;
1152 HOST_WIDE_INT addr_offset = 0;
1154 /* All we have to do is add the offset of .foo.bar within
1155 &OBJ.foo.bar to the preceding MEM_REF offset and replace the
1156 address with &OBJ. */
1157 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1158 &addr_offset);
1159 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1160 if (addr_base != TREE_OPERAND (op->op0, 0))
1162 double_int off = tree_to_double_int (mem_op->op0);
1163 off = off.sext (TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
1164 off += double_int::from_shwi (addr_offset);
1165 mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
1166 op->op0 = build_fold_addr_expr (addr_base);
1167 if (tree_fits_shwi_p (mem_op->op0))
1168 mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
1169 else
1170 mem_op->off = -1;
1174 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1175 *I_P to point to the last element of the replacement. */
1176 static void
1177 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1178 unsigned int *i_p)
1180 unsigned int i = *i_p;
1181 vn_reference_op_t op = &(*ops)[i];
1182 vn_reference_op_t mem_op = &(*ops)[i - 1];
1183 gimple def_stmt;
1184 enum tree_code code;
1185 double_int off;
1187 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1188 if (!is_gimple_assign (def_stmt))
1189 return;
1191 code = gimple_assign_rhs_code (def_stmt);
1192 if (code != ADDR_EXPR
1193 && code != POINTER_PLUS_EXPR)
1194 return;
1196 off = tree_to_double_int (mem_op->op0);
1197 off = off.sext (TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
1199 /* All we have to do is add the offset of .foo.bar within
1200 &OBJ.foo.bar to the preceding MEM_REF offset and replace the
1201 address with &OBJ. */
1202 if (code == ADDR_EXPR)
1204 tree addr, addr_base;
1205 HOST_WIDE_INT addr_offset;
1207 addr = gimple_assign_rhs1 (def_stmt);
1208 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1209 &addr_offset);
1210 if (!addr_base
1211 || TREE_CODE (addr_base) != MEM_REF)
1212 return;
1214 off += double_int::from_shwi (addr_offset);
1215 off += mem_ref_offset (addr_base);
1216 op->op0 = TREE_OPERAND (addr_base, 0);
1218 else
1220 tree ptr, ptroff;
1221 ptr = gimple_assign_rhs1 (def_stmt);
1222 ptroff = gimple_assign_rhs2 (def_stmt);
1223 if (TREE_CODE (ptr) != SSA_NAME
1224 || TREE_CODE (ptroff) != INTEGER_CST)
1225 return;
1227 off += tree_to_double_int (ptroff);
1228 op->op0 = ptr;
1231 mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
1232 if (tree_fits_shwi_p (mem_op->op0))
1233 mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
1234 else
1235 mem_op->off = -1;
1236 if (TREE_CODE (op->op0) == SSA_NAME)
1237 op->op0 = SSA_VAL (op->op0);
1238 if (TREE_CODE (op->op0) != SSA_NAME)
1239 op->opcode = TREE_CODE (op->op0);
1241 /* And recurse. */
1242 if (TREE_CODE (op->op0) == SSA_NAME)
1243 vn_reference_maybe_forwprop_address (ops, i_p);
1244 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1245 vn_reference_fold_indirect (ops, i_p);
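
/* For example (schematic): given

     ptr_1 = ptr_2 + 4;
     ... = MEM[ptr_1 + 8];

   the POINTER_PLUS_EXPR case above rewrites the operands to the
   equivalent MEM[ptr_2 + 12], accumulating the constant offsets into
   the MEM_REF operand.  */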
1248 /* Optimize the reference REF to a constant if possible or return
1249 NULL_TREE if not. */
1251 tree
1252 fully_constant_vn_reference_p (vn_reference_t ref)
1254 vec<vn_reference_op_s> operands = ref->operands;
1255 vn_reference_op_t op;
1257 /* Try to simplify the translated expression if it is
1258 a call to a builtin function with at most two arguments. */
1259 op = &operands[0];
1260 if (op->opcode == CALL_EXPR
1261 && TREE_CODE (op->op0) == ADDR_EXPR
1262 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1263 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1264 && operands.length () >= 2
1265 && operands.length () <= 3)
1267 vn_reference_op_t arg0, arg1 = NULL;
1268 bool anyconst = false;
1269 arg0 = &operands[1];
1270 if (operands.length () > 2)
1271 arg1 = &operands[2];
1272 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1273 || (arg0->opcode == ADDR_EXPR
1274 && is_gimple_min_invariant (arg0->op0)))
1275 anyconst = true;
1276 if (arg1
1277 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1278 || (arg1->opcode == ADDR_EXPR
1279 && is_gimple_min_invariant (arg1->op0))))
1280 anyconst = true;
1281 if (anyconst)
1283 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1284 arg1 ? 2 : 1,
1285 arg0->op0,
1286 arg1 ? arg1->op0 : NULL);
1287 if (folded
1288 && TREE_CODE (folded) == NOP_EXPR)
1289 folded = TREE_OPERAND (folded, 0);
1290 if (folded
1291 && is_gimple_min_invariant (folded))
1292 return folded;
1296 /* Simplify reads from constant strings. */
1297 else if (op->opcode == ARRAY_REF
1298 && TREE_CODE (op->op0) == INTEGER_CST
1299 && integer_zerop (op->op1)
1300 && operands.length () == 2)
1302 vn_reference_op_t arg0;
1303 arg0 = &operands[1];
1304 if (arg0->opcode == STRING_CST
1305 && (TYPE_MODE (op->type)
1306 == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
1307 && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
1308 && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
1309 && tree_int_cst_sgn (op->op0) >= 0
1310 && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
1311 return build_int_cst_type (op->type,
1312 (TREE_STRING_POINTER (arg0->op0)
1313 [TREE_INT_CST_LOW (op->op0)]));
1316 return NULL_TREE;
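
/* For example (schematic): a read of "hello"[1] arrives here as an
   ARRAY_REF operand with constant index 1 over a STRING_CST operand,
   and the string case above folds it to the character constant 'e'.  */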
1319 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1320 structures into their value numbers. This is done in-place, and
1321 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1322 whether any operands were valueized. */
1324 static vec<vn_reference_op_s>
1325 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1327 vn_reference_op_t vro;
1328 unsigned int i;
1330 *valueized_anything = false;
1332 FOR_EACH_VEC_ELT (orig, i, vro)
1334 if (vro->opcode == SSA_NAME
1335 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1337 tree tem = SSA_VAL (vro->op0);
1338 if (tem != vro->op0)
1340 *valueized_anything = true;
1341 vro->op0 = tem;
1343 /* If it transforms from an SSA_NAME to a constant, update
1344 the opcode. */
1345 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1346 vro->opcode = TREE_CODE (vro->op0);
1348 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1350 tree tem = SSA_VAL (vro->op1);
1351 if (tem != vro->op1)
1353 *valueized_anything = true;
1354 vro->op1 = tem;
1357 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1359 tree tem = SSA_VAL (vro->op2);
1360 if (tem != vro->op2)
1362 *valueized_anything = true;
1363 vro->op2 = tem;
1366 /* If it transforms from an SSA_NAME to an address, fold with
1367 a preceding indirect reference. */
1368 if (i > 0
1369 && vro->op0
1370 && TREE_CODE (vro->op0) == ADDR_EXPR
1371 && orig[i - 1].opcode == MEM_REF)
1372 vn_reference_fold_indirect (&orig, &i);
1373 else if (i > 0
1374 && vro->opcode == SSA_NAME
1375 && orig[i - 1].opcode == MEM_REF)
1376 vn_reference_maybe_forwprop_address (&orig, &i);
1377 /* If it transforms a non-constant ARRAY_REF into a constant
1378 one, adjust the constant offset. */
1379 else if (vro->opcode == ARRAY_REF
1380 && vro->off == -1
1381 && TREE_CODE (vro->op0) == INTEGER_CST
1382 && TREE_CODE (vro->op1) == INTEGER_CST
1383 && TREE_CODE (vro->op2) == INTEGER_CST)
1385 double_int off = tree_to_double_int (vro->op0);
1386 off += -tree_to_double_int (vro->op1);
1387 off *= tree_to_double_int (vro->op2);
1388 if (off.fits_shwi ())
1389 vro->off = off.low;
1393 return orig;
1396 static vec<vn_reference_op_s>
1397 valueize_refs (vec<vn_reference_op_s> orig)
1399 bool tem;
1400 return valueize_refs_1 (orig, &tem);
1403 static vec<vn_reference_op_s> shared_lookup_references;
1405 /* Create a vector of vn_reference_op_s structures from REF, a
1406 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1407 this function. *VALUEIZED_ANYTHING will specify whether any
1408 operands were valueized. */
1410 static vec<vn_reference_op_s>
1411 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1413 if (!ref)
1414 return vNULL;
1415 shared_lookup_references.truncate (0);
1416 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1417 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1418 valueized_anything);
1419 return shared_lookup_references;
1422 /* Create a vector of vn_reference_op_s structures from CALL, a
1423 call statement. The vector is shared among all callers of
1424 this function. */
1426 static vec<vn_reference_op_s>
1427 valueize_shared_reference_ops_from_call (gimple call)
1429 if (!call)
1430 return vNULL;
1431 shared_lookup_references.truncate (0);
1432 copy_reference_ops_from_call (call, &shared_lookup_references);
1433 shared_lookup_references = valueize_refs (shared_lookup_references);
1434 return shared_lookup_references;
1437 /* Lookup a SCCVN reference operation VR in the current hash table.
1438 Returns the resulting value number if it exists in the hash table,
1439 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1440 vn_reference_t stored in the hashtable if something is found. */
1442 static tree
1443 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1445 vn_reference_s **slot;
1446 hashval_t hash;
1448 hash = vr->hashcode;
1449 slot = current_info->references.find_slot_with_hash (vr, hash, NO_INSERT);
1450 if (!slot && current_info == optimistic_info)
1451 slot = valid_info->references.find_slot_with_hash (vr, hash, NO_INSERT);
1452 if (slot)
1454 if (vnresult)
1455 *vnresult = (vn_reference_t)*slot;
1456 return ((vn_reference_t)*slot)->result;
1459 return NULL_TREE;
1462 static tree *last_vuse_ptr;
1463 static vn_lookup_kind vn_walk_kind;
1464 static vn_lookup_kind default_vn_walk_kind;
1466 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1467 with the current VUSE and performs the expression lookup. */
1469 static void *
1470 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1471 unsigned int cnt, void *vr_)
1473 vn_reference_t vr = (vn_reference_t)vr_;
1474 vn_reference_s **slot;
1475 hashval_t hash;
1477 /* This bounds the stmt walks we perform on reference lookups
1478 to O(1) instead of O(N) where N is the number of dominating
1479 stores. */
1480 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1481 return (void *)-1;
1483 if (last_vuse_ptr)
1484 *last_vuse_ptr = vuse;
1486 /* Fixup vuse and hash: the vuse is mixed into the hash additively (see vn_reference_compute_hash), so it can be adjusted in place. */
1487 if (vr->vuse)
1488 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1489 vr->vuse = SSA_VAL (vuse);
1490 if (vr->vuse)
1491 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1493 hash = vr->hashcode;
1494 slot = current_info->references.find_slot_with_hash (vr, hash, NO_INSERT);
1495 if (!slot && current_info == optimistic_info)
1496 slot = valid_info->references.find_slot_with_hash (vr, hash, NO_INSERT);
1497 if (slot)
1498 return *slot;
1500 return NULL;
1503 /* Lookup an existing or insert a new vn_reference entry into the
1504 value table for the VUSE, SET, TYPE, OPERANDS reference which
1505 has the value VALUE which is either a constant or an SSA name. */
1507 static vn_reference_t
1508 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1509 alias_set_type set,
1510 tree type,
1511 vec<vn_reference_op_s,
1512 va_heap> operands,
1513 tree value)
1515 struct vn_reference_s vr1;
1516 vn_reference_t result;
1517 unsigned value_id;
1518 vr1.vuse = vuse;
1519 vr1.operands = operands;
1520 vr1.type = type;
1521 vr1.set = set;
1522 vr1.hashcode = vn_reference_compute_hash (&vr1);
1523 if (vn_reference_lookup_1 (&vr1, &result))
1524 return result;
1525 if (TREE_CODE (value) == SSA_NAME)
1526 value_id = VN_INFO (value)->value_id;
1527 else
1528 value_id = get_or_alloc_constant_value_id (value);
1529 return vn_reference_insert_pieces (vuse, set, type,
1530 operands.copy (), value, value_id);
1533 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1534 from the statement defining VUSE and if not successful tries to
1535 translate *REFP and VR_ through an aggregate copy at the definition
1536 of VUSE. */
1538 static void *
1539 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
1541 vn_reference_t vr = (vn_reference_t)vr_;
1542 gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
1543 tree base;
1544 HOST_WIDE_INT offset, maxsize;
1545 static vec<vn_reference_op_s>
1546 lhs_ops = vNULL;
1547 ao_ref lhs_ref;
1548 bool lhs_ref_ok = false;
1550 /* First try to disambiguate after value-replacing in the definitions LHS. */
1551 if (is_gimple_assign (def_stmt))
1553 vec<vn_reference_op_s> tem;
1554 tree lhs = gimple_assign_lhs (def_stmt);
1555 bool valueized_anything = false;
1556 /* Avoid re-allocation overhead. */
1557 lhs_ops.truncate (0);
1558 copy_reference_ops_from_ref (lhs, &lhs_ops);
1559 tem = lhs_ops;
1560 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1561 gcc_assert (lhs_ops == tem);
1562 if (valueized_anything)
1564 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1565 get_alias_set (lhs),
1566 TREE_TYPE (lhs), lhs_ops);
1567 if (lhs_ref_ok
1568 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1569 return NULL;
1571 else
1573 ao_ref_init (&lhs_ref, lhs);
1574 lhs_ref_ok = true;
1578 base = ao_ref_base (ref);
1579 offset = ref->offset;
1580 maxsize = ref->max_size;
1582 /* If we cannot constrain the size of the reference we cannot
1583 test if anything kills it. */
1584 if (maxsize == -1)
1585 return (void *)-1;
1587 /* We can't deduce anything useful from clobbers. */
1588 if (gimple_clobber_p (def_stmt))
1589 return (void *)-1;
1591 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1592 from that definition.
1593 1) Memset. */
1594 if (is_gimple_reg_type (vr->type)
1595 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1596 && integer_zerop (gimple_call_arg (def_stmt, 1))
1597 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1598 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1600 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1601 tree base2;
1602 HOST_WIDE_INT offset2, size2, maxsize2;
1603 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
1604 size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
1605 if ((unsigned HOST_WIDE_INT)size2 / 8
1606 == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
1607 && maxsize2 != -1
1608 && operand_equal_p (base, base2, 0)
1609 && offset2 <= offset
1610 && offset2 + size2 >= offset + maxsize)
1612 tree val = build_zero_cst (vr->type);
1613 return vn_reference_lookup_or_insert_for_pieces
1614 (vuse, vr->set, vr->type, vr->operands, val);
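
  /* For example (schematic): after

       memset (&a, 0, sizeof (a));

     a later in-bounds scalar read from a is covered by the memset and
     value-numbers to build_zero_cst of its type.  */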
1618 /* 2) Assignment from an empty CONSTRUCTOR. */
1619 else if (is_gimple_reg_type (vr->type)
1620 && gimple_assign_single_p (def_stmt)
1621 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1622 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1624 tree base2;
1625 HOST_WIDE_INT offset2, size2, maxsize2;
1626 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1627 &offset2, &size2, &maxsize2);
1628 if (maxsize2 != -1
1629 && operand_equal_p (base, base2, 0)
1630 && offset2 <= offset
1631 && offset2 + size2 >= offset + maxsize)
1633 tree val = build_zero_cst (vr->type);
1634 return vn_reference_lookup_or_insert_for_pieces
1635 (vuse, vr->set, vr->type, vr->operands, val);
1639 /* 3) Assignment from a constant. We can use folds native encode/interpret
1640 routines to extract the assigned bits. */
1641 else if (vn_walk_kind == VN_WALKREWRITE
1642 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1643 && ref->size == maxsize
1644 && maxsize % BITS_PER_UNIT == 0
1645 && offset % BITS_PER_UNIT == 0
1646 && is_gimple_reg_type (vr->type)
1647 && gimple_assign_single_p (def_stmt)
1648 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
1650 tree base2;
1651 HOST_WIDE_INT offset2, size2, maxsize2;
1652 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1653 &offset2, &size2, &maxsize2);
1654 if (maxsize2 != -1
1655 && maxsize2 == size2
1656 && size2 % BITS_PER_UNIT == 0
1657 && offset2 % BITS_PER_UNIT == 0
1658 && operand_equal_p (base, base2, 0)
1659 && offset2 <= offset
1660 && offset2 + size2 >= offset + maxsize)
1662 /* We support up to 512-bit values (for V8DFmode). */
1663 unsigned char buffer[64];
1664 int len;
1666 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1667 buffer, sizeof (buffer));
1668 if (len > 0)
1670 tree val = native_interpret_expr (vr->type,
1671 buffer
1672 + ((offset - offset2)
1673 / BITS_PER_UNIT),
1674 ref->size / BITS_PER_UNIT);
1675 if (val)
1676 return vn_reference_lookup_or_insert_for_pieces
1677 (vuse, vr->set, vr->type, vr->operands, val);
1682 /* 4) Assignment from an SSA name which definition we may be able
1683 to access pieces from. */
1684 else if (ref->size == maxsize
1685 && is_gimple_reg_type (vr->type)
1686 && gimple_assign_single_p (def_stmt)
1687 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1689 tree rhs1 = gimple_assign_rhs1 (def_stmt);
1690 gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
1691 if (is_gimple_assign (def_stmt2)
1692 && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1693 || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1694 && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1696 tree base2;
1697 HOST_WIDE_INT offset2, size2, maxsize2, off;
1698 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1699 &offset2, &size2, &maxsize2);
1700 off = offset - offset2;
1701 if (maxsize2 != -1
1702 && maxsize2 == size2
1703 && operand_equal_p (base, base2, 0)
1704 && offset2 <= offset
1705 && offset2 + size2 >= offset + maxsize)
1707 tree val = NULL_TREE;
1708 HOST_WIDE_INT elsz
1709 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
1710 if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
1712 if (off == 0)
1713 val = gimple_assign_rhs1 (def_stmt2);
1714 else if (off == elsz)
1715 val = gimple_assign_rhs2 (def_stmt2);
1717 else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1718 && off % elsz == 0)
1720 tree ctor = gimple_assign_rhs1 (def_stmt2);
1721 unsigned i = off / elsz;
1722 if (i < CONSTRUCTOR_NELTS (ctor))
1724 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
1725 if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
1727 if (TREE_CODE (TREE_TYPE (elt->value))
1728 != VECTOR_TYPE)
1729 val = elt->value;
1733 if (val)
1734 return vn_reference_lookup_or_insert_for_pieces
1735 (vuse, vr->set, vr->type, vr->operands, val);
1740 /* 5) For aggregate copies translate the reference through them if
1741 the copy kills ref. */
1742 else if (vn_walk_kind == VN_WALKREWRITE
1743 && gimple_assign_single_p (def_stmt)
1744 && (DECL_P (gimple_assign_rhs1 (def_stmt))
1745 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
1746 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1748 tree base2;
1749 HOST_WIDE_INT offset2, size2, maxsize2;
1750 int i, j;
1751 vec<vn_reference_op_s>
1752 rhs = vNULL;
1753 vn_reference_op_t vro;
1754 ao_ref r;
1756 if (!lhs_ref_ok)
1757 return (void *)-1;
1759 /* See if the assignment kills REF. */
1760 base2 = ao_ref_base (&lhs_ref);
1761 offset2 = lhs_ref.offset;
1762 size2 = lhs_ref.size;
1763 maxsize2 = lhs_ref.max_size;
1764 if (maxsize2 == -1
1765 || (base != base2 && !operand_equal_p (base, base2, 0))
1766 || offset2 > offset
1767 || offset2 + size2 < offset + maxsize)
1768 return (void *)-1;
1770 /* Find the common base of ref and the lhs. lhs_ops already
1771 contains valueized operands for the lhs. */
1772 i = vr->operands.length () - 1;
1773 j = lhs_ops.length () - 1;
1774 while (j >= 0 && i >= 0
1775 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
1777 i--;
1778 j--;
1781 /* ??? The innermost op should always be a MEM_REF and we already
1782 checked that the assignment to the lhs kills vr. Thus for
1783 aggregate copies using char[] types the vn_reference_op_eq
1784 may fail when comparing types for compatibility. But we really
1785 don't care here - further lookups with the rewritten operands
1786 will simply fail if we messed up types too badly. */
1787 if (j == 0 && i >= 0
1788 && lhs_ops[0].opcode == MEM_REF
1789 && lhs_ops[0].off != -1
1790 && (lhs_ops[0].off == vr->operands[i].off))
1791 i--, j--;
1793 /* i now points to the first additional op.
1794 ??? LHS may not be completely contained in VR, one or more
1795 VIEW_CONVERT_EXPRs could be in its way. We could at least
1796 try handling outermost VIEW_CONVERT_EXPRs. */
1797 if (j != -1)
1798 return (void *)-1;
1800 /* Now re-write REF to be based on the rhs of the assignment. */
1801 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1802 /* We need to pre-pend vr->operands[0..i] to rhs. */
1803 if (i + 1 + rhs.length () > vr->operands.length ())
1805 vec<vn_reference_op_s> old = vr->operands;
1806 vr->operands.safe_grow (i + 1 + rhs.length ());
1807 if (old == shared_lookup_references
1808 && vr->operands != old)
1809 shared_lookup_references = vNULL;
1811 else
1812 vr->operands.truncate (i + 1 + rhs.length ());
1813 FOR_EACH_VEC_ELT (rhs, j, vro)
1814 vr->operands[i + 1 + j] = *vro;
1815 rhs.release ();
1816 vr->operands = valueize_refs (vr->operands);
1817 vr->hashcode = vn_reference_compute_hash (vr);
1819 /* Adjust *ref from the new operands. */
1820 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1821 return (void *)-1;
1822 /* This can happen with bitfields. */
1823 if (ref->size != r.size)
1824 return (void *)-1;
1825 *ref = r;
1827 /* Do not update last seen VUSE after translating. */
1828 last_vuse_ptr = NULL;
1830 /* Keep looking for the adjusted *REF / VR pair. */
1831 return NULL;
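
  /* For example (schematic): after the aggregate copy

       a = b;

     a lookup of a.f is rewritten above into a lookup of b.f by
     splicing the rhs operands under the common base, and the walk
     continues with the translated reference.  */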
1834 /* 6) For memcpy copies translate the reference through them if
1835 the copy kills ref. */
1836 else if (vn_walk_kind == VN_WALKREWRITE
1837 && is_gimple_reg_type (vr->type)
1838 /* ??? Handle BCOPY as well. */
1839 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
1840 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
1841 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
1842 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
1843 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
1844 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
1845 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
1846 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
1848 tree lhs, rhs;
1849 ao_ref r;
1850 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
1851 vn_reference_op_s op;
1852 HOST_WIDE_INT at;
1855 /* Only handle non-variable, addressable refs. */
1856 if (ref->size != maxsize
1857 || offset % BITS_PER_UNIT != 0
1858 || ref->size % BITS_PER_UNIT != 0)
1859 return (void *)-1;
1861 /* Extract a pointer base and an offset for the destination. */
1862 lhs = gimple_call_arg (def_stmt, 0);
1863 lhs_offset = 0;
1864 if (TREE_CODE (lhs) == SSA_NAME)
1865 lhs = SSA_VAL (lhs);
1866 if (TREE_CODE (lhs) == ADDR_EXPR)
1868 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
1869 &lhs_offset);
1870 if (!tem)
1871 return (void *)-1;
1872 if (TREE_CODE (tem) == MEM_REF
1873 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
1875 lhs = TREE_OPERAND (tem, 0);
1876 lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
1878 else if (DECL_P (tem))
1879 lhs = build_fold_addr_expr (tem);
1880 else
1881 return (void *)-1;
1883 if (TREE_CODE (lhs) != SSA_NAME
1884 && TREE_CODE (lhs) != ADDR_EXPR)
1885 return (void *)-1;
1887 /* Extract a pointer base and an offset for the source. */
1888 rhs = gimple_call_arg (def_stmt, 1);
1889 rhs_offset = 0;
1890 if (TREE_CODE (rhs) == SSA_NAME)
1891 rhs = SSA_VAL (rhs);
1892 if (TREE_CODE (rhs) == ADDR_EXPR)
1894 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
1895 &rhs_offset);
1896 if (!tem)
1897 return (void *)-1;
1898 if (TREE_CODE (tem) == MEM_REF
1899 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
1901 rhs = TREE_OPERAND (tem, 0);
1902 rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
1904 else if (DECL_P (tem))
1905 rhs = build_fold_addr_expr (tem);
1906 else
1907 return (void *)-1;
1909 if (TREE_CODE (rhs) != SSA_NAME
1910 && TREE_CODE (rhs) != ADDR_EXPR)
1911 return (void *)-1;
1913 copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));
1915 /* The bases of the destination and the references have to agree. */
1916 if ((TREE_CODE (base) != MEM_REF
1917 && !DECL_P (base))
1918 || (TREE_CODE (base) == MEM_REF
1919 && (TREE_OPERAND (base, 0) != lhs
1920 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
1921 || (DECL_P (base)
1922 && (TREE_CODE (lhs) != ADDR_EXPR
1923 || TREE_OPERAND (lhs, 0) != base)))
1924 return (void *)-1;
1926 /* And the access has to be contained within the memcpy destination. */
1927 at = offset / BITS_PER_UNIT;
1928 if (TREE_CODE (base) == MEM_REF)
1929 at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
1930 if (lhs_offset > at
1931 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
1932 return (void *)-1;
1934 /* Make room for 2 operands in the new reference. */
1935 if (vr->operands.length () < 2)
1937 vec<vn_reference_op_s> old = vr->operands;
1938 vr->operands.safe_grow_cleared (2);
1939 if (old == shared_lookup_references
1940 && vr->operands != old)
1941 shared_lookup_references.create (0);
1943 else
1944 vr->operands.truncate (2);
1946 /* The looked-through reference is a simple MEM_REF. */
1947 memset (&op, 0, sizeof (op));
1948 op.type = vr->type;
1949 op.opcode = MEM_REF;
1950 op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
1951 op.off = at - lhs_offset + rhs_offset;
1952 vr->operands[0] = op;
1953 op.type = TREE_TYPE (rhs);
1954 op.opcode = TREE_CODE (rhs);
1955 op.op0 = rhs;
1956 op.off = -1;
1957 vr->operands[1] = op;
1958 vr->hashcode = vn_reference_compute_hash (vr);
1960 /* Adjust *ref from the new operands. */
1961 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1962 return (void *)-1;
1963 /* This can happen with bitfields. */
1964 if (ref->size != r.size)
1965 return (void *)-1;
1966 *ref = r;
1968 /* Do not update last seen VUSE after translating. */
1969 last_vuse_ptr = NULL;
1971 /* Keep looking for the adjusted *REF / VR pair. */
1972 return NULL;
1975 /* Bail out and stop walking. */
1976 return (void *)-1;
1979 /* Lookup a reference operation by its parts in the current hash table.
1980 Returns the resulting value number if it exists in the hash table,
1981 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1982 vn_reference_t stored in the hashtable if something is found. */
1984 tree
1985 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
1986 vec<vn_reference_op_s> operands,
1987 vn_reference_t *vnresult, vn_lookup_kind kind)
1989 struct vn_reference_s vr1;
1990 vn_reference_t tmp;
1991 tree cst;
1993 if (!vnresult)
1994 vnresult = &tmp;
1995 *vnresult = NULL;
1997 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1998 shared_lookup_references.truncate (0);
1999 shared_lookup_references.safe_grow (operands.length ());
2000 memcpy (shared_lookup_references.address (),
2001 operands.address (),
2002 sizeof (vn_reference_op_s)
2003 * operands.length ());
2004 vr1.operands = operands = shared_lookup_references
2005 = valueize_refs (shared_lookup_references);
2006 vr1.type = type;
2007 vr1.set = set;
2008 vr1.hashcode = vn_reference_compute_hash (&vr1);
2009 if ((cst = fully_constant_vn_reference_p (&vr1)))
2010 return cst;
2012 vn_reference_lookup_1 (&vr1, vnresult);
2013 if (!*vnresult
2014 && kind != VN_NOWALK
2015 && vr1.vuse)
2017 ao_ref r;
2018 vn_walk_kind = kind;
2019 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2020 *vnresult =
2021 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2022 vn_reference_lookup_2,
2023 vn_reference_lookup_3, &vr1);
2024 if (vr1.operands != operands)
2025 vr1.operands.release ();
2028 if (*vnresult)
2029 return (*vnresult)->result;
2031 return NULL_TREE;
2034 /* Lookup OP in the current hash table, and return the resulting value
2035 number if it exists in the hash table. Return NULL_TREE if it does
2036 not exist in the hash table or if the result field of the structure
2037 was NULL. VNRESULT will be filled in with the vn_reference_t
2038 stored in the hashtable if one exists. */
2040 tree
2041 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2042 vn_reference_t *vnresult)
2044 vec<vn_reference_op_s> operands;
2045 struct vn_reference_s vr1;
2046 tree cst;
2047 bool valueized_anything;
2049 if (vnresult)
2050 *vnresult = NULL;
2052 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2053 vr1.operands = operands
2054 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2055 vr1.type = TREE_TYPE (op);
2056 vr1.set = get_alias_set (op);
2057 vr1.hashcode = vn_reference_compute_hash (&vr1);
2058 if ((cst = fully_constant_vn_reference_p (&vr1)))
2059 return cst;
2061 if (kind != VN_NOWALK
2062 && vr1.vuse)
2064 vn_reference_t wvnresult;
2065 ao_ref r;
2066 /* Make sure to use a valueized reference if we valueized anything.
2067 Otherwise preserve the full reference for advanced TBAA. */
2068 if (!valueized_anything
2069 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2070 vr1.operands))
2071 ao_ref_init (&r, op);
2072 vn_walk_kind = kind;
2073 wvnresult =
2074 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2075 vn_reference_lookup_2,
2076 vn_reference_lookup_3, &vr1);
2077 if (vr1.operands != operands)
2078 vr1.operands.release ();
2079 if (wvnresult)
2081 if (vnresult)
2082 *vnresult = wvnresult;
2083 return wvnresult->result;
2086 return NULL_TREE;
2089 return vn_reference_lookup_1 (&vr1, vnresult);
2093 /* Insert OP into the current hash table with a value number of
2094 RESULT, and return the resulting reference structure we created. */
2096 vn_reference_t
2097 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2099 vn_reference_s **slot;
2100 vn_reference_t vr1;
2101 bool tem;
2103 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2104 if (TREE_CODE (result) == SSA_NAME)
2105 vr1->value_id = VN_INFO (result)->value_id;
2106 else
2107 vr1->value_id = get_or_alloc_constant_value_id (result);
2108 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2109 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2110 vr1->type = TREE_TYPE (op);
2111 vr1->set = get_alias_set (op);
2112 vr1->hashcode = vn_reference_compute_hash (vr1);
2113 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2114 vr1->result_vdef = vdef;
2116 slot = current_info->references.find_slot_with_hash (vr1, vr1->hashcode,
2117 INSERT);
2119 /* Because we lookup stores using vuses, and value number failures
2120 using the vdefs (see visit_reference_op_store for how and why),
2121 it's possible that on failure we may try to insert an already
2122 inserted store. This is not wrong; there is no SSA name for a
2123 store that we could use as a differentiator anyway. Thus, unlike
2124 the other lookup functions, you cannot gcc_assert (!*slot)
2125 here. */
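/* For example (a sketch): re-visiting the same store during
   optimistic SCC iteration can fail the lookup the same way twice
   and try to re-insert the reference it already put here.  */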
2127 /* But free the old slot in case of a collision. */
2128 if (*slot)
2129 free_reference (*slot);
2131 *slot = vr1;
2132 return vr1;
2135 /* Insert a reference by its pieces into the current hash table with
2136 a value number of RESULT. Return the resulting reference
2137 structure we created. */
2139 vn_reference_t
2140 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2141 vec<vn_reference_op_s> operands,
2142 tree result, unsigned int value_id)
2145 vn_reference_s **slot;
2146 vn_reference_t vr1;
2148 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2149 vr1->value_id = value_id;
2150 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2151 vr1->operands = valueize_refs (operands);
2152 vr1->type = type;
2153 vr1->set = set;
2154 vr1->hashcode = vn_reference_compute_hash (vr1);
2155 if (result && TREE_CODE (result) == SSA_NAME)
2156 result = SSA_VAL (result);
2157 vr1->result = result;
2159 slot = current_info->references.find_slot_with_hash (vr1, vr1->hashcode,
2160 INSERT);
2162 /* At this point we should have all the things inserted that we have
2163 seen before, and we should never try inserting something that
2164 already exists. */
2165 gcc_assert (!*slot);
2166 if (*slot)
2167 free_reference (*slot);
2169 *slot = vr1;
2170 return vr1;
2173 /* Compute and return the hash value for nary operation VNO1. */
2175 hashval_t
2176 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2178 hashval_t hash;
2179 unsigned i;
2181 for (i = 0; i < vno1->length; ++i)
2182 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2183 vno1->op[i] = SSA_VAL (vno1->op[i]);
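/* Canonicalize operand order for commutative codes so that e.g.
   b_2 + a_1 and a_1 + b_2 hash identically and compare equal.  */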
2185 if (vno1->length == 2
2186 && commutative_tree_code (vno1->opcode)
2187 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2189 tree temp = vno1->op[0];
2190 vno1->op[0] = vno1->op[1];
2191 vno1->op[1] = temp;
2194 hash = iterative_hash_hashval_t (vno1->opcode, 0);
2195 for (i = 0; i < vno1->length; ++i)
2196 hash = iterative_hash_expr (vno1->op[i], hash);
2198 return hash;
2201 /* Compare nary operations VNO1 and VNO2 and return true if they are
2202 equivalent. */
2204 bool
2205 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2207 unsigned i;
2209 if (vno1->hashcode != vno2->hashcode)
2210 return false;
2212 if (vno1->length != vno2->length)
2213 return false;
2215 if (vno1->opcode != vno2->opcode
2216 || !types_compatible_p (vno1->type, vno2->type))
2217 return false;
2219 for (i = 0; i < vno1->length; ++i)
2220 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2221 return false;
2223 return true;
2226 /* Initialize VNO from the pieces provided. */
2228 static void
2229 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2230 enum tree_code code, tree type, tree *ops)
2232 vno->opcode = code;
2233 vno->length = length;
2234 vno->type = type;
2235 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2238 /* Initialize VNO from OP. */
2240 static void
2241 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2243 unsigned i;
2245 vno->opcode = TREE_CODE (op);
2246 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2247 vno->type = TREE_TYPE (op);
2248 for (i = 0; i < vno->length; ++i)
2249 vno->op[i] = TREE_OPERAND (op, i);
2252 /* Return the number of operands for a vn_nary ops structure from STMT. */
2254 static unsigned int
2255 vn_nary_length_from_stmt (gimple stmt)
2257 switch (gimple_assign_rhs_code (stmt))
2259 case REALPART_EXPR:
2260 case IMAGPART_EXPR:
2261 case VIEW_CONVERT_EXPR:
2262 return 1;
2264 case BIT_FIELD_REF:
2265 return 3;
2267 case CONSTRUCTOR:
2268 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2270 default:
2271 return gimple_num_ops (stmt) - 1;
2275 /* Initialize VNO from STMT. */
2277 static void
2278 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2280 unsigned i;
2282 vno->opcode = gimple_assign_rhs_code (stmt);
2283 vno->type = gimple_expr_type (stmt);
2284 switch (vno->opcode)
2286 case REALPART_EXPR:
2287 case IMAGPART_EXPR:
2288 case VIEW_CONVERT_EXPR:
2289 vno->length = 1;
2290 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2291 break;
2293 case BIT_FIELD_REF:
2294 vno->length = 3;
2295 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2296 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2297 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2298 break;
2300 case CONSTRUCTOR:
2301 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2302 for (i = 0; i < vno->length; ++i)
2303 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2304 break;
2306 default:
2307 gcc_checking_assert (!gimple_assign_single_p (stmt));
2308 vno->length = gimple_num_ops (stmt) - 1;
2309 for (i = 0; i < vno->length; ++i)
2310 vno->op[i] = gimple_op (stmt, i + 1);
2314 /* Compute the hashcode for VNO and look for it in the hash table;
2315 return the resulting value number if it exists in the hash table.
2316 Return NULL_TREE if it does not exist in the hash table or if the
2317 result field of the operation is NULL. VNRESULT will contain the
2318 vn_nary_op_t from the hashtable if it exists. */
2320 static tree
2321 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2323 vn_nary_op_s **slot;
2325 if (vnresult)
2326 *vnresult = NULL;
2328 vno->hashcode = vn_nary_op_compute_hash (vno);
2329 slot = current_info->nary.find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
2330 if (!slot && current_info == optimistic_info)
2331 slot = valid_info->nary.find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
2332 if (!slot)
2333 return NULL_TREE;
2334 if (vnresult)
2335 *vnresult = *slot;
2336 return (*slot)->result;
2339 /* Lookup an n-ary operation by its pieces and return the resulting value
2340 number if it exists in the hash table. Return NULL_TREE if it does
2341 not exist in the hash table or if the result field of the operation
2342 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2343 if it exists. */
2345 tree
2346 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2347 tree type, tree *ops, vn_nary_op_t *vnresult)
2349 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2350 sizeof_vn_nary_op (length));
2351 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2352 return vn_nary_op_lookup_1 (vno1, vnresult);
2355 /* Lookup OP in the current hash table, and return the resulting value
2356 number if it exists in the hash table. Return NULL_TREE if it does
2357 not exist in the hash table or if the result field of the operation
2358 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2359 if it exists. */
2361 tree
2362 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2364 vn_nary_op_t vno1
2365 = XALLOCAVAR (struct vn_nary_op_s,
2366 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2367 init_vn_nary_op_from_op (vno1, op);
2368 return vn_nary_op_lookup_1 (vno1, vnresult);
2371 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2372 value number if it exists in the hash table. Return NULL_TREE if
2373 it does not exist in the hash table. VNRESULT will contain the
2374 vn_nary_op_t from the hashtable if it exists. */
2376 tree
2377 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2379 vn_nary_op_t vno1
2380 = XALLOCAVAR (struct vn_nary_op_s,
2381 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2382 init_vn_nary_op_from_stmt (vno1, stmt);
2383 return vn_nary_op_lookup_1 (vno1, vnresult);
2386 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2388 static vn_nary_op_t
2389 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2391 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2394 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2395 obstack. */
2397 static vn_nary_op_t
2398 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2400 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2401 &current_info->nary_obstack);
2403 vno1->value_id = value_id;
2404 vno1->length = length;
2405 vno1->result = result;
2407 return vno1;
2410 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2411 VNO->HASHCODE first. */
2413 static vn_nary_op_t
2414 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type table,
2415 bool compute_hash)
2417 vn_nary_op_s **slot;
2419 if (compute_hash)
2420 vno->hashcode = vn_nary_op_compute_hash (vno);
2422 slot = table.find_slot_with_hash (vno, vno->hashcode, INSERT);
2423 gcc_assert (!*slot);
2425 *slot = vno;
2426 return vno;
2429 /* Insert an n-ary operation into the current hash table using its
2430 pieces. Return the vn_nary_op_t structure we created and put in
2431 the hashtable. */
2433 vn_nary_op_t
2434 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2435 tree type, tree *ops,
2436 tree result, unsigned int value_id)
2438 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2439 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2440 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2443 /* Insert OP into the current hash table with a value number of
2444 RESULT. Return the vn_nary_op_t structure we created and put in
2445 the hashtable. */
2447 vn_nary_op_t
2448 vn_nary_op_insert (tree op, tree result)
2450 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2451 vn_nary_op_t vno1;
2453 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2454 init_vn_nary_op_from_op (vno1, op);
2455 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2458 /* Insert the rhs of STMT into the current hash table with a value number of
2459 RESULT. */
2461 vn_nary_op_t
2462 vn_nary_op_insert_stmt (gimple stmt, tree result)
2464 vn_nary_op_t vno1
2465 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2466 result, VN_INFO (result)->value_id);
2467 init_vn_nary_op_from_stmt (vno1, stmt);
2468 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2471 /* Compute a hashcode for PHI operation VP1 and return it. */
2473 static inline hashval_t
2474 vn_phi_compute_hash (vn_phi_t vp1)
2476 hashval_t result;
2477 int i;
2478 tree phi1op;
2479 tree type;
2481 result = vp1->block->index;
2483 /* If all PHI arguments are constants we need to distinguish
2484 the PHI node via its type. */
2485 type = vp1->type;
2486 result += vn_hash_type (type);
2488 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2490 if (phi1op == VN_TOP)
2491 continue;
2492 result = iterative_hash_expr (phi1op, result);
2495 return result;
2498 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
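/* For example (illustrative): in the same block
     x_3 = PHI <a_1 (e1), VN_TOP (e2)>
     y_4 = PHI <a_1 (e1), b_2 (e2)>
   compare equal, because a VN_TOP argument is optimistically
   compatible with any value.  */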
2500 static int
2501 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2503 if (vp1->hashcode != vp2->hashcode)
2504 return false;
2506 if (vp1->block == vp2->block)
2508 int i;
2509 tree phi1op;
2511 /* If the PHI nodes do not have compatible types
2512 they are not the same. */
2513 if (!types_compatible_p (vp1->type, vp2->type))
2514 return false;
2516 /* Any phi in the same block will have its arguments in the
2517 same edge order, because of how we store phi nodes. */
2518 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2520 tree phi2op = vp2->phiargs[i];
2521 if (phi1op == VN_TOP || phi2op == VN_TOP)
2522 continue;
2523 if (!expressions_equal_p (phi1op, phi2op))
2524 return false;
2526 return true;
2528 return false;
2531 static vec<tree> shared_lookup_phiargs;
2533 /* Lookup PHI in the current hash table, and return the resulting
2534 value number if it exists in the hash table. Return NULL_TREE if
2535 it does not exist in the hash table. */
2537 static tree
2538 vn_phi_lookup (gimple phi)
2540 vn_phi_s **slot;
2541 struct vn_phi_s vp1;
2542 unsigned i;
2544 shared_lookup_phiargs.truncate (0);
2546 /* Canonicalize the SSA_NAME's to their value number. */
2547 for (i = 0; i < gimple_phi_num_args (phi); i++)
2549 tree def = PHI_ARG_DEF (phi, i);
2550 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2551 shared_lookup_phiargs.safe_push (def);
2553 vp1.type = TREE_TYPE (gimple_phi_result (phi));
2554 vp1.phiargs = shared_lookup_phiargs;
2555 vp1.block = gimple_bb (phi);
2556 vp1.hashcode = vn_phi_compute_hash (&vp1);
2557 slot = current_info->phis.find_slot_with_hash (&vp1, vp1.hashcode, NO_INSERT);
2558 if (!slot && current_info == optimistic_info)
2559 slot = valid_info->phis.find_slot_with_hash (&vp1, vp1.hashcode, NO_INSERT);
2560 if (!slot)
2561 return NULL_TREE;
2562 return (*slot)->result;
2565 /* Insert PHI into the current hash table with a value number of
2566 RESULT. */
2568 static vn_phi_t
2569 vn_phi_insert (gimple phi, tree result)
2571 vn_phi_s **slot;
2572 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2573 unsigned i;
2574 vec<tree> args = vNULL;
2576 /* Canonicalize the SSA_NAME's to their value number. */
2577 for (i = 0; i < gimple_phi_num_args (phi); i++)
2579 tree def = PHI_ARG_DEF (phi, i);
2580 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2581 args.safe_push (def);
2583 vp1->value_id = VN_INFO (result)->value_id;
2584 vp1->type = TREE_TYPE (gimple_phi_result (phi));
2585 vp1->phiargs = args;
2586 vp1->block = gimple_bb (phi);
2587 vp1->result = result;
2588 vp1->hashcode = vn_phi_compute_hash (vp1);
2590 slot = current_info->phis.find_slot_with_hash (vp1, vp1->hashcode, INSERT);
2592 /* Because we iterate over phi operations more than once, it's
2593 possible the slot might already exist here, hence no assert. */
2594 *slot = vp1;
2595 return vp1;
2599 /* Print set of components in strongly connected component SCC to OUT. */
2601 static void
2602 print_scc (FILE *out, vec<tree> scc)
2604 tree var;
2605 unsigned int i;
2607 fprintf (out, "SCC consists of:");
2608 FOR_EACH_VEC_ELT (scc, i, var)
2610 fprintf (out, " ");
2611 print_generic_expr (out, var, 0);
2613 fprintf (out, "\n");
2616 /* Set the value number of FROM to TO, return true if it has changed
2617 as a result. */
2619 static inline bool
2620 set_ssa_val_to (tree from, tree to)
2622 tree currval = SSA_VAL (from);
2623 HOST_WIDE_INT toff, coff;
2625 if (from != to)
2627 if (currval == from)
2629 if (dump_file && (dump_flags & TDF_DETAILS))
2631 fprintf (dump_file, "Not changing value number of ");
2632 print_generic_expr (dump_file, from, 0);
2633 fprintf (dump_file, " from VARYING to ");
2634 print_generic_expr (dump_file, to, 0);
2635 fprintf (dump_file, "\n");
2637 return false;
2639 else if (TREE_CODE (to) == SSA_NAME
2640 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2641 to = from;
2644 /* The only things we allow as value numbers are VN_TOP, SSA_NAMEs
2645 and invariants. So assert that here. */
2646 gcc_assert (to != NULL_TREE
2647 && (to == VN_TOP
2648 || TREE_CODE (to) == SSA_NAME
2649 || is_gimple_min_invariant (to)));
2651 if (dump_file && (dump_flags & TDF_DETAILS))
2653 fprintf (dump_file, "Setting value number of ");
2654 print_generic_expr (dump_file, from, 0);
2655 fprintf (dump_file, " to ");
2656 print_generic_expr (dump_file, to, 0);
2659 if (currval != to
2660 && !operand_equal_p (currval, to, 0)
2661 /* ??? For addresses involving volatile objects or types operand_equal_p
2662 does not reliably detect ADDR_EXPRs as equal. We know we are only
2663 getting invariant gimple addresses here, so can use
2664 get_addr_base_and_unit_offset to do this comparison. */
2665 && !(TREE_CODE (currval) == ADDR_EXPR
2666 && TREE_CODE (to) == ADDR_EXPR
2667 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
2668 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
2669 && coff == toff))
2671 VN_INFO (from)->valnum = to;
2672 if (dump_file && (dump_flags & TDF_DETAILS))
2673 fprintf (dump_file, " (changed)\n");
2674 return true;
2676 if (dump_file && (dump_flags & TDF_DETAILS))
2677 fprintf (dump_file, "\n");
2678 return false;
2681 /* Mark as processed all the definitions in the defining stmt of USE, or
2682 the USE itself. */
2684 static void
2685 mark_use_processed (tree use)
2687 ssa_op_iter iter;
2688 def_operand_p defp;
2689 gimple stmt = SSA_NAME_DEF_STMT (use);
2691 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
2693 VN_INFO (use)->use_processed = true;
2694 return;
2697 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2699 tree def = DEF_FROM_PTR (defp);
2701 VN_INFO (def)->use_processed = true;
2705 /* Value number all definitions in STMT to themselves.
2706 Return true if a value number changed. */
2708 static bool
2709 defs_to_varying (gimple stmt)
2711 bool changed = false;
2712 ssa_op_iter iter;
2713 def_operand_p defp;
2715 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2717 tree def = DEF_FROM_PTR (defp);
2718 changed |= set_ssa_val_to (def, def);
2720 return changed;
2723 static bool expr_has_constants (tree expr);
2724 static tree valueize_expr (tree expr);
2726 /* Visit a copy between LHS and RHS, return true if the value number
2727 changed. */
2729 static bool
2730 visit_copy (tree lhs, tree rhs)
2732 /* The copy may have a more interesting constant-filled expression
2733 (we don't, since we know our RHS is just an SSA name). */
2734 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2735 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2737 /* And finally valueize. */
2738 rhs = SSA_VAL (rhs);
2740 return set_ssa_val_to (lhs, rhs);
2743 /* Visit a nary operator RHS, value number it, and return true if the
2744 value number of LHS has changed as a result. */
2746 static bool
2747 visit_nary_op (tree lhs, gimple stmt)
2749 bool changed = false;
2750 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2752 if (result)
2753 changed = set_ssa_val_to (lhs, result);
2754 else
2756 changed = set_ssa_val_to (lhs, lhs);
2757 vn_nary_op_insert_stmt (stmt, lhs);
2760 return changed;
2763 /* Visit a call STMT storing into LHS. Return true if the value number
2764 of the LHS has changed as a result. */
2766 static bool
2767 visit_reference_op_call (tree lhs, gimple stmt)
2769 bool changed = false;
2770 struct vn_reference_s vr1;
2771 vn_reference_t vnresult = NULL;
2772 tree vuse = gimple_vuse (stmt);
2773 tree vdef = gimple_vdef (stmt);
2775 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
2776 if (lhs && TREE_CODE (lhs) != SSA_NAME)
2777 lhs = NULL_TREE;
2779 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2780 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2781 vr1.type = gimple_expr_type (stmt);
2782 vr1.set = 0;
2783 vr1.hashcode = vn_reference_compute_hash (&vr1);
2784 vn_reference_lookup_1 (&vr1, &vnresult);
2786 if (vnresult)
2788 if (vnresult->result_vdef)
2789 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
2791 if (!vnresult->result && lhs)
2792 vnresult->result = lhs;
2794 if (vnresult->result && lhs)
2796 changed |= set_ssa_val_to (lhs, vnresult->result);
2798 if (VN_INFO (vnresult->result)->has_constants)
2799 VN_INFO (lhs)->has_constants = true;
2802 else
2804 vn_reference_s **slot;
2805 vn_reference_t vr2;
2806 if (vdef)
2807 changed |= set_ssa_val_to (vdef, vdef);
2808 if (lhs)
2809 changed |= set_ssa_val_to (lhs, lhs);
2810 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2811 vr2->vuse = vr1.vuse;
2812 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2813 vr2->type = vr1.type;
2814 vr2->set = vr1.set;
2815 vr2->hashcode = vr1.hashcode;
2816 vr2->result = lhs;
2817 vr2->result_vdef = vdef;
2818 slot = current_info->references.find_slot_with_hash (vr2, vr2->hashcode,
2819 INSERT);
2820 if (*slot)
2821 free_reference (*slot);
2822 *slot = vr2;
2825 return changed;
2828 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2829 and return true if the value number of the LHS has changed as a result. */
2831 static bool
2832 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2834 bool changed = false;
2835 tree last_vuse;
2836 tree result;
2838 last_vuse = gimple_vuse (stmt);
2839 last_vuse_ptr = &last_vuse;
2840 result = vn_reference_lookup (op, gimple_vuse (stmt),
2841 default_vn_walk_kind, NULL);
2842 last_vuse_ptr = NULL;
2844 /* If we have a VCE, try looking up its operand as it might be stored in
2845 a different type. */
2846 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2847 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2848 default_vn_walk_kind, NULL);
2850 /* We handle type-punning through unions by value-numbering based
2851 on offset and size of the access. Be prepared to handle a
2852 type-mismatch here by creating a VIEW_CONVERT_EXPR. */
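/* Illustrative example: given union { int i; float f; } u, a store
   to u.i followed by a load of u.f matches by offset and size, and
   the loaded value becomes VIEW_CONVERT_EXPR <float> of the stored
   value.  */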
2853 if (result
2854 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2856 /* We will be setting the value number of lhs to the value number
2857 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2858 So first simplify and lookup this expression to see if it
2859 is already available. */
2860 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2861 if ((CONVERT_EXPR_P (val)
2862 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2863 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2865 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2866 if ((CONVERT_EXPR_P (tem)
2867 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2868 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2869 TREE_TYPE (val), tem)))
2870 val = tem;
2872 result = val;
2873 if (!is_gimple_min_invariant (val)
2874 && TREE_CODE (val) != SSA_NAME)
2875 result = vn_nary_op_lookup (val, NULL);
2876 /* If the expression is not yet available, value-number lhs to
2877 a new SSA_NAME we create. */
2878 if (!result)
2880 result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (),
2881 "vntemp");
2882 /* Initialize value-number information properly. */
2883 VN_INFO_GET (result)->valnum = result;
2884 VN_INFO (result)->value_id = get_next_value_id ();
2885 VN_INFO (result)->expr = val;
2886 VN_INFO (result)->has_constants = expr_has_constants (val);
2887 VN_INFO (result)->needs_insertion = true;
2888 /* As all "inserted" statements are singleton SCCs, insert
2889 to the valid table. This is strictly needed to
2890 avoid re-generating new value SSA_NAMEs for the same
2891 expression during SCC iteration over and over (the
2892 optimistic table gets cleared after each iteration).
2893 We do not need to insert into the optimistic table, as
2894 lookups there will fall back to the valid table. */
2895 if (current_info == optimistic_info)
2897 current_info = valid_info;
2898 vn_nary_op_insert (val, result);
2899 current_info = optimistic_info;
2901 else
2902 vn_nary_op_insert (val, result);
2903 if (dump_file && (dump_flags & TDF_DETAILS))
2905 fprintf (dump_file, "Inserting name ");
2906 print_generic_expr (dump_file, result, 0);
2907 fprintf (dump_file, " for expression ");
2908 print_generic_expr (dump_file, val, 0);
2909 fprintf (dump_file, "\n");
2914 if (result)
2916 changed = set_ssa_val_to (lhs, result);
2917 if (TREE_CODE (result) == SSA_NAME
2918 && VN_INFO (result)->has_constants)
2920 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2921 VN_INFO (lhs)->has_constants = true;
2924 else
2926 changed = set_ssa_val_to (lhs, lhs);
2927 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
2930 return changed;
2934 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2935 and return true if the value number of the LHS has changed as a result. */
2937 static bool
2938 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2940 bool changed = false;
2941 vn_reference_t vnresult = NULL;
2942 tree result, assign;
2943 bool resultsame = false;
2944 tree vuse = gimple_vuse (stmt);
2945 tree vdef = gimple_vdef (stmt);
2947 /* First we want to lookup using the *vuses* from the store and see
2948 if the last store to this location with the same address
2949 had the same value.
2951 The vuses represent the memory state before the store. If the
2952 memory state, address, and value of the store are the same as the
2953 last store to this location, then this store will produce the
2954 same memory state as that store.
2956 In this case the vdef versions for this store are value numbered to those
2957 vuse versions, since they represent the same memory state after
2958 this store.
2960 Otherwise, the vdefs for the store are used when inserting into
2961 the table, since the store generates a new memory state. */
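/* A GIMPLE sketch of the redundant-store case (names made up):
     # .MEM_2 = VDEF <.MEM_1>
     a = x_3;
     ...
     # .MEM_5 = VDEF <.MEM_4>
     a = x_3;
   If the lookup using .MEM_4 finds the earlier store of x_3, the
   second store changes nothing and .MEM_5 is value numbered to
   SSA_VAL (.MEM_4).  */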
2963 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);
2965 if (result)
2967 if (TREE_CODE (result) == SSA_NAME)
2968 result = SSA_VAL (result);
2969 if (TREE_CODE (op) == SSA_NAME)
2970 op = SSA_VAL (op);
2971 resultsame = expressions_equal_p (result, op);
2974 if (!result || !resultsame)
2976 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
2977 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
2978 if (vnresult)
2980 VN_INFO (vdef)->use_processed = true;
2981 return set_ssa_val_to (vdef, vnresult->result_vdef);
2985 if (!result || !resultsame)
2987 if (dump_file && (dump_flags & TDF_DETAILS))
2989 fprintf (dump_file, "No store match\n");
2990 fprintf (dump_file, "Value numbering store ");
2991 print_generic_expr (dump_file, lhs, 0);
2992 fprintf (dump_file, " to ");
2993 print_generic_expr (dump_file, op, 0);
2994 fprintf (dump_file, "\n");
2996 /* Have to set value numbers before insert, since insert is
2997 going to valueize the references in-place. */
2998 if (vdef)
3000 changed |= set_ssa_val_to (vdef, vdef);
3003 /* Do not insert structure copies into the tables. */
3004 if (is_gimple_min_invariant (op)
3005 || is_gimple_reg (op))
3006 vn_reference_insert (lhs, op, vdef, NULL);
3008 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3009 vn_reference_insert (assign, lhs, vuse, vdef);
3011 else
3013 /* We had a match, so value number the vdef to have the value
3014 number of the vuse it came from. */
3016 if (dump_file && (dump_flags & TDF_DETAILS))
3017 fprintf (dump_file, "Store matched earlier value, "
3018 "value numbering store vdefs to matching vuses.\n");
3020 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3023 return changed;
3026 /* Visit and value number PHI, return true if the value number
3027 changed. */
3029 static bool
3030 visit_phi (gimple phi)
3032 bool changed = false;
3033 tree result;
3034 tree sameval = VN_TOP;
3035 bool allsame = true;
3036 unsigned i;
3038 /* TODO: We could check for this in init_sccvn, and replace this
3039 with a gcc_assert. */
3040 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3041 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3043 /* See if all non-TOP arguments have the same value. TOP is
3044 equivalent to everything, so we can ignore it. */
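/* E.g. for x_4 = PHI <x_1 (e1), 5 (e2), VN_TOP (e3)> where x_1
   values to 5, all visited arguments agree and x_4 gets the
   value 5 (an illustrative example).  */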
3045 for (i = 0; i < gimple_phi_num_args (phi); i++)
3047 tree def = PHI_ARG_DEF (phi, i);
3049 if (TREE_CODE (def) == SSA_NAME)
3050 def = SSA_VAL (def);
3051 if (def == VN_TOP)
3052 continue;
3053 if (sameval == VN_TOP)
3055 sameval = def;
3057 else
3059 if (!expressions_equal_p (def, sameval))
3061 allsame = false;
3062 break;
3067 /* If all value numbered to the same value, the phi node has that
3068 value. */
3069 if (allsame)
3071 if (is_gimple_min_invariant (sameval))
3073 VN_INFO (PHI_RESULT (phi))->has_constants = true;
3074 VN_INFO (PHI_RESULT (phi))->expr = sameval;
3076 else
3078 VN_INFO (PHI_RESULT (phi))->has_constants = false;
3079 VN_INFO (PHI_RESULT (phi))->expr = sameval;
3082 if (TREE_CODE (sameval) == SSA_NAME)
3083 return visit_copy (PHI_RESULT (phi), sameval);
3085 return set_ssa_val_to (PHI_RESULT (phi), sameval);
3088 /* Otherwise, see if it is equivalent to a phi node in this block. */
3089 result = vn_phi_lookup (phi);
3090 if (result)
3092 if (TREE_CODE (result) == SSA_NAME)
3093 changed = visit_copy (PHI_RESULT (phi), result);
3094 else
3095 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3097 else
3099 vn_phi_insert (phi, PHI_RESULT (phi));
3100 VN_INFO (PHI_RESULT (phi))->has_constants = false;
3101 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
3102 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3105 return changed;
3108 /* Return true if EXPR contains constants. */
3110 static bool
3111 expr_has_constants (tree expr)
3113 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3115 case tcc_unary:
3116 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
3118 case tcc_binary:
3119 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
3120 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
3121 /* Constants inside reference ops are rarely interesting, but
3122 it can take a lot of looking to find them. */
3123 case tcc_reference:
3124 case tcc_declaration:
3125 return false;
3126 default:
3127 return is_gimple_min_invariant (expr);
3129 return false;
3132 /* Return true if STMT contains constants. */
3134 static bool
3135 stmt_has_constants (gimple stmt)
3137 tree tem;
3139 if (gimple_code (stmt) != GIMPLE_ASSIGN)
3140 return false;
3142 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
3144 case GIMPLE_TERNARY_RHS:
3145 tem = gimple_assign_rhs3 (stmt);
3146 if (TREE_CODE (tem) == SSA_NAME)
3147 tem = SSA_VAL (tem);
3148 if (is_gimple_min_invariant (tem))
3149 return true;
3150 /* Fallthru. */
3152 case GIMPLE_BINARY_RHS:
3153 tem = gimple_assign_rhs2 (stmt);
3154 if (TREE_CODE (tem) == SSA_NAME)
3155 tem = SSA_VAL (tem);
3156 if (is_gimple_min_invariant (tem))
3157 return true;
3158 /* Fallthru. */
3160 case GIMPLE_SINGLE_RHS:
3161 /* Constants inside reference ops are rarely interesting, but
3162 it can take a lot of looking to find them. */
3163 case GIMPLE_UNARY_RHS:
3164 tem = gimple_assign_rhs1 (stmt);
3165 if (TREE_CODE (tem) == SSA_NAME)
3166 tem = SSA_VAL (tem);
3167 return is_gimple_min_invariant (tem);
3169 default:
3170 gcc_unreachable ();
3172 return false;
3175 /* Replace SSA_NAMES in expr with their value numbers, and return the
3176 result.
3177 This is performed in place. */
3179 static tree
3180 valueize_expr (tree expr)
3182 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3184 case tcc_binary:
3185 TREE_OPERAND (expr, 1) = vn_valueize (TREE_OPERAND (expr, 1));
3186 /* Fallthru. */
3187 case tcc_unary:
3188 TREE_OPERAND (expr, 0) = vn_valueize (TREE_OPERAND (expr, 0));
3189 break;
3190 default:;
3192 return expr;
3195 /* Simplify the binary expression RHS, and return the result if
3196 simplified. */
3198 static tree
3199 simplify_binary_expression (gimple stmt)
3201 tree result = NULL_TREE;
3202 tree op0 = gimple_assign_rhs1 (stmt);
3203 tree op1 = gimple_assign_rhs2 (stmt);
3204 enum tree_code code = gimple_assign_rhs_code (stmt);
3206 /* This will not catch every single case we could combine, but will
3207 catch those with constants. The goal here is to simultaneously
3208 combine constants between expressions, but avoid infinite
3209 expansion of expressions during simplification. */
3210 if (TREE_CODE (op0) == SSA_NAME)
3212 if (VN_INFO (op0)->has_constants
3213 || TREE_CODE_CLASS (code) == tcc_comparison
3214 || code == COMPLEX_EXPR)
3215 op0 = valueize_expr (vn_get_expr_for (op0));
3216 else
3217 op0 = vn_valueize (op0);
3220 if (TREE_CODE (op1) == SSA_NAME)
3222 if (VN_INFO (op1)->has_constants
3223 || code == COMPLEX_EXPR)
3224 op1 = valueize_expr (vn_get_expr_for (op1));
3225 else
3226 op1 = vn_valueize (op1);
3229 /* Pointer plus constant can be represented as invariant address.
3230 Do so to allow further propagation, see also tree forwprop. */
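/* E.g. (illustrative) &a p+ 4 becomes the invariant &MEM[&a + 4].  */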
3231 if (code == POINTER_PLUS_EXPR
3232 && tree_fits_uhwi_p (op1)
3233 && TREE_CODE (op0) == ADDR_EXPR
3234 && is_gimple_min_invariant (op0))
3235 return build_invariant_address (TREE_TYPE (op0),
3236 TREE_OPERAND (op0, 0),
3237 TREE_INT_CST_LOW (op1));
3239 /* Avoid folding if nothing changed. */
3240 if (op0 == gimple_assign_rhs1 (stmt)
3241 && op1 == gimple_assign_rhs2 (stmt))
3242 return NULL_TREE;
3244 fold_defer_overflow_warnings ();
3246 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3247 if (result)
3248 STRIP_USELESS_TYPE_CONVERSION (result);
3250 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3251 stmt, 0);
3253 /* Make sure result is not a complex expression consisting
3254 of operators of operators (i.e. (a + b) + (a + c)).
3255 Otherwise, we will end up with unbounded expressions if
3256 fold does anything at all. */
3257 if (result && valid_gimple_rhs_p (result))
3258 return result;
3260 return NULL_TREE;
3263 /* Simplify the unary expression RHS, and return the result if
3264 simplified. */
3266 static tree
3267 simplify_unary_expression (gimple stmt)
3269 tree result = NULL_TREE;
3270 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3271 enum tree_code code = gimple_assign_rhs_code (stmt);
3273 /* We handle some tcc_reference codes here that are all
3274 GIMPLE_ASSIGN_SINGLE codes. */
3275 if (code == REALPART_EXPR
3276 || code == IMAGPART_EXPR
3277 || code == VIEW_CONVERT_EXPR
3278 || code == BIT_FIELD_REF)
3279 op0 = TREE_OPERAND (op0, 0);
3281 if (TREE_CODE (op0) != SSA_NAME)
3282 return NULL_TREE;
3284 orig_op0 = op0;
3285 if (VN_INFO (op0)->has_constants)
3286 op0 = valueize_expr (vn_get_expr_for (op0));
3287 else if (CONVERT_EXPR_CODE_P (code)
3288 || code == REALPART_EXPR
3289 || code == IMAGPART_EXPR
3290 || code == VIEW_CONVERT_EXPR
3291 || code == BIT_FIELD_REF)
3293 /* We want to do tree-combining on conversion-like expressions.
3294 Make sure we feed only SSA_NAMEs or constants to fold though. */
3295 tree tem = valueize_expr (vn_get_expr_for (op0));
3296 if (UNARY_CLASS_P (tem)
3297 || BINARY_CLASS_P (tem)
3298 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3299 || TREE_CODE (tem) == SSA_NAME
3300 || TREE_CODE (tem) == CONSTRUCTOR
3301 || is_gimple_min_invariant (tem))
3302 op0 = tem;
3305 /* Avoid folding if nothing changed, but remember the expression. */
3306 if (op0 == orig_op0)
3307 return NULL_TREE;
3309 if (code == BIT_FIELD_REF)
3311 tree rhs = gimple_assign_rhs1 (stmt);
3312 result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3313 op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3315 else
3316 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3317 if (result)
3319 STRIP_USELESS_TYPE_CONVERSION (result);
3320 if (valid_gimple_rhs_p (result))
3321 return result;
3324 return NULL_TREE;
3327 /* Try to simplify RHS using equivalences and constant folding. */
3329 static tree
3330 try_to_simplify (gimple stmt)
3332 enum tree_code code = gimple_assign_rhs_code (stmt);
3333 tree tem;
3335 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
3336 in this case; there is no point in doing extra work. */
3337 if (code == SSA_NAME)
3338 return NULL_TREE;
3340 /* First try constant folding based on our current lattice. */
3341 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize);
3342 if (tem
3343 && (TREE_CODE (tem) == SSA_NAME
3344 || is_gimple_min_invariant (tem)))
3345 return tem;
3347 /* If that didn't work try combining multiple statements. */
3348 switch (TREE_CODE_CLASS (code))
3350 case tcc_reference:
3351 /* Fallthrough for some unary codes that can operate on registers. */
3352 if (!(code == REALPART_EXPR
3353 || code == IMAGPART_EXPR
3354 || code == VIEW_CONVERT_EXPR
3355 || code == BIT_FIELD_REF))
3356 break;
3357 /* We could do a little more with unary ops, if they expand
3358 into binary ops, but it's debatable whether it is worth it. */
3359 case tcc_unary:
3360 return simplify_unary_expression (stmt);
3362 case tcc_comparison:
3363 case tcc_binary:
3364 return simplify_binary_expression (stmt);
3366 default:
3367 break;
3370 return NULL_TREE;
3373 /* Visit and value number USE, return true if the value number
3374 changed. */
3376 static bool
3377 visit_use (tree use)
3379 bool changed = false;
3380 gimple stmt = SSA_NAME_DEF_STMT (use);
3382 mark_use_processed (use);
3384 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3385 if (dump_file && (dump_flags & TDF_DETAILS)
3386 && !SSA_NAME_IS_DEFAULT_DEF (use))
3388 fprintf (dump_file, "Value numbering ");
3389 print_generic_expr (dump_file, use, 0);
3390 fprintf (dump_file, " stmt = ");
3391 print_gimple_stmt (dump_file, stmt, 0, 0);
3394 /* Handle uninitialized uses. */
3395 if (SSA_NAME_IS_DEFAULT_DEF (use))
3396 changed = set_ssa_val_to (use, use);
3397 else
3399 if (gimple_code (stmt) == GIMPLE_PHI)
3400 changed = visit_phi (stmt);
3401 else if (gimple_has_volatile_ops (stmt))
3402 changed = defs_to_varying (stmt);
3403 else if (is_gimple_assign (stmt))
3405 enum tree_code code = gimple_assign_rhs_code (stmt);
3406 tree lhs = gimple_assign_lhs (stmt);
3407 tree rhs1 = gimple_assign_rhs1 (stmt);
3408 tree simplified;
3410 /* Shortcut for copies. Simplifying copies is pointless,
3411 since we copy the expression and value they represent. */
3412 if (code == SSA_NAME
3413 && TREE_CODE (lhs) == SSA_NAME)
3415 changed = visit_copy (lhs, rhs1);
3416 goto done;
3418 simplified = try_to_simplify (stmt);
3419 if (simplified)
3421 if (dump_file && (dump_flags & TDF_DETAILS))
3423 fprintf (dump_file, "RHS ");
3424 print_gimple_expr (dump_file, stmt, 0, 0);
3425 fprintf (dump_file, " simplified to ");
3426 print_generic_expr (dump_file, simplified, 0);
3427 if (TREE_CODE (lhs) == SSA_NAME)
3428 fprintf (dump_file, " has constants %d\n",
3429 expr_has_constants (simplified));
3430 else
3431 fprintf (dump_file, "\n");
3434 /* Setting value numbers to constants will occasionally
3435 screw up phi congruence because constants are not
3436 uniquely associated with a single ssa name that can be
3437 looked up. */
3438 if (simplified
3439 && is_gimple_min_invariant (simplified)
3440 && TREE_CODE (lhs) == SSA_NAME)
3442 VN_INFO (lhs)->expr = simplified;
3443 VN_INFO (lhs)->has_constants = true;
3444 changed = set_ssa_val_to (lhs, simplified);
3445 goto done;
3447 else if (simplified
3448 && TREE_CODE (simplified) == SSA_NAME
3449 && TREE_CODE (lhs) == SSA_NAME)
3451 changed = visit_copy (lhs, simplified);
3452 goto done;
3454 else if (simplified)
3456 if (TREE_CODE (lhs) == SSA_NAME)
3458 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3459 /* We have to unshare the expression or else
3460 valueizing may change the IL stream. */
3461 VN_INFO (lhs)->expr = unshare_expr (simplified);
3464 else if (stmt_has_constants (stmt)
3465 && TREE_CODE (lhs) == SSA_NAME)
3466 VN_INFO (lhs)->has_constants = true;
3467 else if (TREE_CODE (lhs) == SSA_NAME)
3469 /* We reset expr and constantness here because we may
3470 have been value numbering optimistically, and
3471 iterating. They may become non-constant in this case,
3472 even if they were optimistically constant. */
3474 VN_INFO (lhs)->has_constants = false;
3475 VN_INFO (lhs)->expr = NULL_TREE;
3478 if ((TREE_CODE (lhs) == SSA_NAME
3479 /* We can substitute SSA_NAMEs that are live over
3480 abnormal edges with their constant value. */
3481 && !(gimple_assign_copy_p (stmt)
3482 && is_gimple_min_invariant (rhs1))
3483 && !(simplified
3484 && is_gimple_min_invariant (simplified))
3485 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3486 /* Stores or copies from SSA_NAMEs that are live over
3487 abnormal edges are a problem. */
3488 || (code == SSA_NAME
3489 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3490 changed = defs_to_varying (stmt);
3491 else if (REFERENCE_CLASS_P (lhs)
3492 || DECL_P (lhs))
3493 changed = visit_reference_op_store (lhs, rhs1, stmt);
3494 else if (TREE_CODE (lhs) == SSA_NAME)
3496 if ((gimple_assign_copy_p (stmt)
3497 && is_gimple_min_invariant (rhs1))
3498 || (simplified
3499 && is_gimple_min_invariant (simplified)))
3501 VN_INFO (lhs)->has_constants = true;
3502 if (simplified)
3503 changed = set_ssa_val_to (lhs, simplified);
3504 else
3505 changed = set_ssa_val_to (lhs, rhs1);
3507 else
3509 /* First try to lookup the simplified expression. */
3510 if (simplified)
3512 enum gimple_rhs_class rhs_class;
3515 rhs_class = get_gimple_rhs_class (TREE_CODE (simplified));
3516 if ((rhs_class == GIMPLE_UNARY_RHS
3517 || rhs_class == GIMPLE_BINARY_RHS
3518 || rhs_class == GIMPLE_TERNARY_RHS)
3519 && valid_gimple_rhs_p (simplified))
3521 tree result = vn_nary_op_lookup (simplified, NULL);
3522 if (result)
3524 changed = set_ssa_val_to (lhs, result);
3525 goto done;
3530 /* Otherwise visit the original statement. */
3531 switch (vn_get_stmt_kind (stmt))
3533 case VN_NARY:
3534 changed = visit_nary_op (lhs, stmt);
3535 break;
3536 case VN_REFERENCE:
3537 changed = visit_reference_op_load (lhs, rhs1, stmt);
3538 break;
3539 default:
3540 changed = defs_to_varying (stmt);
3541 break;
3545 else
3546 changed = defs_to_varying (stmt);
3548 else if (is_gimple_call (stmt))
3550 tree lhs = gimple_call_lhs (stmt);
3552 /* ??? We could try to simplify calls. */
3554 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3556 if (stmt_has_constants (stmt))
3557 VN_INFO (lhs)->has_constants = true;
3558 else
3560 /* We reset expr and constantness here because we may
3561 have been value numbering optimistically, and
3562 iterating. They may become non-constant in this case,
3563 even if they were optimistically constant. */
3564 VN_INFO (lhs)->has_constants = false;
3565 VN_INFO (lhs)->expr = NULL_TREE;
3568 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3570 changed = defs_to_varying (stmt);
3571 goto done;
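/* An illustrative case: two calls
     t_1 = f (x_2);   with VUSE <.MEM_7>
     t_3 = f (x_2);   with VUSE <.MEM_7>
   where f is pure can be given the same value number, since both
   read the same memory state and change nothing.  */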
3575 if (!gimple_call_internal_p (stmt)
3576 && (/* Calls to the same function with the same vuse
3577 and the same operands do not necessarily return the same
3578 value, unless they're pure or const. */
3579 gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
3580 /* If calls have a vdef, subsequent calls won't have
3581 the same incoming vuse. So, if 2 calls with vdef have the
3582 same vuse, we know they're not subsequent.
3583 We can value number two calls to the same function with the
3584 same vuse and the same operands (which therefore are not
3585 subsequent) the same, because there is no code in the program
3586 that can compare the two values... */
3587 || (gimple_vdef (stmt)
3588 /* ... unless the call returns a pointer which does
3589 not alias with anything else, in which case the
3590 information that the values are distinct is encoded
3591 in the IL. */
3592 && !(gimple_call_return_flags (stmt) & ERF_NOALIAS))))
3593 changed = visit_reference_op_call (lhs, stmt);
3594 else
3595 changed = defs_to_varying (stmt);
3597 else
3598 changed = defs_to_varying (stmt);
3600 done:
3601 return changed;
3604 /* Compare two operands by reverse postorder index. */
3606 static int
3607 compare_ops (const void *pa, const void *pb)
3609 const tree opa = *((const tree *)pa);
3610 const tree opb = *((const tree *)pb);
3611 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3612 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3613 basic_block bba;
3614 basic_block bbb;
3616 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3617 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3618 else if (gimple_nop_p (opstmta))
3619 return -1;
3620 else if (gimple_nop_p (opstmtb))
3621 return 1;
3623 bba = gimple_bb (opstmta);
3624 bbb = gimple_bb (opstmtb);
3626 if (!bba && !bbb)
3627 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3628 else if (!bba)
3629 return -1;
3630 else if (!bbb)
3631 return 1;
3633 if (bba == bbb)
3635 if (gimple_code (opstmta) == GIMPLE_PHI
3636 && gimple_code (opstmtb) == GIMPLE_PHI)
3637 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3638 else if (gimple_code (opstmta) == GIMPLE_PHI)
3639 return -1;
3640 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3641 return 1;
3642 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3643 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3644 else
3645 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3647 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3650 /* Sort an array containing members of a strongly connected component
3651 SCC so that the members are ordered by RPO number.
3652 This means that when the sort is complete, iterating through the
3653 array will give you the members in RPO order. */
3655 static void
3656 sort_scc (vec<tree> scc)
3658 scc.qsort (compare_ops);
3661 /* Insert the no longer used nary ONARY into the hash table of INFO. */
3663 static void
3664 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3666 size_t size = sizeof_vn_nary_op (onary->length);
3667 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3668 &info->nary_obstack);
3669 memcpy (nary, onary, size);
3670 vn_nary_op_insert_into (nary, info->nary, false);
3673 /* Insert the no longer used phi OPHI into the hash table of INFO. */
3675 static void
3676 copy_phi (vn_phi_t ophi, vn_tables_t info)
3678 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3679 vn_phi_s **slot;
3680 memcpy (phi, ophi, sizeof (*phi));
3681 ophi->phiargs.create (0);
3682 slot = info->phis.find_slot_with_hash (phi, phi->hashcode, INSERT);
3683 gcc_assert (!*slot);
3684 *slot = phi;
3687 /* Insert the no longer used reference OREF into the hash table of INFO. */
3689 static void
3690 copy_reference (vn_reference_t oref, vn_tables_t info)
3692 vn_reference_t ref;
3693 vn_reference_s **slot;
3694 ref = (vn_reference_t) pool_alloc (info->references_pool);
3695 memcpy (ref, oref, sizeof (*ref));
3696 oref->operands.create (0);
3697 slot = info->references.find_slot_with_hash (ref, ref->hashcode, INSERT);
3698 if (*slot)
3699 free_reference (*slot);
3700 *slot = ref;
3703 /* Process a strongly connected component in the SSA graph. */
3705 static void
3706 process_scc (vec<tree> scc)
3708 tree var;
3709 unsigned int i;
3710 unsigned int iterations = 0;
3711 bool changed = true;
3712 vn_nary_op_iterator_type hin;
3713 vn_phi_iterator_type hip;
3714 vn_reference_iterator_type hir;
3715 vn_nary_op_t nary;
3716 vn_phi_t phi;
3717 vn_reference_t ref;
3719 /* If the SCC has a single member, just visit it. */
3720 if (scc.length () == 1)
3722 tree use = scc[0];
3723 if (VN_INFO (use)->use_processed)
3724 return;
3725 /* We need to make sure it doesn't form a cycle itself, which can
3726 happen for self-referential PHI nodes. In that case we would
3727 end up inserting an expression with VN_TOP operands into the
3728 valid table which makes us derive bogus equivalences later.
3729 The cheapest way to check this is to assume it for all PHI nodes. */
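/* E.g. x_1 = PHI <0 (entry), x_1 (latch)> is a single-member SCC
   that still references itself; visiting it only once could record
   a result based on the VN_TOP value of x_1 in the valid table.  */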
3730 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3731 /* Fallthru to iteration. */ ;
3732 else
3734 visit_use (use);
3735 return;
3739 /* Iterate over the SCC with the optimistic table until it stops
3740 changing. */
3741 current_info = optimistic_info;
3742 while (changed)
3744 changed = false;
3745 iterations++;
3746 if (dump_file && (dump_flags & TDF_DETAILS))
3747 fprintf (dump_file, "Starting iteration %d\n", iterations);
3748 /* As we are value-numbering optimistically we have to
3749 clear the expression tables and the simplified expressions
3750 in each iteration until we converge. */
3751 optimistic_info->nary.empty ();
3752 optimistic_info->phis.empty ();
3753 optimistic_info->references.empty ();
3754 obstack_free (&optimistic_info->nary_obstack, NULL);
3755 gcc_obstack_init (&optimistic_info->nary_obstack);
3756 empty_alloc_pool (optimistic_info->phis_pool);
3757 empty_alloc_pool (optimistic_info->references_pool);
3758 FOR_EACH_VEC_ELT (scc, i, var)
3759 VN_INFO (var)->expr = NULL_TREE;
3760 FOR_EACH_VEC_ELT (scc, i, var)
3761 changed |= visit_use (var);
3764 statistics_histogram_event (cfun, "SCC iterations", iterations);
3766 /* Finally, copy the contents of the no longer used optimistic
3767 table to the valid table. */
3768 FOR_EACH_HASH_TABLE_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hin)
3769 copy_nary (nary, valid_info);
3770 FOR_EACH_HASH_TABLE_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hip)
3771 copy_phi (phi, valid_info);
3772 FOR_EACH_HASH_TABLE_ELEMENT (optimistic_info->references,
3773 ref, vn_reference_t, hir)
3774 copy_reference (ref, valid_info);
3776 current_info = valid_info;
3780 /* Pop the components of the found SCC for NAME off the SCC stack
3781 and process them. Returns true if all went well, false if
3782 we run into resource limits. */
3784 static bool
3785 extract_and_process_scc_for_name (tree name)
3787 vec<tree> scc = vNULL;
3788 tree x;
3790 /* Found an SCC, pop the components off the SCC stack and
3791 process them. */
3794 x = sccstack.pop ();
3796 VN_INFO (x)->on_sccstack = false;
3797 scc.safe_push (x);
3798 } while (x != name);
3800 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
3801 if (scc.length ()
3802 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3804 if (dump_file)
3805 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3806 "SCC size %u exceeding %u\n", scc.length (),
3807 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3809 scc.release ();
3810 return false;
3813 if (scc.length () > 1)
3814 sort_scc (scc);
3816 if (dump_file && (dump_flags & TDF_DETAILS))
3817 print_scc (dump_file, scc);
3819 process_scc (scc);
3821 scc.release ();
3823 return true;
3826 /* Depth first search on NAME to discover and process SCC's in the SSA
3827 graph.
3828 Execution of this algorithm relies on the fact that the SCC's are
3829 popped off the stack in topological order.
3830 Returns true if successful, false if we stopped processing SCC's due
3831 to resource constraints. */
3833 static bool
3834 DFS (tree name)
3836 vec<ssa_op_iter> itervec = vNULL;
3837 vec<tree> namevec = vNULL;
3838 use_operand_p usep = NULL;
3839 gimple defstmt;
3840 tree use;
3841 ssa_op_iter iter;
3843 start_over:
3844 /* SCC info */
3845 VN_INFO (name)->dfsnum = next_dfs_num++;
3846 VN_INFO (name)->visited = true;
3847 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3849 sccstack.safe_push (name);
3850 VN_INFO (name)->on_sccstack = true;
3851 defstmt = SSA_NAME_DEF_STMT (name);
3853 /* Recursively DFS on our operands, looking for SCC's. */
3854 if (!gimple_nop_p (defstmt))
3856 /* Push a new iterator. */
3857 if (gimple_code (defstmt) == GIMPLE_PHI)
3858 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3859 else
3860 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3862 else
3863 clear_and_done_ssa_iter (&iter);
3865 while (1)
3867 /* If we are done processing uses of a name, go up the stack
3868 of iterators and process SCCs as we found them. */
3869 if (op_iter_done (&iter))
3871 /* See if we found an SCC. */
3872 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3873 if (!extract_and_process_scc_for_name (name))
3875 namevec.release ();
3876 itervec.release ();
3877 return false;
3880 /* Check if we are done. */
3881 if (namevec.is_empty ())
3883 namevec.release ();
3884 itervec.release ();
3885 return true;
3888 /* Restore the last use walker and continue walking there. */
3889 use = name;
3890 name = namevec.pop ();
3891 memcpy (&iter, &itervec.last (),
3892 sizeof (ssa_op_iter));
3893 itervec.pop ();
3894 goto continue_walking;
3897 use = USE_FROM_PTR (usep);
3899 /* Since we handle phi nodes, we will sometimes get
3900 invariants in the use expression. */
3901 if (TREE_CODE (use) == SSA_NAME)
3903 if (! (VN_INFO (use)->visited))
3905 /* Recurse by pushing the current use walking state on
3906 the stack and starting over. */
3907 itervec.safe_push (iter);
3908 namevec.safe_push (name);
3909 name = use;
3910 goto start_over;
3912 continue_walking:
3913 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3914 VN_INFO (use)->low);
3916 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3917 && VN_INFO (use)->on_sccstack)
3919 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3920 VN_INFO (name)->low);
3924 usep = op_iter_next_use (&iter);
3928 /* Allocate a value number table. */
3930 static void
3931 allocate_vn_table (vn_tables_t table)
3933 table->phis.create (23);
3934 table->nary.create (23);
3935 table->references.create (23);
3937 gcc_obstack_init (&table->nary_obstack);
3938 table->phis_pool = create_alloc_pool ("VN phis",
3939 sizeof (struct vn_phi_s),
3940 30);
3941 table->references_pool = create_alloc_pool ("VN references",
3942 sizeof (struct vn_reference_s),
3943 30);
/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  table->phis.dispose ();
  table->nary.dispose ();
  table->references.dispose ();
  obstack_free (&table->nary_obstack, NULL);
  free_alloc_pool (table->phis_pool);
  free_alloc_pool (table->references_pool);
}
static void
init_scc_vn (void)
{
  size_t i;
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  sccstack.create (0);
  constant_to_value_id.create (23);

  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table.create (num_ssa_names + 1);
  /* Creating the vector only preallocates the space; safe_grow_cleared
     actually grows it to the right size.  */
  vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs.create (0);
  shared_lookup_references.create (0);
  rpo_numbers = XNEWVEC (int, last_basic_block);
  rpo_numbers_temp
    = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
  /* rpo_numbers_temp holds the RPO ordering itself:
     rpo_numbers_temp[i] == bb means that bb is the i'th block in RPO
     order.  We want the inverse mapping, from bb's to their RPO
     numbers, so we need to rearrange this array.  */
  for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;
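  /* For example, if the RPO order of the non-fixed blocks is
     bb4, bb2, bb3, then rpo_numbers_temp is { 4, 2, 3 } and the loop
     above produces rpo_numbers[4] = 0, rpo_numbers[2] = 1 and
     rpo_numbers[3] = 2.  */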
  XDELETE (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
	{
	  VN_INFO_GET (name)->valnum = VN_TOP;
	  VN_INFO (name)->expr = NULL_TREE;
	  VN_INFO (name)->value_id = 0;
	}
    }

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}
void
free_scc_vn (void)
{
  size_t i;

  constant_to_value_id.dispose ();
  BITMAP_FREE (constant_value_ids);
  shared_lookup_phiargs.release ();
  shared_lookup_references.release ();
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->needs_insertion)
	release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  vn_ssa_aux_table.release ();

  sccstack.release ();
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}
/* Set *ID according to RESULT.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result && TREE_CODE (result) == SSA_NAME)
    *id = VN_INFO (result)->value_id;
  else if (result && is_gimple_min_invariant (result))
    *id = get_or_alloc_constant_value_id (result);
  else
    *id = get_next_value_id ();
}
/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HASH_TABLE_ELEMENT (valid_info->nary, vno, vn_nary_op_t, hin)
    set_value_id_for_result (vno->result, &vno->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (valid_info->phis, vp, vn_phi_t, hip)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (valid_info->references, vr, vn_reference_t, hir)
    set_value_id_for_result (vr->result, &vr->value_id);
}
/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
   how the alias oracle is consulted when walking memory references
   during the VN process.  */

bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  size_t i;
  tree param;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();
  current_info = valid_info;

  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    {
      tree def = ssa_default_def (cfun, param);
      if (def)
	VN_INFO (def)->valnum = def;
    }

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->visited == false
	  && !has_zero_uses (name))
	if (!DFS (name))
	  {
	    free_scc_vn ();
	    return false;
	  }
    }

  /* Initialize the value ids.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (info->valnum == name
	  || info->valnum == VN_TOP)
	info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate value ids along copy chains.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
	  && info->valnum != name
	  && info->value_id != VN_INFO (info->valnum)->value_id)
	info->value_id = VN_INFO (info->valnum)->value_id;
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);
	  if (name
	      && VN_INFO (name)->visited
	      && SSA_VAL (name) != name)
	    {
	      print_generic_expr (dump_file, name, 0);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, SSA_VAL (name), 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  return true;
}
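/* A typical driver, sketched after the way the PRE pass in
   tree-ssa-pre.c uses SCCVN (abbreviated, not a verbatim copy): the
   pass gives up on the function when we return false here, and tears
   the tables down once it is done with the value numbers:

     if (!run_scc_vn (VN_WALKREWRITE))
       return 0;
     ...
     free_scc_vn ();
*/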
/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}
/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}
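/* Value ids are dense, so consumers size value-id-indexed structures
   from get_max_value_id; e.g. the PRE pass grows its value-to-
   expressions map with something like (a sketch of that usage, not a
   verbatim quote)

     value_expressions.safe_grow_cleared (get_max_value_id () + 1);

   which keeps every id handed out above in range.  */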
/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
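/* Note the consequences of the checks above: a tree is always equal to
   itself (including e1 == e2 == NULL); two structurally identical
   constants, such as distinct INTEGER_CST nodes for 42 of the same
   type, compare equal through operand_equal_p; and OEP_PURE_SAME
   additionally lets calls to the same pure function with equal
   arguments compare equal.  */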
/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (type)
	       && TYPE_OVERFLOW_TRAPS (type))
	honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
				       honor_trapv,
				       honor_nans, honor_snans, rhs2,
				       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}
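/* Examples of the cases caught above: with -ftrapv, signed integer
   addition reports true via honor_trapv; an integer division whose
   divisor is not a known-nonzero constant reports true through
   operation_could_trap_helper_p (passing RHS2 lets it see that e.g.
   a division by the constant 4 cannot trap); and an operand that is
   itself a trapping reference is caught by the final
   tree_could_trap_p loop.  */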