/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "dumpfile.h"
#include "hashtab.h"
#include "alloc-pool.h"
#include "flags.h"
#include "bitmap.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "gimple-fold.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
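
/* For illustration (an added sketch, not part of the algorithm
   description above): consider the SSA cycle formed by a loop

       # i_1 = PHI <0(preheader), i_2(latch)>
       i_2 = i_1 + 0;

   i_1 and i_2 form an SCC in the SSA graph.  The optimistic walk
   assumes i_1 is 0, derives i_2 = 0 + 0 = 0, and re-validates the
   PHI; since both arguments now value-number to 0, the whole cycle
   stabilizes at value number 0 without iterating any names outside
   the SCC.  */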

/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;


DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
				SSA_NAME_VERSION (name));
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
	       SSA_NAME_VERSION (name), value);
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
		   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
	       SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}

/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;
  enum tree_code code;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is not an assignment use it directly.  */
  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  code = gimple_assign_rhs_code (def_stmt);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      if ((code == REALPART_EXPR
	   || code == IMAGPART_EXPR
	   || code == VIEW_CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
				      0)) == SSA_NAME)
	expr = fold_build1 (code,
			    gimple_expr_type (def_stmt),
			    TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (code,
			  gimple_expr_type (def_stmt),
			  gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (code,
			  gimple_expr_type (def_stmt),
			  gimple_assign_rhs1 (def_stmt),
			  gimple_assign_rhs2 (def_stmt));
      break;

    case tcc_exceptional:
      if (code == CONSTRUCTOR
	  && TREE_CODE
	       (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
	expr = gimple_assign_rhs1 (def_stmt);
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
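
/* Illustrative example (added, not part of the original sources):
   given

       a_2 = b_1 + 1;
       c_3 = a_2;

   once c_3 value-numbers to a_2, vn_get_expr_for (c_3) reaches the
   tcc_binary case above and builds (and caches) the expression
   "b_1 + 1" via fold_build2, so later folding can work with the
   expression rather than the bare SSA name.  */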

/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
				   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
				   vc.hashcode, INSERT);
  if (*slot)
    return ((vn_constant_t)*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = (void *) vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vn_reference_op_s, vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (vro->off != -1)
	{
	  if (off == -1)
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (off != -1
	      && off != 0)
	    result = iterative_hash_hashval_t (off, result);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  result = iterative_hash_hashval_t (TREE_CODE (op), result);
		  result = iterative_hash_expr (op, result);
		}
	    }
	  else
	    result = vn_reference_op_compute_hash (vro, result);
	}
    }
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
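
/* Illustrative note (added): the OFF accumulation above makes the
   hash insensitive to how a constant offset is split across
   operands, and the DEREF/ADDR_EXPR case hashes MEM[&decl] through
   the underlying decl.  For example, assuming a field F at byte
   offset 8, the operand chains for MEM[p_1 + 8] and for MEM[p_1].F
   both accumulate OFF == 8 before reaching p_1 and therefore hash
   identically, which is what lets vn_reference_eq below consider
   them for the same value number.  */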

/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  unsigned i, j;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  if (vro1->off == -1)
	    break;
	  off1 += vro1->off;
	}
      for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  if (vro2->off == -1)
	    break;
	  off2 += vro2->off;
	}
      if (off1 != off2)
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (VEC_length (vn_reference_op_s, vr1->operands) != i
	 || VEC_length (vn_reference_op_s, vr2->operands) != j);

  return true;
}
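
/* Illustrative example (added): the do/while above compresses runs
   of operands with known constant offsets, so with a field F at byte
   offset 8 the references

       MEM[p_1 + 8]    ops: MEM_REF (off 8), p_1
       MEM[p_1].F      ops: COMPONENT_REF (off 8), MEM_REF (off 0), p_1

   compare equal: both sides accumulate an offset of 8 before
   reaching p_1, and the remaining operands match via
   vn_reference_op_eq.  */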

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case MODIFY_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case WITH_SIZE_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.off = 0;
	  break;
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  if (host_integerp (TREE_OPERAND (ref, 1), 0))
	    temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
	  break;
	case BIT_FIELD_REF:
	  /* Record bits and position.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& TREE_CODE (this_offset) == INTEGER_CST)
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    double_int off
		      = double_int_add (tree_to_double_int (this_offset),
					double_int_rshift
					  (tree_to_double_int (bit_offset),
					   BITS_PER_UNIT == 8
					   ? 3 : exact_log2 (BITS_PER_UNIT),
					   HOST_BITS_PER_DOUBLE_INT, true));
		    if (double_int_fits_in_shwi_p (off))
		      temp.off = off.low;
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* Record index as operand.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  /* Always record lower bounds and element size.  */
	  temp.op1 = array_ref_low_bound (ref);
	  temp.op2 = array_ref_element_size (ref);
	  if (TREE_CODE (temp.op0) == INTEGER_CST
	      && TREE_CODE (temp.op1) == INTEGER_CST
	      && TREE_CODE (temp.op2) == INTEGER_CST)
	    {
	      double_int off = tree_to_double_int (temp.op0);
	      off = double_int_add (off,
				    double_int_neg
				      (tree_to_double_int (temp.op1)));
	      off = double_int_mul (off, tree_to_double_int (temp.op2));
	      if (double_int_fits_in_shwi_p (off))
		temp.off = off.low;
	    }
	  break;
	case VAR_DECL:
	  if (DECL_HARD_REGISTER (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	  /* Canonicalize decls to MEM[&decl] which is what we end up with
	     when valueizing MEM[ptr] with ptr = &decl.  */
	  temp.opcode = MEM_REF;
	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
	  temp.off = 0;
	  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
	  temp.opcode = ADDR_EXPR;
	  temp.op0 = build_fold_addr_expr (ref);
	  temp.type = TREE_TYPE (temp.op0);
	  temp.off = -1;
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthrough.  */
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (IE they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration.  */
	case REALPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
	  || TREE_CODE (ref) == MODIFY_EXPR
	  || TREE_CODE (ref) == WITH_SIZE_EXPR
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
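
/* Illustrative example (added): for a load from S.F, where S is an
   ordinary VAR_DECL and F sits at byte offset 4, the resulting
   operand vector is roughly

       { COMPONENT_REF (op0 = F, off = 4),
	 MEM_REF       (op0 = 0, off = 0),
	 ADDR_EXPR     (op0 = &S, off = -1) }

   i.e. the decl is canonicalized to MEM[&S] so that a direct access
   and an access through a pointer valueized to &S produce the same
   operands.  */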

/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
	size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
	size = -1;
      else
	size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      vn_reference_op_t pop = &VEC_index (vn_reference_op_s, ops, i-1);
	      base = TREE_OPERAND (op->op0, 0);
	      if (pop->off == -1)
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  base_alias_set = get_deref_alias_set (op->op0);
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += tree_low_cst (op->op1, 0);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */
	    if (op->op1
		|| !host_integerp (DECL_FIELD_OFFSET (field), 1))
	      max_size = -1;
	    else
	      {
		offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
			   * BITS_PER_UNIT);
		offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!host_integerp (op->op0, 0)
	      || !host_integerp (op->op1, 0)
	      || !host_integerp (op->op2, 0))
	    max_size = -1;
	  else
	    {
	      HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
	      hindex -= TREE_INT_CST_LOW (op->op1);
	      hindex *= TREE_INT_CST_LOW (op->op2);
	      hindex *= BITS_PER_UNIT;
	      offset += hindex;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case CONST_DECL:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  return true;
}
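
/* Illustrative example (added): for the canonical operand vector of
   S.F shown earlier (F a 32-bit field at byte offset 4), this routine
   produces an ao_ref with base S, offset 32, size 32 and max_size 32,
   all in bits, ready for queries against the alias oracle.  */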

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
			      VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
    }

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  temp.off = -1;
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = &VEC_index (vn_reference_op_s, *ops, i - 1);
  tree addr_base;
  HOST_WIDE_INT addr_offset;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
					     &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != op->op0)
    {
      double_int off = tree_to_double_int (mem_op->op0);
      off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
      off = double_int_add (off, shwi_to_double_int (addr_offset));
      mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (host_integerp (mem_op->op0, 0))
	mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
      else
	mem_op->off = -1;
    }
}
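
/* Illustrative example (added): with struct { int a; int b; } obj;
   and obj.b at byte offset 4, the operand pair

       MEM_REF (op0 = 4), ADDR_EXPR (op0 = &obj.b)

   is rewritten to

       MEM_REF (op0 = 8), ADDR_EXPR (op0 = &obj)

   i.e. the component offset is folded into the MEM_REF constant and
   the address is rebased to the containing object.  */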

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
				     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = &VEC_index (vn_reference_op_s, *ops, i - 1);
  gimple def_stmt;
  enum tree_code code;
  double_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = tree_to_double_int (mem_op->op0);
  off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						 &addr_offset);
      if (!addr_base
	  || TREE_CODE (addr_base) != MEM_REF)
	return;

      off = double_int_add (off, shwi_to_double_int (addr_offset));
      off = double_int_add (off, mem_ref_offset (addr_base));
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
	  || TREE_CODE (ptroff) != INTEGER_CST)
	return;

      off = double_int_add (off, tree_to_double_int (ptroff));
      op->op0 = ptr;
    }

  mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (host_integerp (mem_op->op0, 0))
    mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}

/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  VEC (vn_reference_op_s, heap) *operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &VEC_index (vn_reference_op_s, operands, 0);
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && VEC_length (vn_reference_op_s, operands) >= 2
      && VEC_length (vn_reference_op_s, operands) <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &VEC_index (vn_reference_op_s, operands, 1);
      if (VEC_length (vn_reference_op_s, operands) > 2)
	arg1 = &VEC_index (vn_reference_op_s, operands, 2);
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					 arg1 ? 2 : 1,
					 arg0->op0,
					 arg1 ? arg1->op0 : NULL);
	  if (folded
	      && TREE_CODE (folded) == NOP_EXPR)
	    folded = TREE_OPERAND (folded, 0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
	   && TREE_CODE (op->op0) == INTEGER_CST
	   && integer_zerop (op->op1)
	   && VEC_length (vn_reference_op_s, operands) == 2)
    {
      vn_reference_op_t arg0;
      arg0 = &VEC_index (vn_reference_op_s, operands, 1);
      if (arg0->opcode == STRING_CST
	  && (TYPE_MODE (op->type)
	      == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
	  && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
	  && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
	  && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
	return build_int_cst_type (op->type,
				   (TREE_STRING_POINTER (arg0->op0)
				    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}
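
/* Illustrative examples (added): a read such as "abc"[1] arrives
   here as an ARRAY_REF operand with constant index 1 over a
   STRING_CST operand and folds directly to the character constant
   'b'.  Likewise, a call operand vector for a builtin like
   __builtin_labs with a constant argument is rebuilt with
   build_call_expr and folds to a constant.  */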

/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs_1 (VEC (vn_reference_op_s, heap) *orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (vn_reference_op_s, orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && VEC_index (vn_reference_op_s,
			orig, i - 1).opcode == MEM_REF)
	vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && VEC_index (vn_reference_op_s,
			     orig, i - 1).opcode == MEM_REF)
	vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && vro->off == -1
	       && TREE_CODE (vro->op0) == INTEGER_CST
	       && TREE_CODE (vro->op1) == INTEGER_CST
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  double_int off = tree_to_double_int (vro->op0);
	  off = double_int_add (off,
				double_int_neg
				  (tree_to_double_int (vro->op1)));
	  off = double_int_mul (off, tree_to_double_int (vro->op2));
	  if (double_int_fits_in_shwi_p (off))
	    vro->off = off.low;
	}
    }

  return orig;
}

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}

static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
					      valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
				   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
				     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
		       unsigned int cnt, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
				   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
				     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}

/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
					  alias_set_type set,
					  tree type,
					  VEC (vn_reference_op_s,
					       heap) *operands,
					  tree value)
{
  struct vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;

  vr1.vuse = vuse;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
				     VEC_copy (vn_reference_op_s, heap,
					       operands), value, value_id);
}

/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static VEC (vn_reference_op_s, heap) *lhs_ops = NULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definitions LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      VEC (vn_reference_op_s, heap) *tem;
      tree lhs = gimple_assign_lhs (def_stmt);
      bool valueized_anything = false;
      /* Avoid re-allocation overhead.  */
      VEC_truncate (vn_reference_op_s, lhs_ops, 0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      tem = lhs_ops;
      lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
      gcc_assert (lhs_ops == tem);
      if (valueized_anything)
	{
	  lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
						      get_alias_set (lhs),
						      TREE_TYPE (lhs), lhs_ops);
	  if (lhs_ref_ok
	      && !refs_may_alias_p_1 (ref, &lhs_ref, true))
	    return NULL;
	}
      else
	{
	  ao_ref_init (&lhs_ref, lhs);
	  lhs_ref_ok = true;
	}
    }

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* We can't deduce anything useful from clobbers.  */
  if (gimple_clobber_p (def_stmt))
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
	    == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
	  && maxsize2 != -1
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  tree val = build_zero_cst (vr->type);
	  return vn_reference_lookup_or_insert_for_pieces
		   (vuse, vr->set, vr->type, vr->operands, val);
	}
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
				       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  tree val = build_zero_cst (vr->type);
	  return vn_reference_lookup_or_insert_for_pieces
		   (vuse, vr->set, vr->type, vr->operands, val);
	}
    }

  /* 3) Assignment from a constant.  We can use folds native encode/interpret
     routines to extract the assigned bits.  */
  else if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
	   && ref->size == maxsize
	   && maxsize % BITS_PER_UNIT == 0
	   && offset % BITS_PER_UNIT == 0
	   && is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
				       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
	  && maxsize2 == size2
	  && size2 % BITS_PER_UNIT == 0
	  && offset2 % BITS_PER_UNIT == 0
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  /* We support up to 512-bit values (for V8DFmode).  */
	  unsigned char buffer[64];
	  int len;

	  len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
				    buffer, sizeof (buffer));
	  if (len > 0)
	    {
	      tree val = native_interpret_expr (vr->type,
						buffer
						+ ((offset - offset2)
						   / BITS_PER_UNIT),
						ref->size / BITS_PER_UNIT);
	      if (val)
		return vn_reference_lookup_or_insert_for_pieces
			 (vuse, vr->set, vr->type, vr->operands, val);
	    }
	}
    }

  /* 4) Assignment from an SSA name which definition we may be able
     to access pieces from.  */
  else if (ref->size == maxsize
	   && is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
    {
      tree rhs1 = gimple_assign_rhs1 (def_stmt);
      gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt2)
	  && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
	      || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
	  && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
	{
	  tree base2;
	  HOST_WIDE_INT offset2, size2, maxsize2, off;
	  base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
					   &offset2, &size2, &maxsize2);
	  off = offset - offset2;
	  if (maxsize2 != -1
	      && maxsize2 == size2
	      && operand_equal_p (base, base2, 0)
	      && offset2 <= offset
	      && offset2 + size2 >= offset + maxsize)
	    {
	      tree val = NULL_TREE;
	      HOST_WIDE_INT elsz
		= TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
	      if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
		{
		  if (off == 0)
		    val = gimple_assign_rhs1 (def_stmt2);
		  else if (off == elsz)
		    val = gimple_assign_rhs2 (def_stmt2);
		}
	      else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
		       && off % elsz == 0)
		{
		  tree ctor = gimple_assign_rhs1 (def_stmt2);
		  unsigned i = off / elsz;
		  if (i < CONSTRUCTOR_NELTS (ctor))
		    {
		      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
		      if (compare_tree_int (elt->index, i) == 0)
			val = elt->value;
		    }
		}
	      if (val)
		return vn_reference_lookup_or_insert_for_pieces
			 (vuse, vr->set, vr->type, vr->operands, val);
	    }
	}
    }

  /* 5) For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
	   && gimple_assign_single_p (def_stmt)
	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      VEC (vn_reference_op_s, heap) *rhs = NULL;
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
	return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      if (maxsize2 == -1
	  || (base != base2 && !operand_equal_p (base, base2, 0))
	  || offset2 > offset
	  || offset2 + size2 < offset + maxsize)
	return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
	 contains valueized operands for the lhs.  */
      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
      j = VEC_length (vn_reference_op_s, lhs_ops) - 1;
      while (j >= 0 && i >= 0
	     && vn_reference_op_eq (&VEC_index (vn_reference_op_s,
						vr->operands, i),
				    &VEC_index (vn_reference_op_s, lhs_ops, j)))
	{
	  i--;
	  j--;
	}

      /* ??? The innermost op should always be a MEM_REF and we already
	 checked that the assignment to the lhs kills vr.  Thus for
	 aggregate copies using char[] types the vn_reference_op_eq
	 may fail when comparing types for compatibility.  But we really
	 don't care here - further lookups with the rewritten operands
	 will simply fail if we messed up types too badly.  */
      if (j == 0 && i >= 0
	  && VEC_index (vn_reference_op_s, lhs_ops, 0).opcode == MEM_REF
	  && VEC_index (vn_reference_op_s, lhs_ops, 0).off != -1
	  && (VEC_index (vn_reference_op_s, lhs_ops, 0).off
	      == VEC_index (vn_reference_op_s, vr->operands, i).off))
	i--, j--;

      /* i now points to the first additional op.
	 ??? LHS may not be completely contained in VR, one or more
	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
	 try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
	return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
	  > VEC_length (vn_reference_op_s, vr->operands))
	{
	  VEC (vn_reference_op_s, heap) *old = vr->operands;
	  VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
			 i + 1 + VEC_length (vn_reference_op_s, rhs));
	  if (old == shared_lookup_references
	      && vr->operands != old)
	    shared_lookup_references = NULL;
	}
      else
	VEC_truncate (vn_reference_op_s, vr->operands,
		      i + 1 + VEC_length (vn_reference_op_s, rhs));
      FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
	VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, *vro);
      VEC_free (vn_reference_op_s, heap, rhs);
      vr->operands = valueize_refs (vr->operands);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* 6) For memcpy copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
	   && is_gimple_reg_type (vr->type)
	   /* ???  Handle BCOPY as well.  */
	   && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
	   && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
	   && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
	   && host_integerp (gimple_call_arg (def_stmt, 2), 1))
    {
      tree lhs, rhs;
      ao_ref r;
      HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
      vn_reference_op_s op;
      HOST_WIDE_INT at;

      /* Only handle non-variable, addressable refs.  */
      if (ref->size != maxsize
	  || offset % BITS_PER_UNIT != 0
	  || ref->size % BITS_PER_UNIT != 0)
	return (void *)-1;

      /* Extract a pointer base and an offset for the destination.  */
      lhs = gimple_call_arg (def_stmt, 0);
      lhs_offset = 0;
      if (TREE_CODE (lhs) == SSA_NAME)
	lhs = SSA_VAL (lhs);
      if (TREE_CODE (lhs) == ADDR_EXPR)
	{
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
						    &lhs_offset);
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
	      && host_integerp (TREE_OPERAND (tem, 1), 1))
	    {
	      lhs = TREE_OPERAND (tem, 0);
	      lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
	    }
	  else if (DECL_P (tem))
	    lhs = build_fold_addr_expr (tem);
	  else
	    return (void *)-1;
	}
      if (TREE_CODE (lhs) != SSA_NAME
	  && TREE_CODE (lhs) != ADDR_EXPR)
	return (void *)-1;

      /* Extract a pointer base and an offset for the source.  */
      rhs = gimple_call_arg (def_stmt, 1);
      rhs_offset = 0;
      if (TREE_CODE (rhs) == SSA_NAME)
	rhs = SSA_VAL (rhs);
      if (TREE_CODE (rhs) == ADDR_EXPR)
	{
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
						    &rhs_offset);
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
	      && host_integerp (TREE_OPERAND (tem, 1), 1))
	    {
	      rhs = TREE_OPERAND (tem, 0);
	      rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
	    }
	  else if (DECL_P (tem))
	    rhs = build_fold_addr_expr (tem);
	  else
	    return (void *)-1;
	}
      if (TREE_CODE (rhs) != SSA_NAME
	  && TREE_CODE (rhs) != ADDR_EXPR)
	return (void *)-1;

      copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));

      /* The bases of the destination and the references have to agree.  */
      if ((TREE_CODE (base) != MEM_REF
	   && !DECL_P (base))
	  || (TREE_CODE (base) == MEM_REF
	      && (TREE_OPERAND (base, 0) != lhs
		  || !host_integerp (TREE_OPERAND (base, 1), 1)))
	  || (DECL_P (base)
	      && (TREE_CODE (lhs) != ADDR_EXPR
		  || TREE_OPERAND (lhs, 0) != base)))
	return (void *)-1;

      /* And the access has to be contained within the memcpy destination.  */
      at = offset / BITS_PER_UNIT;
      if (TREE_CODE (base) == MEM_REF)
	at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
      if (lhs_offset > at
	  || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
	return (void *)-1;

      /* Make room for 2 operands in the new reference.  */
      if (VEC_length (vn_reference_op_s, vr->operands) < 2)
	{
	  VEC (vn_reference_op_s, heap) *old = vr->operands;
	  VEC_safe_grow (vn_reference_op_s, heap, vr->operands, 2);
	  if (old == shared_lookup_references
	      && vr->operands != old)
	    shared_lookup_references = NULL;
	}
      else
	VEC_truncate (vn_reference_op_s, vr->operands, 2);
      /* The looked-through reference is a simple MEM_REF.  Keep the
	 constant in OP0 consistent with the OFF field derived from it.  */
      memset (&op, 0, sizeof (op));
      op.type = vr->type;
      op.opcode = MEM_REF;
      op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
      op.off = at - lhs_offset + rhs_offset;
      VEC_replace (vn_reference_op_s, vr->operands, 0, op);
      op.type = TREE_TYPE (rhs);
      op.opcode = TREE_CODE (rhs);
      op.op0 = rhs;
      op.off = -1;
      VEC_replace (vn_reference_op_s, vr->operands, 1, op);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
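
/* Illustrative example (added) for case 1) above: given

       memset (&a, 0, sizeof (a));
       ...
       x_1 = a.f;

   the walk from the load reaches the memset, proves the read is
   fully covered by the zeroing store, and records build_zero_cst of
   the read's type, so the load value-numbers to 0 without inspecting
   the stored bytes individually.  */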

/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
			    VEC (vn_reference_op_s, heap) *operands,
			    vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
		 VEC_length (vn_reference_op_s, operands));
  memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
	  VEC_address (vn_reference_op_s, operands),
	  sizeof (vn_reference_op_s)
	  * VEC_length (vn_reference_op_s, operands));
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      vn_walk_kind = kind;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
	*vnresult =
	  (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
						  vn_reference_lookup_2,
						  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
	VEC_free (vn_reference_op_s, heap, vr1.operands);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}
1900 /* Lookup OP in the current hash table, and return the resulting value
1901 number if it exists in the hash table. Return NULL_TREE if it does
1902 not exist in the hash table or if the result field of the structure
1903    was NULL.  VNRESULT will be filled in with the vn_reference_t
1904 stored in the hashtable if one exists. */
1906 tree
1907 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
1908 vn_reference_t *vnresult)
1910 VEC (vn_reference_op_s, heap) *operands;
1911 struct vn_reference_s vr1;
1912 tree cst;
1913   bool valueized_anything;
1915 if (vnresult)
1916 *vnresult = NULL;
1918 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1919 vr1.operands = operands
1920     = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
1921 vr1.type = TREE_TYPE (op);
1922 vr1.set = get_alias_set (op);
1923 vr1.hashcode = vn_reference_compute_hash (&vr1);
1924 if ((cst = fully_constant_vn_reference_p (&vr1)))
1925 return cst;
1927 if (kind != VN_NOWALK
1928 && vr1.vuse)
1930 vn_reference_t wvnresult;
1931 ao_ref r;
1932 /* Make sure to use a valueized reference if we valueized anything.
1933 Otherwise preserve the full reference for advanced TBAA. */
1934       if (!valueized_anything
1935 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
1936 vr1.operands))
1937 ao_ref_init (&r, op);
1938 vn_walk_kind = kind;
1939 wvnresult =
1940 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1941 vn_reference_lookup_2,
1942 vn_reference_lookup_3, &vr1);
1943 if (vr1.operands != operands)
1944 VEC_free (vn_reference_op_s, heap, vr1.operands);
1945 if (wvnresult)
1947 if (vnresult)
1948 *vnresult = wvnresult;
1949 return wvnresult->result;
1952 return NULL_TREE;
1955 return vn_reference_lookup_1 (&vr1, vnresult);
1959 /* Insert OP into the current hash table with a value number of
1960 RESULT, and return the resulting reference structure we created. */
1962 vn_reference_t
1963 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
1965 void **slot;
1966 vn_reference_t vr1;
1968 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1969 if (TREE_CODE (result) == SSA_NAME)
1970 vr1->value_id = VN_INFO (result)->value_id;
1971 else
1972 vr1->value_id = get_or_alloc_constant_value_id (result);
1973 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1974 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
1975 vr1->type = TREE_TYPE (op);
1976 vr1->set = get_alias_set (op);
1977 vr1->hashcode = vn_reference_compute_hash (vr1);
1978 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
1979 vr1->result_vdef = vdef;
1981 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1982 INSERT);
1984 /* Because we lookup stores using vuses, and value number failures
1985 using the vdefs (see visit_reference_op_store for how and why),
1986 it's possible that on failure we may try to insert an already
1987 inserted store. This is not wrong, there is no ssa name for a
1988 store that we could use as a differentiator anyway. Thus, unlike
1989 the other lookup functions, you cannot gcc_assert (!*slot)
1990 here. */
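/* Rough illustration (not from the source): for a store
     # .MEM_3 = VDEF <.MEM_1>
     a = x_2;
   lookups key off the vuse .MEM_1 while failures are recorded
   against the vdef .MEM_3, so revisiting the statement while
   iterating an SCC can lead back here with an equivalent entry
   already sitting in *slot.  */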
1992 /* But free the old slot in case of a collision. */
1993 if (*slot)
1994 free_reference (*slot);
1996 *slot = vr1;
1997 return vr1;
2000 /* Insert a reference by its pieces into the current hash table with
2001 a value number of RESULT. Return the resulting reference
2002 structure we created. */
2004 vn_reference_t
2005 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2006 VEC (vn_reference_op_s, heap) *operands,
2007 tree result, unsigned int value_id)
2010 void **slot;
2011 vn_reference_t vr1;
2013 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2014 vr1->value_id = value_id;
2015 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2016 vr1->operands = valueize_refs (operands);
2017 vr1->type = type;
2018 vr1->set = set;
2019 vr1->hashcode = vn_reference_compute_hash (vr1);
2020 if (result && TREE_CODE (result) == SSA_NAME)
2021 result = SSA_VAL (result);
2022 vr1->result = result;
2024 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
2025 INSERT);
2027 /* At this point we should have all the things inserted that we have
2028 seen before, and we should never try inserting something that
2029 already exists. */
2030 gcc_assert (!*slot);
2031 if (*slot)
2032 free_reference (*slot);
2034 *slot = vr1;
2035 return vr1;
2038 /* Compute and return the hash value for nary operation VNO1.  */
2040 hashval_t
2041 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2043 hashval_t hash;
2044 unsigned i;
2046 for (i = 0; i < vno1->length; ++i)
2047 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2048 vno1->op[i] = SSA_VAL (vno1->op[i]);
2050 if (vno1->length == 2
2051 && commutative_tree_code (vno1->opcode)
2052 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2054 tree temp = vno1->op[0];
2055 vno1->op[0] = vno1->op[1];
2056 vno1->op[1] = temp;
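/* E.g. a_1 + b_2 and b_2 + a_1 are brought into the same canonical
   operand order here, so both forms receive the same hash value and
   compare equal in vn_nary_op_eq.  */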
2059 hash = iterative_hash_hashval_t (vno1->opcode, 0);
2060 for (i = 0; i < vno1->length; ++i)
2061 hash = iterative_hash_expr (vno1->op[i], hash);
2063 return hash;
2066 /* Return the computed hashcode for nary operation P1. */
2068 static hashval_t
2069 vn_nary_op_hash (const void *p1)
2071 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2072 return vno1->hashcode;
2075 /* Compare nary operations P1 and P2 and return true if they are
2076 equivalent. */
2078 static int
2079 vn_nary_op_eq (const void *p1, const void *p2)
2081 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2082 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
2083 unsigned i;
2085 if (vno1->hashcode != vno2->hashcode)
2086 return false;
2088 if (vno1->length != vno2->length)
2089 return false;
2091 if (vno1->opcode != vno2->opcode
2092 || !types_compatible_p (vno1->type, vno2->type))
2093 return false;
2095 for (i = 0; i < vno1->length; ++i)
2096 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2097 return false;
2099 return true;
2102 /* Initialize VNO from the pieces provided. */
2104 static void
2105 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2106 enum tree_code code, tree type, tree *ops)
2108 vno->opcode = code;
2109 vno->length = length;
2110 vno->type = type;
2111 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2114 /* Initialize VNO from OP. */
2116 static void
2117 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2119 unsigned i;
2121 vno->opcode = TREE_CODE (op);
2122 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2123 vno->type = TREE_TYPE (op);
2124 for (i = 0; i < vno->length; ++i)
2125 vno->op[i] = TREE_OPERAND (op, i);
2128 /* Return the number of operands for a vn_nary ops structure from STMT. */
2130 static unsigned int
2131 vn_nary_length_from_stmt (gimple stmt)
2133 switch (gimple_assign_rhs_code (stmt))
2135 case REALPART_EXPR:
2136 case IMAGPART_EXPR:
2137 case VIEW_CONVERT_EXPR:
2138 return 1;
2140 case CONSTRUCTOR:
2141 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2143 default:
2144 return gimple_num_ops (stmt) - 1;
2148 /* Initialize VNO from STMT. */
2150 static void
2151 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2153 unsigned i;
2155 vno->opcode = gimple_assign_rhs_code (stmt);
2156 vno->type = gimple_expr_type (stmt);
2157 switch (vno->opcode)
2159 case REALPART_EXPR:
2160 case IMAGPART_EXPR:
2161 case VIEW_CONVERT_EXPR:
2162 vno->length = 1;
2163 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2164 break;
2166 case CONSTRUCTOR:
2167 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2168 for (i = 0; i < vno->length; ++i)
2169 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2170 break;
2172 default:
2173 vno->length = gimple_num_ops (stmt) - 1;
2174 for (i = 0; i < vno->length; ++i)
2175 vno->op[i] = gimple_op (stmt, i + 1);
2179 /* Compute the hashcode for VNO and look for it in the hash table;
2180 return the resulting value number if it exists in the hash table.
2181 Return NULL_TREE if it does not exist in the hash table or if the
2182 result field of the operation is NULL. VNRESULT will contain the
2183 vn_nary_op_t from the hashtable if it exists. */
2185 static tree
2186 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2188 void **slot;
2190 if (vnresult)
2191 *vnresult = NULL;
2193 vno->hashcode = vn_nary_op_compute_hash (vno);
2194 slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
2195 NO_INSERT);
2196 if (!slot && current_info == optimistic_info)
2197 slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
2198 NO_INSERT);
2199 if (!slot)
2200 return NULL_TREE;
2201 if (vnresult)
2202 *vnresult = (vn_nary_op_t)*slot;
2203 return ((vn_nary_op_t)*slot)->result;
2206 /* Lookup an n-ary operation by its pieces and return the resulting value
2207 number if it exists in the hash table. Return NULL_TREE if it does
2208 not exist in the hash table or if the result field of the operation
2209 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2210 if it exists. */
2212 tree
2213 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2214 tree type, tree *ops, vn_nary_op_t *vnresult)
2216 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2217 sizeof_vn_nary_op (length));
2218 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2219 return vn_nary_op_lookup_1 (vno1, vnresult);
2222 /* Lookup OP in the current hash table, and return the resulting value
2223 number if it exists in the hash table. Return NULL_TREE if it does
2224 not exist in the hash table or if the result field of the operation
2225 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2226 if it exists. */
2228 tree
2229 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2231 vn_nary_op_t vno1
2232 = XALLOCAVAR (struct vn_nary_op_s,
2233 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2234 init_vn_nary_op_from_op (vno1, op);
2235 return vn_nary_op_lookup_1 (vno1, vnresult);
2238 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2239 value number if it exists in the hash table. Return NULL_TREE if
2240 it does not exist in the hash table. VNRESULT will contain the
2241 vn_nary_op_t from the hashtable if it exists. */
2243 tree
2244 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2246 vn_nary_op_t vno1
2247 = XALLOCAVAR (struct vn_nary_op_s,
2248 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2249 init_vn_nary_op_from_stmt (vno1, stmt);
2250 return vn_nary_op_lookup_1 (vno1, vnresult);
2253 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2255 static vn_nary_op_t
2256 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2258 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2261 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2262 obstack. */
2264 static vn_nary_op_t
2265 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2267 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2268 &current_info->nary_obstack);
2270 vno1->value_id = value_id;
2271 vno1->length = length;
2272 vno1->result = result;
2274 return vno1;
2277 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2278 VNO->HASHCODE first. */
2280 static vn_nary_op_t
2281 vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash)
2283 void **slot;
2285 if (compute_hash)
2286 vno->hashcode = vn_nary_op_compute_hash (vno);
2288 slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT);
2289 gcc_assert (!*slot);
2291 *slot = vno;
2292 return vno;
2295 /* Insert an n-ary operation into the current hash table using its
2296 pieces. Return the vn_nary_op_t structure we created and put in
2297 the hashtable. */
2299 vn_nary_op_t
2300 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2301 tree type, tree *ops,
2302 tree result, unsigned int value_id)
2304 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2305 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2306 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2309 /* Insert OP into the current hash table with a value number of
2310 RESULT. Return the vn_nary_op_t structure we created and put in
2311 the hashtable. */
2313 vn_nary_op_t
2314 vn_nary_op_insert (tree op, tree result)
2316 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2317 vn_nary_op_t vno1;
2319 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2320 init_vn_nary_op_from_op (vno1, op);
2321 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2324 /* Insert the rhs of STMT into the current hash table with a value number of
2325 RESULT. */
2327 vn_nary_op_t
2328 vn_nary_op_insert_stmt (gimple stmt, tree result)
2330 vn_nary_op_t vno1
2331 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2332 result, VN_INFO (result)->value_id);
2333 init_vn_nary_op_from_stmt (vno1, stmt);
2334 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2337 /* Compute a hashcode for PHI operation VP1 and return it. */
2339 static inline hashval_t
2340 vn_phi_compute_hash (vn_phi_t vp1)
2342 hashval_t result;
2343 int i;
2344 tree phi1op;
2345 tree type;
2347 result = vp1->block->index;
2349 /* If all PHI arguments are constants we need to distinguish
2350 the PHI node via its type. */
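/* E.g. (illustrative) i_1 = PHI <1, 2> of type int and u_2 = PHI <1, 2>
   of type unsigned int must not hash the same, as their arguments alone
   cannot distinguish them; precision and signedness are mixed into the
   hash below for integral types.  */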
2351 type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
2352 result += (INTEGRAL_TYPE_P (type)
2353 + (INTEGRAL_TYPE_P (type)
2354 ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
2356 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2358 if (phi1op == VN_TOP)
2359 continue;
2360 result = iterative_hash_expr (phi1op, result);
2363 return result;
2366 /* Return the computed hashcode for phi operation P1. */
2368 static hashval_t
2369 vn_phi_hash (const void *p1)
2371 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2372 return vp1->hashcode;
2375 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2377 static int
2378 vn_phi_eq (const void *p1, const void *p2)
2380 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2381 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
2383 if (vp1->hashcode != vp2->hashcode)
2384 return false;
2386 if (vp1->block == vp2->block)
2388 int i;
2389 tree phi1op;
2391 /* If the PHI nodes do not have compatible types
2392 they are not the same. */
2393 if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
2394 TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
2395 return false;
2397       /* Any phi in the same block will have its arguments in the
2398 same edge order, because of how we store phi nodes. */
2399 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2401 tree phi2op = VEC_index (tree, vp2->phiargs, i);
2402 if (phi1op == VN_TOP || phi2op == VN_TOP)
2403 continue;
2404 if (!expressions_equal_p (phi1op, phi2op))
2405 return false;
2407 return true;
2409 return false;
2412 static VEC(tree, heap) *shared_lookup_phiargs;
2414 /* Lookup PHI in the current hash table, and return the resulting
2415 value number if it exists in the hash table. Return NULL_TREE if
2416 it does not exist in the hash table. */
2418 static tree
2419 vn_phi_lookup (gimple phi)
2421 void **slot;
2422 struct vn_phi_s vp1;
2423 unsigned i;
2425 VEC_truncate (tree, shared_lookup_phiargs, 0);
2427 /* Canonicalize the SSA_NAME's to their value number. */
2428 for (i = 0; i < gimple_phi_num_args (phi); i++)
2430 tree def = PHI_ARG_DEF (phi, i);
2431 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2432 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
2434 vp1.phiargs = shared_lookup_phiargs;
2435 vp1.block = gimple_bb (phi);
2436 vp1.hashcode = vn_phi_compute_hash (&vp1);
2437 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
2438 NO_INSERT);
2439 if (!slot && current_info == optimistic_info)
2440 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
2441 NO_INSERT);
2442 if (!slot)
2443 return NULL_TREE;
2444 return ((vn_phi_t)*slot)->result;
2447 /* Insert PHI into the current hash table with a value number of
2448 RESULT. */
2450 static vn_phi_t
2451 vn_phi_insert (gimple phi, tree result)
2453 void **slot;
2454 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2455 unsigned i;
2456 VEC (tree, heap) *args = NULL;
2458 /* Canonicalize the SSA_NAME's to their value number. */
2459 for (i = 0; i < gimple_phi_num_args (phi); i++)
2461 tree def = PHI_ARG_DEF (phi, i);
2462 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2463 VEC_safe_push (tree, heap, args, def);
2465 vp1->value_id = VN_INFO (result)->value_id;
2466 vp1->phiargs = args;
2467 vp1->block = gimple_bb (phi);
2468 vp1->result = result;
2469 vp1->hashcode = vn_phi_compute_hash (vp1);
2471 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
2472 INSERT);
2474 /* Because we iterate over phi operations more than once, it's
2475      possible the slot might already exist here, hence no assert.  */
2476 *slot = vp1;
2477 return vp1;
2481 /* Print set of components in strongly connected component SCC to OUT. */
2483 static void
2484 print_scc (FILE *out, VEC (tree, heap) *scc)
2486 tree var;
2487 unsigned int i;
2489 fprintf (out, "SCC consists of:");
2490 FOR_EACH_VEC_ELT (tree, scc, i, var)
2492 fprintf (out, " ");
2493 print_generic_expr (out, var, 0);
2495 fprintf (out, "\n");
2498 /* Set the value number of FROM to TO, return true if it has changed
2499 as a result. */
2501 static inline bool
2502 set_ssa_val_to (tree from, tree to)
2504 tree currval = SSA_VAL (from);
2506 if (from != to)
2508 if (currval == from)
2510 if (dump_file && (dump_flags & TDF_DETAILS))
2512 fprintf (dump_file, "Not changing value number of ");
2513 print_generic_expr (dump_file, from, 0);
2514 fprintf (dump_file, " from VARYING to ");
2515 print_generic_expr (dump_file, to, 0);
2516 fprintf (dump_file, "\n");
2518 return false;
2520 else if (TREE_CODE (to) == SSA_NAME
2521 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2522 to = from;
2525 /* The only thing we allow as value numbers are VN_TOP, ssa_names
2526 and invariants. So assert that here. */
2527 gcc_assert (to != NULL_TREE
2528 && (to == VN_TOP
2529 || TREE_CODE (to) == SSA_NAME
2530 || is_gimple_min_invariant (to)));
2532 if (dump_file && (dump_flags & TDF_DETAILS))
2534 fprintf (dump_file, "Setting value number of ");
2535 print_generic_expr (dump_file, from, 0);
2536 fprintf (dump_file, " to ");
2537 print_generic_expr (dump_file, to, 0);
2540 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
2542 VN_INFO (from)->valnum = to;
2543 if (dump_file && (dump_flags & TDF_DETAILS))
2544 fprintf (dump_file, " (changed)\n");
2545 return true;
2547 if (dump_file && (dump_flags & TDF_DETAILS))
2548 fprintf (dump_file, "\n");
2549 return false;
2552 /* Mark as processed all the definitions in the defining stmt of USE, or
2553 the USE itself. */
2555 static void
2556 mark_use_processed (tree use)
2558 ssa_op_iter iter;
2559 def_operand_p defp;
2560 gimple stmt = SSA_NAME_DEF_STMT (use);
2562 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
2564 VN_INFO (use)->use_processed = true;
2565 return;
2568 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2570 tree def = DEF_FROM_PTR (defp);
2572 VN_INFO (def)->use_processed = true;
2576 /* Value number all definitions in STMT to themselves.
2577 Return true if a value number changed. */
2579 static bool
2580 defs_to_varying (gimple stmt)
2582 bool changed = false;
2583 ssa_op_iter iter;
2584 def_operand_p defp;
2586 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2588 tree def = DEF_FROM_PTR (defp);
2589 changed |= set_ssa_val_to (def, def);
2591 return changed;
2594 static bool expr_has_constants (tree expr);
2595 static tree valueize_expr (tree expr);
2597 /* Visit a copy between LHS and RHS, return true if the value number
2598 changed. */
2600 static bool
2601 visit_copy (tree lhs, tree rhs)
2603 /* Follow chains of copies to their destination. */
2604 while (TREE_CODE (rhs) == SSA_NAME
2605 && SSA_VAL (rhs) != rhs)
2606 rhs = SSA_VAL (rhs);
2608   /* The copy may have a more interesting constant-filled expression
2609 (we don't, since we know our RHS is just an SSA name). */
2610 if (TREE_CODE (rhs) == SSA_NAME)
2612 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2613 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2616 return set_ssa_val_to (lhs, rhs);
2619 /* Visit a nary operator RHS, value number it, and return true if the
2620 value number of LHS has changed as a result. */
2622 static bool
2623 visit_nary_op (tree lhs, gimple stmt)
2625 bool changed = false;
2626 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2628 if (result)
2629 changed = set_ssa_val_to (lhs, result);
2630 else
2632 changed = set_ssa_val_to (lhs, lhs);
2633 vn_nary_op_insert_stmt (stmt, lhs);
2636 return changed;
2639 /* Visit a call STMT storing into LHS. Return true if the value number
2640 of the LHS has changed as a result. */
2642 static bool
2643 visit_reference_op_call (tree lhs, gimple stmt)
2645 bool changed = false;
2646 struct vn_reference_s vr1;
2647 vn_reference_t vnresult = NULL;
2648 tree vuse = gimple_vuse (stmt);
2649 tree vdef = gimple_vdef (stmt);
2651 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
2652 if (lhs && TREE_CODE (lhs) != SSA_NAME)
2653 lhs = NULL_TREE;
2655 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2656 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2657 vr1.type = gimple_expr_type (stmt);
2658 vr1.set = 0;
2659 vr1.hashcode = vn_reference_compute_hash (&vr1);
2660 vn_reference_lookup_1 (&vr1, &vnresult);
2662 if (vnresult)
2664 if (vnresult->result_vdef)
2665 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
2667 if (!vnresult->result && lhs)
2668 vnresult->result = lhs;
2670 if (vnresult->result && lhs)
2672 changed |= set_ssa_val_to (lhs, vnresult->result);
2674 if (VN_INFO (vnresult->result)->has_constants)
2675 VN_INFO (lhs)->has_constants = true;
2678 else
2680 void **slot;
2681 vn_reference_t vr2;
2682 if (vdef)
2683 changed |= set_ssa_val_to (vdef, vdef);
2684 if (lhs)
2685 changed |= set_ssa_val_to (lhs, lhs);
2686 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2687 vr2->vuse = vr1.vuse;
2688 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2689 vr2->type = vr1.type;
2690 vr2->set = vr1.set;
2691 vr2->hashcode = vr1.hashcode;
2692 vr2->result = lhs;
2693 vr2->result_vdef = vdef;
2694 slot = htab_find_slot_with_hash (current_info->references,
2695 vr2, vr2->hashcode, INSERT);
2696 if (*slot)
2697 free_reference (*slot);
2698 *slot = vr2;
2701 return changed;
2704 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2705 and return true if the value number of the LHS has changed as a result. */
2707 static bool
2708 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2710 bool changed = false;
2711 tree last_vuse;
2712 tree result;
2714 last_vuse = gimple_vuse (stmt);
2715 last_vuse_ptr = &last_vuse;
2716 result = vn_reference_lookup (op, gimple_vuse (stmt),
2717 default_vn_walk_kind, NULL);
2718 last_vuse_ptr = NULL;
2720   /* If we have a VCE (VIEW_CONVERT_EXPR), try looking up its operand
2721      as it might be stored in
2721 a different type. */
2722 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2723 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2724 default_vn_walk_kind, NULL);
2726 /* We handle type-punning through unions by value-numbering based
2727 on offset and size of the access. Be prepared to handle a
2728 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
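/* Illustrative sketch (not from the source): given
     union { int i; float f; } u;
     u.i = 42;
     x_1 = u.f;
   the lookup of u.f finds the value 42 stored at the same offset and
   size; since int does not convert trivially to float, we wrap the
   result as VIEW_CONVERT_EXPR <float> (42) and simplify that below.  */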
2729 if (result
2730 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2732 /* We will be setting the value number of lhs to the value number
2733 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2734 So first simplify and lookup this expression to see if it
2735 is already available. */
2736 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2737 if ((CONVERT_EXPR_P (val)
2738 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2739 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2741 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2742 if ((CONVERT_EXPR_P (tem)
2743 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2744 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2745 TREE_TYPE (val), tem)))
2746 val = tem;
2748 result = val;
2749 if (!is_gimple_min_invariant (val)
2750 && TREE_CODE (val) != SSA_NAME)
2751 result = vn_nary_op_lookup (val, NULL);
2752 /* If the expression is not yet available, value-number lhs to
2753 a new SSA_NAME we create. */
2754 if (!result)
2756 result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (),
2757 "vntemp");
2758 /* Initialize value-number information properly. */
2759 VN_INFO_GET (result)->valnum = result;
2760 VN_INFO (result)->value_id = get_next_value_id ();
2761 VN_INFO (result)->expr = val;
2762 VN_INFO (result)->has_constants = expr_has_constants (val);
2763 VN_INFO (result)->needs_insertion = true;
2764 /* As all "inserted" statements are singleton SCCs, insert
2765 to the valid table. This is strictly needed to
2766 avoid re-generating new value SSA_NAMEs for the same
2767 expression during SCC iteration over and over (the
2768 optimistic table gets cleared after each iteration).
2769 We do not need to insert into the optimistic table, as
2770 lookups there will fall back to the valid table. */
2771 if (current_info == optimistic_info)
2773 current_info = valid_info;
2774 vn_nary_op_insert (val, result);
2775 current_info = optimistic_info;
2777 else
2778 vn_nary_op_insert (val, result);
2779 if (dump_file && (dump_flags & TDF_DETAILS))
2781 fprintf (dump_file, "Inserting name ");
2782 print_generic_expr (dump_file, result, 0);
2783 fprintf (dump_file, " for expression ");
2784 print_generic_expr (dump_file, val, 0);
2785 fprintf (dump_file, "\n");
2790 if (result)
2792 changed = set_ssa_val_to (lhs, result);
2793 if (TREE_CODE (result) == SSA_NAME
2794 && VN_INFO (result)->has_constants)
2796 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2797 VN_INFO (lhs)->has_constants = true;
2800 else
2802 changed = set_ssa_val_to (lhs, lhs);
2803 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
2806 return changed;
2810 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2811 and return true if the value number of the LHS has changed as a result. */
2813 static bool
2814 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2816 bool changed = false;
2817 vn_reference_t vnresult = NULL;
2818 tree result, assign;
2819 bool resultsame = false;
2820 tree vuse = gimple_vuse (stmt);
2821 tree vdef = gimple_vdef (stmt);
2823 /* First we want to lookup using the *vuses* from the store and see
2824      if the last store to this location with the same address
2825 had the same value.
2827 The vuses represent the memory state before the store. If the
2828 memory state, address, and value of the store is the same as the
2829 last store to this location, then this store will produce the
2830 same memory state as that store.
2832 In this case the vdef versions for this store are value numbered to those
2833 vuse versions, since they represent the same memory state after
2834 this store.
2836 Otherwise, the vdefs for the store are used when inserting into
2837 the table, since the store generates a new memory state. */
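/* Illustrative GIMPLE (not from the source):
     # .MEM_2 = VDEF <.MEM_1>
     a = 1;
     # .MEM_5 = VDEF <.MEM_2>
     a = 1;
   The second store changes nothing, so its vdef .MEM_5 is value
   numbered to its vuse .MEM_2 rather than to a new memory state.  */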
2839 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);
2841 if (result)
2843 if (TREE_CODE (result) == SSA_NAME)
2844 result = SSA_VAL (result);
2845 if (TREE_CODE (op) == SSA_NAME)
2846 op = SSA_VAL (op);
2847 resultsame = expressions_equal_p (result, op);
2850 if (!result || !resultsame)
2852 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
2853 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
2854 if (vnresult)
2856 VN_INFO (vdef)->use_processed = true;
2857 return set_ssa_val_to (vdef, vnresult->result_vdef);
2861 if (!result || !resultsame)
2863 if (dump_file && (dump_flags & TDF_DETAILS))
2865 fprintf (dump_file, "No store match\n");
2866 fprintf (dump_file, "Value numbering store ");
2867 print_generic_expr (dump_file, lhs, 0);
2868 fprintf (dump_file, " to ");
2869 print_generic_expr (dump_file, op, 0);
2870 fprintf (dump_file, "\n");
2872 /* Have to set value numbers before insert, since insert is
2873 going to valueize the references in-place. */
2874 if (vdef)
2876 changed |= set_ssa_val_to (vdef, vdef);
2879 /* Do not insert structure copies into the tables. */
2880 if (is_gimple_min_invariant (op)
2881 || is_gimple_reg (op))
2882 vn_reference_insert (lhs, op, vdef, NULL);
2884 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
2885 vn_reference_insert (assign, lhs, vuse, vdef);
2887 else
2889 /* We had a match, so value number the vdef to have the value
2890 number of the vuse it came from. */
2892 if (dump_file && (dump_flags & TDF_DETAILS))
2893 	fprintf (dump_file, "Store matched earlier value, "
2894 "value numbering store vdefs to matching vuses.\n");
2896 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
2899 return changed;
2902 /* Visit and value number PHI, return true if the value number
2903 changed. */
2905 static bool
2906 visit_phi (gimple phi)
2908 bool changed = false;
2909 tree result;
2910 tree sameval = VN_TOP;
2911 bool allsame = true;
2912 unsigned i;
2914   /* TODO: We could check for this in init_scc_vn, and replace this
2915 with a gcc_assert. */
2916 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2917 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2919 /* See if all non-TOP arguments have the same value. TOP is
2920 equivalent to everything, so we can ignore it. */
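/* E.g. (illustrative) x_3 = PHI <x_1, VN_TOP, x_1>: all non-TOP
   arguments agree, so x_3 gets the value of x_1; an argument still at
   VN_TOP (e.g. from a not-yet-visited back edge) does not spoil the
   equivalence.  */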
2921 for (i = 0; i < gimple_phi_num_args (phi); i++)
2923 tree def = PHI_ARG_DEF (phi, i);
2925 if (TREE_CODE (def) == SSA_NAME)
2926 def = SSA_VAL (def);
2927 if (def == VN_TOP)
2928 continue;
2929 if (sameval == VN_TOP)
2931 sameval = def;
2933 else
2935 if (!expressions_equal_p (def, sameval))
2937 allsame = false;
2938 break;
2943 /* If all value numbered to the same value, the phi node has that
2944 value. */
2945 if (allsame)
2947 if (is_gimple_min_invariant (sameval))
2949 VN_INFO (PHI_RESULT (phi))->has_constants = true;
2950 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2952 else
2954 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2955 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2958 if (TREE_CODE (sameval) == SSA_NAME)
2959 return visit_copy (PHI_RESULT (phi), sameval);
2961 return set_ssa_val_to (PHI_RESULT (phi), sameval);
2964 /* Otherwise, see if it is equivalent to a phi node in this block. */
2965 result = vn_phi_lookup (phi);
2966 if (result)
2968 if (TREE_CODE (result) == SSA_NAME)
2969 changed = visit_copy (PHI_RESULT (phi), result);
2970 else
2971 changed = set_ssa_val_to (PHI_RESULT (phi), result);
2973 else
2975 vn_phi_insert (phi, PHI_RESULT (phi));
2976 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2977 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2978 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2981 return changed;
2984 /* Return true if EXPR contains constants. */
2986 static bool
2987 expr_has_constants (tree expr)
2989 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2991 case tcc_unary:
2992 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2994 case tcc_binary:
2995 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2996 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2997 /* Constants inside reference ops are rarely interesting, but
2998 it can take a lot of looking to find them. */
2999 case tcc_reference:
3000 case tcc_declaration:
3001 return false;
3002 default:
3003 return is_gimple_min_invariant (expr);
3005 return false;
3008 /* Return true if STMT contains constants. */
3010 static bool
3011 stmt_has_constants (gimple stmt)
3013 if (gimple_code (stmt) != GIMPLE_ASSIGN)
3014 return false;
3016 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
3018 case GIMPLE_UNARY_RHS:
3019 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
3021 case GIMPLE_BINARY_RHS:
3022 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
3023 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
3024 case GIMPLE_TERNARY_RHS:
3025 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
3026 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
3027 || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
3028 case GIMPLE_SINGLE_RHS:
3029 /* Constants inside reference ops are rarely interesting, but
3030 it can take a lot of looking to find them. */
3031 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
3032 default:
3033 gcc_unreachable ();
3035 return false;
3038 /* Replace SSA_NAMES in expr with their value numbers, and return the
3039 result.
3040 This is performed in place. */
3042 static tree
3043 valueize_expr (tree expr)
3045 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3047 case tcc_binary:
3048 TREE_OPERAND (expr, 1) = vn_valueize (TREE_OPERAND (expr, 1));
3049 /* Fallthru. */
3050 case tcc_unary:
3051 TREE_OPERAND (expr, 0) = vn_valueize (TREE_OPERAND (expr, 0));
3052 break;
3053 default:;
3055 return expr;
3058 /* Simplify the binary expression RHS, and return the result if
3059 simplified. */
3061 static tree
3062 simplify_binary_expression (gimple stmt)
3064 tree result = NULL_TREE;
3065 tree op0 = gimple_assign_rhs1 (stmt);
3066 tree op1 = gimple_assign_rhs2 (stmt);
3067 enum tree_code code = gimple_assign_rhs_code (stmt);
3069 /* This will not catch every single case we could combine, but will
3070 catch those with constants. The goal here is to simultaneously
3071 combine constants between expressions, but avoid infinite
3072 expansion of expressions during simplification. */
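/* Illustrative sketch: if x_1 = a_2 + 1 was recorded as an expression
   containing a constant, then for y_3 = x_1 + 2 the code below feeds
   a_2 + 1 in place of x_1 to fold_binary, which can combine the
   constants into a_2 + 3; SSA names without constants are merely
   valueized, avoiding unbounded re-expansion.  */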
3073 if (TREE_CODE (op0) == SSA_NAME)
3075 if (VN_INFO (op0)->has_constants
3076 || TREE_CODE_CLASS (code) == tcc_comparison
3077 || code == COMPLEX_EXPR)
3078 op0 = valueize_expr (vn_get_expr_for (op0));
3079 else
3080 op0 = vn_valueize (op0);
3083 if (TREE_CODE (op1) == SSA_NAME)
3085 if (VN_INFO (op1)->has_constants
3086 || code == COMPLEX_EXPR)
3087 op1 = valueize_expr (vn_get_expr_for (op1));
3088 else
3089 op1 = vn_valueize (op1);
3092 /* Pointer plus constant can be represented as invariant address.
3093      Do so to allow further propagation, see also tree forwprop.  */
3094 if (code == POINTER_PLUS_EXPR
3095 && host_integerp (op1, 1)
3096 && TREE_CODE (op0) == ADDR_EXPR
3097 && is_gimple_min_invariant (op0))
3098 return build_invariant_address (TREE_TYPE (op0),
3099 TREE_OPERAND (op0, 0),
3100 TREE_INT_CST_LOW (op1));
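/* E.g. (illustrative) p_1 = &a + 4 becomes the invariant address
   &MEM[(void *)&a + 4B] in roughly the dump syntax, which later
   passes can propagate like any other address constant.  */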
3102 /* Avoid folding if nothing changed. */
3103 if (op0 == gimple_assign_rhs1 (stmt)
3104 && op1 == gimple_assign_rhs2 (stmt))
3105 return NULL_TREE;
3107 fold_defer_overflow_warnings ();
3109 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3110 if (result)
3111 STRIP_USELESS_TYPE_CONVERSION (result);
3113 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3114 stmt, 0);
3116 /* Make sure result is not a complex expression consisting
3117      of operators of operators (i.e. (a + b) + (a + c)).
3118 Otherwise, we will end up with unbounded expressions if
3119 fold does anything at all. */
3120 if (result && valid_gimple_rhs_p (result))
3121 return result;
3123 return NULL_TREE;
3126 /* Simplify the unary expression RHS, and return the result if
3127 simplified. */
3129 static tree
3130 simplify_unary_expression (gimple stmt)
3132 tree result = NULL_TREE;
3133 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3134 enum tree_code code = gimple_assign_rhs_code (stmt);
3136 /* We handle some tcc_reference codes here that are all
3137 GIMPLE_ASSIGN_SINGLE codes. */
3138 if (code == REALPART_EXPR
3139 || code == IMAGPART_EXPR
3140 || code == VIEW_CONVERT_EXPR
3141 || code == BIT_FIELD_REF)
3142 op0 = TREE_OPERAND (op0, 0);
3144 if (TREE_CODE (op0) != SSA_NAME)
3145 return NULL_TREE;
3147 orig_op0 = op0;
3148 if (VN_INFO (op0)->has_constants)
3149 op0 = valueize_expr (vn_get_expr_for (op0));
3150 else if (CONVERT_EXPR_CODE_P (code)
3151 || code == REALPART_EXPR
3152 || code == IMAGPART_EXPR
3153 || code == VIEW_CONVERT_EXPR
3154 || code == BIT_FIELD_REF)
3156 /* We want to do tree-combining on conversion-like expressions.
3157 Make sure we feed only SSA_NAMEs or constants to fold though. */
3158 tree tem = valueize_expr (vn_get_expr_for (op0));
3159 if (UNARY_CLASS_P (tem)
3160 || BINARY_CLASS_P (tem)
3161 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3162 || TREE_CODE (tem) == SSA_NAME
3163 || TREE_CODE (tem) == CONSTRUCTOR
3164 || is_gimple_min_invariant (tem))
3165 op0 = tem;
3168 /* Avoid folding if nothing changed, but remember the expression. */
3169 if (op0 == orig_op0)
3170 return NULL_TREE;
3172 if (code == BIT_FIELD_REF)
3174 tree rhs = gimple_assign_rhs1 (stmt);
3175 result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3176 op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3178 else
3179 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3180 if (result)
3182 STRIP_USELESS_TYPE_CONVERSION (result);
3183 if (valid_gimple_rhs_p (result))
3184 return result;
3187 return NULL_TREE;
3190 /* Try to simplify RHS using equivalences and constant folding. */
3192 static tree
3193 try_to_simplify (gimple stmt)
3195 enum tree_code code = gimple_assign_rhs_code (stmt);
3196 tree tem;
3198 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
3199 in this case, there is no point in doing extra work. */
3200 if (code == SSA_NAME)
3201 return NULL_TREE;
3203 /* First try constant folding based on our current lattice. */
3204 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize);
3205 if (tem
3206 && (TREE_CODE (tem) == SSA_NAME
3207 || is_gimple_min_invariant (tem)))
3208 return tem;
3210 /* If that didn't work try combining multiple statements. */
3211 switch (TREE_CODE_CLASS (code))
3213 case tcc_reference:
3214 /* Fallthrough for some unary codes that can operate on registers. */
3215 if (!(code == REALPART_EXPR
3216 || code == IMAGPART_EXPR
3217 || code == VIEW_CONVERT_EXPR
3218 || code == BIT_FIELD_REF))
3219 break;
3220 /* We could do a little more with unary ops, if they expand
3221 into binary ops, but it's debatable whether it is worth it. */
3222 case tcc_unary:
3223 return simplify_unary_expression (stmt);
3225 case tcc_comparison:
3226 case tcc_binary:
3227 return simplify_binary_expression (stmt);
3229 default:
3230 break;
3233 return NULL_TREE;
3236 /* Visit and value number USE, return true if the value number
3237 changed. */
3239 static bool
3240 visit_use (tree use)
3242 bool changed = false;
3243 gimple stmt = SSA_NAME_DEF_STMT (use);
3245 mark_use_processed (use);
3247 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3248 if (dump_file && (dump_flags & TDF_DETAILS)
3249 && !SSA_NAME_IS_DEFAULT_DEF (use))
3251 fprintf (dump_file, "Value numbering ");
3252 print_generic_expr (dump_file, use, 0);
3253 fprintf (dump_file, " stmt = ");
3254 print_gimple_stmt (dump_file, stmt, 0, 0);
3257 /* Handle uninitialized uses. */
3258 if (SSA_NAME_IS_DEFAULT_DEF (use))
3259 changed = set_ssa_val_to (use, use);
3260 else
3262 if (gimple_code (stmt) == GIMPLE_PHI)
3263 changed = visit_phi (stmt);
3264 else if (gimple_has_volatile_ops (stmt))
3265 changed = defs_to_varying (stmt);
3266 else if (is_gimple_assign (stmt))
3268 enum tree_code code = gimple_assign_rhs_code (stmt);
3269 tree lhs = gimple_assign_lhs (stmt);
3270 tree rhs1 = gimple_assign_rhs1 (stmt);
3271 tree simplified;
3273 /* Shortcut for copies. Simplifying copies is pointless,
3274 since we copy the expression and value they represent. */
3275 if (code == SSA_NAME
3276 && TREE_CODE (lhs) == SSA_NAME)
3278 changed = visit_copy (lhs, rhs1);
3279 goto done;
3281 simplified = try_to_simplify (stmt);
3282 if (simplified)
3284 if (dump_file && (dump_flags & TDF_DETAILS))
3286 fprintf (dump_file, "RHS ");
3287 print_gimple_expr (dump_file, stmt, 0, 0);
3288 fprintf (dump_file, " simplified to ");
3289 print_generic_expr (dump_file, simplified, 0);
3290 if (TREE_CODE (lhs) == SSA_NAME)
3291 fprintf (dump_file, " has constants %d\n",
3292 expr_has_constants (simplified));
3293 else
3294 fprintf (dump_file, "\n");
3297 /* Setting value numbers to constants will occasionally
3298 screw up phi congruence because constants are not
3299 uniquely associated with a single ssa name that can be
3300 looked up. */
3301 if (simplified
3302 && is_gimple_min_invariant (simplified)
3303 && TREE_CODE (lhs) == SSA_NAME)
3305 VN_INFO (lhs)->expr = simplified;
3306 VN_INFO (lhs)->has_constants = true;
3307 changed = set_ssa_val_to (lhs, simplified);
3308 goto done;
3310 else if (simplified
3311 && TREE_CODE (simplified) == SSA_NAME
3312 && TREE_CODE (lhs) == SSA_NAME)
3314 changed = visit_copy (lhs, simplified);
3315 goto done;
3317 else if (simplified)
3319 if (TREE_CODE (lhs) == SSA_NAME)
3321 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3322 /* We have to unshare the expression or else
3323 	     valueizing may change the IL stream.  */
3324 VN_INFO (lhs)->expr = unshare_expr (simplified);
3327 else if (stmt_has_constants (stmt)
3328 && TREE_CODE (lhs) == SSA_NAME)
3329 VN_INFO (lhs)->has_constants = true;
3330 else if (TREE_CODE (lhs) == SSA_NAME)
3332 /* We reset expr and constantness here because we may
3333 have been value numbering optimistically, and
3334 iterating. They may become non-constant in this case,
3335 even if they were optimistically constant. */
3337 VN_INFO (lhs)->has_constants = false;
3338 VN_INFO (lhs)->expr = NULL_TREE;
3341 if ((TREE_CODE (lhs) == SSA_NAME
3342 /* We can substitute SSA_NAMEs that are live over
3343 abnormal edges with their constant value. */
3344 && !(gimple_assign_copy_p (stmt)
3345 && is_gimple_min_invariant (rhs1))
3346 && !(simplified
3347 && is_gimple_min_invariant (simplified))
3348 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3349 /* Stores or copies from SSA_NAMEs that are live over
3350 abnormal edges are a problem. */
3351 || (code == SSA_NAME
3352 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3353 changed = defs_to_varying (stmt);
3354 else if (REFERENCE_CLASS_P (lhs)
3355 || DECL_P (lhs))
3356 changed = visit_reference_op_store (lhs, rhs1, stmt);
3357 else if (TREE_CODE (lhs) == SSA_NAME)
3359 if ((gimple_assign_copy_p (stmt)
3360 && is_gimple_min_invariant (rhs1))
3361 || (simplified
3362 && is_gimple_min_invariant (simplified)))
3364 VN_INFO (lhs)->has_constants = true;
3365 if (simplified)
3366 changed = set_ssa_val_to (lhs, simplified);
3367 else
3368 changed = set_ssa_val_to (lhs, rhs1);
3370 else
3372 switch (get_gimple_rhs_class (code))
3374 case GIMPLE_UNARY_RHS:
3375 case GIMPLE_BINARY_RHS:
3376 case GIMPLE_TERNARY_RHS:
3377 changed = visit_nary_op (lhs, stmt);
3378 break;
3379 case GIMPLE_SINGLE_RHS:
3380 switch (TREE_CODE_CLASS (code))
3382 case tcc_reference:
3383 /* VOP-less references can go through unary case. */
3384 if ((code == REALPART_EXPR
3385 || code == IMAGPART_EXPR
3386 || code == VIEW_CONVERT_EXPR
3387 || code == BIT_FIELD_REF)
3388 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
3390 changed = visit_nary_op (lhs, stmt);
3391 break;
3393 /* Fallthrough. */
3394 case tcc_declaration:
3395 changed = visit_reference_op_load (lhs, rhs1, stmt);
3396 break;
3397 default:
3398 if (code == ADDR_EXPR)
3400 changed = visit_nary_op (lhs, stmt);
3401 break;
3403 else if (code == CONSTRUCTOR)
3405 changed = visit_nary_op (lhs, stmt);
3406 break;
3408 changed = defs_to_varying (stmt);
3410 break;
3411 default:
3412 changed = defs_to_varying (stmt);
3413 break;
3417 else
3418 changed = defs_to_varying (stmt);
3420 else if (is_gimple_call (stmt))
3422 tree lhs = gimple_call_lhs (stmt);
3424 /* ??? We could try to simplify calls. */
3426 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3428 if (stmt_has_constants (stmt))
3429 VN_INFO (lhs)->has_constants = true;
3430 else
3432 /* We reset expr and constantness here because we may
3433 have been value numbering optimistically, and
3434 iterating. They may become non-constant in this case,
3435 even if they were optimistically constant. */
3436 VN_INFO (lhs)->has_constants = false;
3437 VN_INFO (lhs)->expr = NULL_TREE;
3440 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3442 changed = defs_to_varying (stmt);
3443 goto done;
3447 if (!gimple_call_internal_p (stmt)
3448 && (/* Calls to the same function with the same vuse
3449 and the same operands do not necessarily return the same
3450 value, unless they're pure or const. */
3451 gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
3452 /* If calls have a vdef, subsequent calls won't have
3453 the same incoming vuse. So, if 2 calls with vdef have the
3454 same vuse, we know they're not subsequent.
3455 	      We can therefore value number two non-subsequent calls to
3456 	      the same function with the same vuse and the same operands
3457 	      the same, because there is no code in the program that
3458 	      could compare the two resulting memory states.  */
3459 || gimple_vdef (stmt)))
3460 changed = visit_reference_op_call (lhs, stmt);
3461 else
3462 changed = defs_to_varying (stmt);
3464 else
3465 changed = defs_to_varying (stmt);
3467 done:
3468 return changed;
3471 /* Compare two operands by reverse postorder index */
3473 static int
3474 compare_ops (const void *pa, const void *pb)
3476 const tree opa = *((const tree *)pa);
3477 const tree opb = *((const tree *)pb);
3478 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3479 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3480 basic_block bba;
3481 basic_block bbb;
3483 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3484 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3485 else if (gimple_nop_p (opstmta))
3486 return -1;
3487 else if (gimple_nop_p (opstmtb))
3488 return 1;
3490 bba = gimple_bb (opstmta);
3491 bbb = gimple_bb (opstmtb);
3493 if (!bba && !bbb)
3494 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3495 else if (!bba)
3496 return -1;
3497 else if (!bbb)
3498 return 1;
3500 if (bba == bbb)
3502 if (gimple_code (opstmta) == GIMPLE_PHI
3503 && gimple_code (opstmtb) == GIMPLE_PHI)
3504 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3505 else if (gimple_code (opstmta) == GIMPLE_PHI)
3506 return -1;
3507 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3508 return 1;
3509 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3510 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3511 else
3512 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3514 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3517 /* Sort an array containing members of a strongly connected component
3518 SCC so that the members are ordered by RPO number.
3519 This means that when the sort is complete, iterating through the
3520 array will give you the members in RPO order. */
3522 static void
3523 sort_scc (VEC (tree, heap) *scc)
3525 VEC_qsort (tree, scc, compare_ops);
3528 /* Insert the no-longer-used nary ONARY into the hash tables of INFO.  */
3530 static void
3531 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3533 size_t size = sizeof_vn_nary_op (onary->length);
3534 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3535 &info->nary_obstack);
3536 memcpy (nary, onary, size);
3537 vn_nary_op_insert_into (nary, info->nary, false);
3540 /* Insert the no-longer-used phi OPHI into the hash tables of INFO.  */
3542 static void
3543 copy_phi (vn_phi_t ophi, vn_tables_t info)
3545 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3546 void **slot;
3547 memcpy (phi, ophi, sizeof (*phi));
3548 ophi->phiargs = NULL;
3549 slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
3550 gcc_assert (!*slot);
3551 *slot = phi;
3554 /* Insert the no-longer-used reference OREF into the hash tables of INFO.  */
3556 static void
3557 copy_reference (vn_reference_t oref, vn_tables_t info)
3559 vn_reference_t ref;
3560 void **slot;
3561 ref = (vn_reference_t) pool_alloc (info->references_pool);
3562 memcpy (ref, oref, sizeof (*ref));
3563 oref->operands = NULL;
3564 slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
3565 INSERT);
3566 if (*slot)
3567 free_reference (*slot);
3568 *slot = ref;
3571 /* Process a strongly connected component in the SSA graph. */
3573 static void
3574 process_scc (VEC (tree, heap) *scc)
3576 tree var;
3577 unsigned int i;
3578 unsigned int iterations = 0;
3579 bool changed = true;
3580 htab_iterator hi;
3581 vn_nary_op_t nary;
3582 vn_phi_t phi;
3583 vn_reference_t ref;
3585 /* If the SCC has a single member, just visit it. */
3586 if (VEC_length (tree, scc) == 1)
3588 tree use = VEC_index (tree, scc, 0);
3589 if (VN_INFO (use)->use_processed)
3590 return;
3591 /* We need to make sure it doesn't form a cycle itself, which can
3592 happen for self-referential PHI nodes. In that case we would
3593 end up inserting an expression with VN_TOP operands into the
3594 valid table which makes us derive bogus equivalences later.
3595 The cheapest way to check this is to assume it for all PHI nodes. */
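/* E.g. (illustrative) i_1 = PHI <i_1(latch), i_0(entry)> mentions its
   own result, so i_1 is a one-element cycle; processing it without
   iterating could record an expression over the still-VN_TOP i_1 in
   the valid table, hence all PHIs take the iteration path below.  */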
3596 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3597 /* Fallthru to iteration. */ ;
3598 else
3600 visit_use (use);
3601 return;
3605 /* Iterate over the SCC with the optimistic table until it stops
3606 changing. */
3607 current_info = optimistic_info;
3608 while (changed)
3610 changed = false;
3611 iterations++;
3612 if (dump_file && (dump_flags & TDF_DETAILS))
3613 fprintf (dump_file, "Starting iteration %d\n", iterations);
3614 /* As we are value-numbering optimistically we have to
3615 clear the expression tables and the simplified expressions
3616 in each iteration until we converge. */
3617 htab_empty (optimistic_info->nary);
3618 htab_empty (optimistic_info->phis);
3619 htab_empty (optimistic_info->references);
3620 obstack_free (&optimistic_info->nary_obstack, NULL);
3621 gcc_obstack_init (&optimistic_info->nary_obstack);
3622 empty_alloc_pool (optimistic_info->phis_pool);
3623 empty_alloc_pool (optimistic_info->references_pool);
3624 FOR_EACH_VEC_ELT (tree, scc, i, var)
3625 VN_INFO (var)->expr = NULL_TREE;
3626 FOR_EACH_VEC_ELT (tree, scc, i, var)
3627 changed |= visit_use (var);
3630 statistics_histogram_event (cfun, "SCC iterations", iterations);
3632 /* Finally, copy the contents of the no longer used optimistic
3633 table to the valid table. */
3634 FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
3635 copy_nary (nary, valid_info);
3636 FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
3637 copy_phi (phi, valid_info);
3638 FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
3639 copy_reference (ref, valid_info);
3641 current_info = valid_info;
3644 DEF_VEC_O(ssa_op_iter);
3645 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
3647 /* Pop the components of the found SCC for NAME off the SCC stack
3648 and process them. Returns true if all went well, false if
3649 we run into resource limits. */
3651 static bool
3652 extract_and_process_scc_for_name (tree name)
3654 VEC (tree, heap) *scc = NULL;
3655 tree x;
3657 /* Found an SCC, pop the components off the SCC stack and
3658 process them. */
3661 x = VEC_pop (tree, sccstack);
3663 VN_INFO (x)->on_sccstack = false;
3664 VEC_safe_push (tree, heap, scc, x);
3665 } while (x != name);
3667   /* Bail out of SCCVN in case an SCC turns out to be incredibly large.  */
3668 if (VEC_length (tree, scc)
3669 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3671 if (dump_file)
3672 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3673 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
3674 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3676 VEC_free (tree, heap, scc);
3677 return false;
3680 if (VEC_length (tree, scc) > 1)
3681 sort_scc (scc);
3683 if (dump_file && (dump_flags & TDF_DETAILS))
3684 print_scc (dump_file, scc);
3686 process_scc (scc);
3688 VEC_free (tree, heap, scc);
3690 return true;
3693 /* Depth first search on NAME to discover and process SCC's in the SSA
3694 graph.
3695 Execution of this algorithm relies on the fact that the SCC's are
3696 popped off the stack in topological order.
3697 Returns true if successful, false if we stopped processing SCC's due
3698 to resource constraints. */
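/* Sketch (illustrative, not from the source): for the cycle
     i_1 = PHI <i_0(entry), i_2(latch)>
     i_2 = i_1 + 1
   the DFS from i_1 descends into i_2, sees the back reference to i_1
   still on sccstack, keeps VN_INFO (i_1)->low == its dfsnum, and pops
   {i_1, i_2} as one SCC; this is essentially Tarjan's algorithm, done
   iteratively with explicit name and iterator stacks.  */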
3700 static bool
3701 DFS (tree name)
3703 VEC(ssa_op_iter, heap) *itervec = NULL;
3704 VEC(tree, heap) *namevec = NULL;
3705 use_operand_p usep = NULL;
3706 gimple defstmt;
3707 tree use;
3708 ssa_op_iter iter;
3710 start_over:
3711 /* SCC info */
3712 VN_INFO (name)->dfsnum = next_dfs_num++;
3713 VN_INFO (name)->visited = true;
3714 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3716 VEC_safe_push (tree, heap, sccstack, name);
3717 VN_INFO (name)->on_sccstack = true;
3718 defstmt = SSA_NAME_DEF_STMT (name);
3720 /* Recursively DFS on our operands, looking for SCC's. */
3721 if (!gimple_nop_p (defstmt))
3723 /* Push a new iterator. */
3724 if (gimple_code (defstmt) == GIMPLE_PHI)
3725 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3726 else
3727 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3729 else
3730 clear_and_done_ssa_iter (&iter);
3732 while (1)
3734 /* If we are done processing uses of a name, go up the stack
3735 of iterators and process SCCs as we found them. */
3736 if (op_iter_done (&iter))
3738 /* See if we found an SCC. */
3739 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3740 if (!extract_and_process_scc_for_name (name))
3742 VEC_free (tree, heap, namevec);
3743 VEC_free (ssa_op_iter, heap, itervec);
3744 return false;
3747 /* Check if we are done. */
3748 if (VEC_empty (tree, namevec))
3750 VEC_free (tree, heap, namevec);
3751 VEC_free (ssa_op_iter, heap, itervec);
3752 return true;
3755 /* Restore the last use walker and continue walking there. */
3756 use = name;
3757 name = VEC_pop (tree, namevec);
3758 memcpy (&iter, &VEC_last (ssa_op_iter, itervec),
3759 sizeof (ssa_op_iter));
3760 VEC_pop (ssa_op_iter, itervec);
3761 goto continue_walking;
3764 use = USE_FROM_PTR (usep);
3766 /* Since we handle phi nodes, we will sometimes get
3767 invariants in the use expression. */
3768 if (TREE_CODE (use) == SSA_NAME)
3770 if (! (VN_INFO (use)->visited))
3772 /* Recurse by pushing the current use walking state on
3773 the stack and starting over. */
3774 VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
3775 VEC_safe_push(tree, heap, namevec, name);
3776 name = use;
3777 goto start_over;
3779 continue_walking:
3780 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3781 VN_INFO (use)->low);
3783 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3784 && VN_INFO (use)->on_sccstack)
3786 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3787 VN_INFO (name)->low);
3791 usep = op_iter_next_use (&iter);
3795 /* Allocate a value number table. */
3797 static void
3798 allocate_vn_table (vn_tables_t table)
3800 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3801 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3802 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3803 free_reference);
3805 gcc_obstack_init (&table->nary_obstack);
3806 table->phis_pool = create_alloc_pool ("VN phis",
3807 sizeof (struct vn_phi_s),
3808 30);
3809 table->references_pool = create_alloc_pool ("VN references",
3810 sizeof (struct vn_reference_s),
3811 30);
3814 /* Free a value number table. */
3816 static void
3817 free_vn_table (vn_tables_t table)
3819 htab_delete (table->phis);
3820 htab_delete (table->nary);
3821 htab_delete (table->references);
3822 obstack_free (&table->nary_obstack, NULL);
3823 free_alloc_pool (table->phis_pool);
3824 free_alloc_pool (table->references_pool);
3827 static void
3828 init_scc_vn (void)
3830 size_t i;
3831 int j;
3832 int *rpo_numbers_temp;
3834 calculate_dominance_info (CDI_DOMINATORS);
3835 sccstack = NULL;
3836 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3837 free);
3839 constant_value_ids = BITMAP_ALLOC (NULL);
3841 next_dfs_num = 1;
3842 next_value_id = 1;
3844 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
3845   /* VEC_alloc doesn't actually grow the vector to the right size; it
3846      just preallocates the space to do so.  */
3847 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table,
3848 num_ssa_names + 1);
3849 gcc_obstack_init (&vn_ssa_aux_obstack);
3851 shared_lookup_phiargs = NULL;
3852 shared_lookup_references = NULL;
3853 rpo_numbers = XNEWVEC (int, last_basic_block);
3854 rpo_numbers_temp = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
3855 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3857 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
3858 the i'th block in RPO order is bb. We want to map bb's to RPO
3859 numbers, so we need to rearrange this array. */
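  /* For example, if rpo_numbers_temp[0] == 5, then basic block 5 is
     first in RPO order, and the inversion below sets rpo_numbers[5]
     to 0.  */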
  for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;

  XDELETE (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
        {
          VN_INFO_GET (name)->valnum = VN_TOP;
          VN_INFO (name)->expr = NULL_TREE;
          VN_INFO (name)->value_id = 0;
        }
    }

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}
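
/* Free the data structures used by SCC value numbering.  */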
void
free_scc_vn (void)
{
  size_t i;

  htab_delete (constant_to_value_id);
  BITMAP_FREE (constant_value_ids);
  VEC_free (tree, heap, shared_lookup_phiargs);
  VEC_free (vn_reference_op_s, heap, shared_lookup_references);
  XDELETEVEC (rpo_numbers);
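
  /* Release the SSA names that value numbering created and flagged
     as needing insertion.  */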
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->needs_insertion)
        release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);

  VEC_free (tree, heap, sccstack);
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}

/* Set *ID if we computed something useful in RESULT.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
        *id = VN_INFO (result)->value_id;
      else if (is_gimple_min_invariant (result))
        *id = get_or_alloc_constant_value_id (result);
    }
}

/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  htab_iterator hi;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we put in the hash
     tables.  */

  FOR_EACH_HTAB_ELEMENT (valid_info->nary, vno, vn_nary_op_t, hi)
    set_value_id_for_result (vno->result, &vno->value_id);

  FOR_EACH_HTAB_ELEMENT (valid_info->phis, vp, vn_phi_t, hi)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HTAB_ELEMENT (valid_info->references, vr, vn_reference_t, hi)
    set_value_id_for_result (vr->result, &vr->value_id);
}

/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
   how we use the alias oracle walking during the VN process.  */

bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  size_t i;
  tree param;
  bool changed = true;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();
  current_info = valid_info;
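
  /* Default definitions of parameters are not computed by any
     statement, so seed them with themselves as their value number.  */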
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    {
      tree def = ssa_default_def (cfun, param);
      if (def)
        VN_INFO (def)->valnum = def;
    }
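
  /* Value number every SSA name that is actually used by walking the
     SSA use-def graph depth-first, processing SCCs as they are
     found.  */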
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->visited == false
          && !has_zero_uses (name))
        if (!DFS (name))
          {
            free_scc_vn ();
            return false;
          }
    }

  /* Initialize the value ids.  */
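  /* Names that are their own value (or are still VN_TOP) lead a new
     value and get a fresh id; names whose value is a constant share
     that constant's id.  */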
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
        continue;
      info = VN_INFO (name);
      if (info->valnum == name
          || info->valnum == VN_TOP)
        info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
        info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate the value ids until they stop changing.  */
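  /* A name copies its id from its current value; since that value may
     itself receive its id later in the same pass, iterate to a fixed
     point.  */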
  while (changed)
    {
      changed = false;
      for (i = 1; i < num_ssa_names; ++i)
        {
          tree name = ssa_name (i);
          vn_ssa_aux_t info;
          if (!name)
            continue;
          info = VN_INFO (name);
          if (TREE_CODE (info->valnum) == SSA_NAME
              && info->valnum != name
              && info->value_id != VN_INFO (info->valnum)->value_id)
            {
              changed = true;
              info->value_id = VN_INFO (info->valnum)->value_id;
            }
        }
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          if (name
              && VN_INFO (name)->visited
              && SSA_VAL (name) != name)
            {
              print_generic_expr (dump_file, name, 0);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, SSA_VAL (name), 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  return true;
}

/* Return an upper bound on the value ids we have handed out so far
   (the next unallocated value id).  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}

/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}

/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;
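
  /* Whether a unary, binary or comparison operation can trap depends
     on its type: floating-point operations can trap on (signaling)
     NaNs when trapping math is enabled, and integral operations can
     trap on overflow when TYPE_OVERFLOW_TRAPS (-ftrapv) is set.  */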
  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (type)
               && TYPE_OVERFLOW_TRAPS (type))
        honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
                                       honor_trapv,
                                       honor_nans, honor_snans, rhs2,
                                       &handled);
  if (handled
      && ret)
    return true;
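
  /* Even if the operation itself cannot trap, one of its operand
     expressions may, e.g. a memory reference that could fault.  */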
  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}