/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "dumpfile.h"
#include "hashtab.h"
#include "alloc-pool.h"
#include "flags.h"
#include "bitmap.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "gimple-fold.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight line code, it
   is equivalent to a regular hash based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
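/* A minimal sketch (illustrative only, not built) of the fixpoint
   iteration from alternative 2 above: every name in an SCC is
   re-visited until no value number changes.  `visit_use' stands in
   for the per-statement visiting routine, which is not part of this
   excerpt.  */
#if 0
static void
process_scc_sketch (VEC (tree, heap) *scc)
{
  bool changed = true;
  while (changed)
    {
      unsigned i;
      tree name;
      changed = false;
      FOR_EACH_VEC_ELT (tree, scc, i, name)
        changed |= visit_use (name);
    }
}
#endif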
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;
static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;


DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
				SSA_NAME_VERSION (name));
  gcc_checking_assert (res);
  return res;
}
/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
	       SSA_NAME_VERSION (name), value);
}
/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
		   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
	       SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}
/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;
  enum tree_code code;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is not an assignment use it directly.  */
  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  code = gimple_assign_rhs_code (def_stmt);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      if ((code == REALPART_EXPR
	   || code == IMAGPART_EXPR
	   || code == VIEW_CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
				      0)) == SSA_NAME)
	expr = fold_build1 (code,
			    gimple_expr_type (def_stmt),
			    TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (code,
			  gimple_expr_type (def_stmt),
			  gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (code,
			  gimple_expr_type (def_stmt),
			  gimple_assign_rhs1 (def_stmt),
			  gimple_assign_rhs2 (def_stmt));
      break;

    case tcc_exceptional:
      if (code == CONSTRUCTOR
	  && TREE_CODE
	       (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
	expr = gimple_assign_rhs1 (def_stmt);
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}
/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}
/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}
/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
				   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}
/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
				   vc.hashcode, INSERT);
  if (*slot)
    return ((vn_constant_t)*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = (void *) vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}
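/* Usage sketch (illustrative only, not built): two constants that
   compare equal with vn_constant_eq_with_type share one value id,
   and that id tests positive with value_id_constant_p.  */
#if 0
static void
constant_value_id_example (void)
{
  unsigned int id = get_or_alloc_constant_value_id (integer_one_node);
  gcc_assert (id == get_constant_value_id (integer_one_node));
  gcc_assert (value_id_constant_p (id));
}
#endif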
/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2));
}
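/* For example, operands whose types are `const int' and `int' compare
   equal above, since only the TYPE_MAIN_VARIANTs are checked for
   compatibility.  */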
/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}
/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}
/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vn_reference_op_s, vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (vro->off != -1)
	{
	  if (off == -1)
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (off != -1
	      && off != 0)
	    result = iterative_hash_hashval_t (off, result);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  result = iterative_hash_hashval_t (TREE_CODE (op), result);
		  result = iterative_hash_expr (op, result);
		}
	    }
	  else
	    result = vn_reference_op_compute_hash (vro, result);
	}
    }
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
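/* Note that runs of constant `off' fields are accumulated and hashed as
   a single combined offset, and a dereferenced ADDR_EXPR is hashed by
   its underlying object.  For example (illustrative), the operand
   strings for a.b.c and for MEM[&a + constant-offset-of-b.c] hash
   identically when all the component offsets are constant.  */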
/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  unsigned i, j;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  if (vro1->off == -1)
	    break;
	  off1 += vro1->off;
	}
      for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  if (vro2->off == -1)
	    break;
	  off2 += vro2->off;
	}
      if (off1 != off2)
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (VEC_length (vn_reference_op_s, vr1->operands) != i
	 || VEC_length (vn_reference_op_s, vr2->operands) != j);

  return true;
}
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case MODIFY_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case WITH_SIZE_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.off = 0;
	  break;
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  if (host_integerp (TREE_OPERAND (ref, 1), 0))
	    temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
	  break;
	case BIT_FIELD_REF:
	  /* Record bits and position.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& TREE_CODE (this_offset) == INTEGER_CST)
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    double_int off
		      = tree_to_double_int (this_offset)
			+ tree_to_double_int (bit_offset)
			  .arshift (BITS_PER_UNIT == 8
				    ? 3 : exact_log2 (BITS_PER_UNIT),
				    HOST_BITS_PER_DOUBLE_INT);
		    if (off.fits_shwi ())
		      temp.off = off.low;
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* Record index as operand.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  /* Always record lower bounds and element size.  */
	  temp.op1 = array_ref_low_bound (ref);
	  temp.op2 = array_ref_element_size (ref);
	  if (TREE_CODE (temp.op0) == INTEGER_CST
	      && TREE_CODE (temp.op1) == INTEGER_CST
	      && TREE_CODE (temp.op2) == INTEGER_CST)
	    {
	      double_int off = tree_to_double_int (temp.op0);
	      off += -tree_to_double_int (temp.op1);
	      off *= tree_to_double_int (temp.op2);
	      if (off.fits_shwi ())
		temp.off = off.low;
	    }
	  break;
	case VAR_DECL:
	  if (DECL_HARD_REGISTER (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	  /* Canonicalize decls to MEM[&decl] which is what we end up with
	     when valueizing MEM[ptr] with ptr = &decl.  */
	  temp.opcode = MEM_REF;
	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
	  temp.off = 0;
	  VEC_safe_push (vn_reference_op_s, heap, *result, temp);
	  temp.opcode = ADDR_EXPR;
	  temp.op0 = build_fold_addr_expr (ref);
	  temp.type = TREE_TYPE (temp.op0);
	  temp.off = -1;
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthrough.  */
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (IE they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration  */
	case REALPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      VEC_safe_push (vn_reference_op_s, heap, *result, temp);

      if (REFERENCE_CLASS_P (ref)
	  || TREE_CODE (ref) == MODIFY_EXPR
	  || TREE_CODE (ref) == WITH_SIZE_EXPR
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
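/* Example (illustrative): for a load from a.b[i_1], where a is a
   VAR_DECL, the loop above pushes, outermost first,
     { ARRAY_REF, op0 = i_1, op1 = low bound, op2 = element size },
     { COMPONENT_REF, op0 = FIELD_DECL for b },
   and finally the canonicalized pair for the decl itself,
     { MEM_REF, op0 = 0, off = 0 } and { ADDR_EXPR, op0 = &a }.  */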
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
	size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
	size = -1;
      else
	size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      vn_reference_op_t pop = &VEC_index (vn_reference_op_s, ops, i-1);
	      base = TREE_OPERAND (op->op0, 0);
	      if (pop->off == -1)
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  base_alias_set = get_deref_alias_set (op->op0);
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += tree_low_cst (op->op1, 0);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */

	    if (op->op1
		|| !host_integerp (DECL_FIELD_OFFSET (field), 1))
	      max_size = -1;
	    else
	      {
		offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
			   * BITS_PER_UNIT);
		offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!host_integerp (op->op0, 0)
	      || !host_integerp (op->op1, 0)
	      || !host_integerp (op->op2, 0))
	    max_size = -1;
	  else
	    {
	      HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
	      hindex -= TREE_INT_CST_LOW (op->op1);
	      hindex *= TREE_INT_CST_LOW (op->op2);
	      hindex *= BITS_PER_UNIT;
	      offset += hindex;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case CONST_DECL:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  return true;
}
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
			      VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, temp);
    }

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  temp.off = -1;
  VEC_safe_push (vn_reference_op_s, heap, *result, temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}
/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = &VEC_index (vn_reference_op_s, *ops, i - 1);
  tree addr_base;
  HOST_WIDE_INT addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
					     &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      double_int off = tree_to_double_int (mem_op->op0);
      off = off.sext (TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
      off += double_int::from_shwi (addr_offset);
      mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (host_integerp (mem_op->op0, 0))
	mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
      else
	mem_op->off = -1;
    }
}
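/* Example (illustrative): given the operand pair for MEM[&s.f, 8], the
   constant offset of .f is folded into the MEM_REF operand, yielding
   MEM[&s, 8 + offset-of-f], with mem_op->off updated accordingly.  */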
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
				     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = &VEC_index (vn_reference_op_s, *ops, i - 1);
  gimple def_stmt;
  enum tree_code code;
  double_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = tree_to_double_int (mem_op->op0);
  off = off.sext (TYPE_PRECISION (TREE_TYPE (mem_op->op0)));

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						 &addr_offset);
      if (!addr_base
	  || TREE_CODE (addr_base) != MEM_REF)
	return;

      off += double_int::from_shwi (addr_offset);
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
	  || TREE_CODE (ptroff) != INTEGER_CST)
	return;

      off += tree_to_double_int (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (host_integerp (mem_op->op0, 0))
    mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  VEC (vn_reference_op_s, heap) *operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &VEC_index (vn_reference_op_s, operands, 0);
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && VEC_length (vn_reference_op_s, operands) >= 2
      && VEC_length (vn_reference_op_s, operands) <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &VEC_index (vn_reference_op_s, operands, 1);
      if (VEC_length (vn_reference_op_s, operands) > 2)
	arg1 = &VEC_index (vn_reference_op_s, operands, 2);
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					 arg1 ? 2 : 1,
					 arg0->op0,
					 arg1 ? arg1->op0 : NULL);
	  if (folded
	      && TREE_CODE (folded) == NOP_EXPR)
	    folded = TREE_OPERAND (folded, 0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
	   && TREE_CODE (op->op0) == INTEGER_CST
	   && integer_zerop (op->op1)
	   && VEC_length (vn_reference_op_s, operands) == 2)
    {
      vn_reference_op_t arg0;
      arg0 = &VEC_index (vn_reference_op_s, operands, 1);
      if (arg0->opcode == STRING_CST
	  && (TYPE_MODE (op->type)
	      == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
	  && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
	  && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
	  && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
	return build_int_cst_type (op->type,
				   (TREE_STRING_POINTER (arg0->op0)
				    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs_1 (VEC (vn_reference_op_s, heap) *orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (vn_reference_op_s, orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && VEC_index (vn_reference_op_s,
			orig, i - 1).opcode == MEM_REF)
	vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && VEC_index (vn_reference_op_s,
			     orig, i - 1).opcode == MEM_REF)
	vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && vro->off == -1
	       && TREE_CODE (vro->op0) == INTEGER_CST
	       && TREE_CODE (vro->op1) == INTEGER_CST
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  double_int off = tree_to_double_int (vro->op0);
	  off += -tree_to_double_int (vro->op1);
	  off *= tree_to_double_int (vro->op2);
	  if (off.fits_shwi ())
	    vro->off = off.low;
	}
    }

  return orig;
}
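/* Example (illustrative): if SSA_VAL (i_2) is the constant 3, the op
   { ARRAY_REF, op0 = i_2, off = -1 } is rewritten in-place to
   { ARRAY_REF, op0 = 3 } and its constant `off' field is computed, so
   the lookup can hit entries recorded with a constant index.  */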
static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}
static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
					      valueized_anything);
  return shared_lookup_references;
}
/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
				   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
				     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}
static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
		       unsigned int cnt, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
				   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
				     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
					  alias_set_type set,
					  tree type,
					  VEC (vn_reference_op_s,
					       heap) *operands,
					  tree value)
{
  struct vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
				     VEC_copy (vn_reference_op_s, heap,
					       operands), value, value_id);
}
/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static VEC (vn_reference_op_s, heap) *lhs_ops = NULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definitions LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      VEC (vn_reference_op_s, heap) *tem;
      tree lhs = gimple_assign_lhs (def_stmt);
      bool valueized_anything = false;
      /* Avoid re-allocation overhead.  */
      VEC_truncate (vn_reference_op_s, lhs_ops, 0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      tem = lhs_ops;
      lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
      gcc_assert (lhs_ops == tem);
      if (valueized_anything)
	{
	  lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
						      get_alias_set (lhs),
						      TREE_TYPE (lhs), lhs_ops);
	  if (lhs_ref_ok
	      && !refs_may_alias_p_1 (ref, &lhs_ref, true))
	    return NULL;
	}
      else
	{
	  ao_ref_init (&lhs_ref, lhs);
	  lhs_ref_ok = true;
	}
    }

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* We can't deduce anything useful from clobbers.  */
  if (gimple_clobber_p (def_stmt))
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
	  == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
	  && maxsize2 != -1
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  tree val = build_zero_cst (vr->type);
	  return vn_reference_lookup_or_insert_for_pieces
	           (vuse, vr->set, vr->type, vr->operands, val);
	}
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
				       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  tree val = build_zero_cst (vr->type);
	  return vn_reference_lookup_or_insert_for_pieces
	           (vuse, vr->set, vr->type, vr->operands, val);
	}
    }

  /* 3) Assignment from a constant.  We can use folds native encode/interpret
     routines to extract the assigned bits.  */
  else if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
	   && ref->size == maxsize
	   && maxsize % BITS_PER_UNIT == 0
	   && offset % BITS_PER_UNIT == 0
	   && is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
				       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
	  && maxsize2 == size2
	  && size2 % BITS_PER_UNIT == 0
	  && offset2 % BITS_PER_UNIT == 0
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  /* We support up to 512-bit values (for V8DFmode).  */
	  unsigned char buffer[64];
	  int len;

	  len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
				    buffer, sizeof (buffer));
	  if (len > 0)
	    {
	      tree val = native_interpret_expr (vr->type,
						buffer
						+ ((offset - offset2)
						   / BITS_PER_UNIT),
						ref->size / BITS_PER_UNIT);
	      if (val)
		return vn_reference_lookup_or_insert_for_pieces
		         (vuse, vr->set, vr->type, vr->operands, val);
	    }
	}
    }

  /* 4) Assignment from an SSA name which definition we may be able
     to access pieces from.  */
  else if (ref->size == maxsize
	   && is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
    {
      tree rhs1 = gimple_assign_rhs1 (def_stmt);
      gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt2)
	  && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
	      || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
	  && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
	{
	  tree base2;
	  HOST_WIDE_INT offset2, size2, maxsize2, off;
	  base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
					   &offset2, &size2, &maxsize2);
	  off = offset - offset2;
	  if (maxsize2 != -1
	      && maxsize2 == size2
	      && operand_equal_p (base, base2, 0)
	      && offset2 <= offset
	      && offset2 + size2 >= offset + maxsize)
	    {
	      tree val = NULL_TREE;
	      HOST_WIDE_INT elsz
		= TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
	      if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
		{
		  if (off == 0)
		    val = gimple_assign_rhs1 (def_stmt2);
		  else if (off == elsz)
		    val = gimple_assign_rhs2 (def_stmt2);
		}
	      else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
		       && off % elsz == 0)
		{
		  tree ctor = gimple_assign_rhs1 (def_stmt2);
		  unsigned i = off / elsz;
		  if (i < CONSTRUCTOR_NELTS (ctor))
		    {
		      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
		      if (compare_tree_int (elt->index, i) == 0)
			val = elt->value;
		    }
		}
	      if (val)
		return vn_reference_lookup_or_insert_for_pieces
		         (vuse, vr->set, vr->type, vr->operands, val);
	    }
	}
    }

  /* 5) For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
	   && gimple_assign_single_p (def_stmt)
	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      VEC (vn_reference_op_s, heap) *rhs = NULL;
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
	return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      if (maxsize2 == -1
	  || (base != base2 && !operand_equal_p (base, base2, 0))
	  || offset2 > offset
	  || offset2 + size2 < offset + maxsize)
	return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
	 contains valueized operands for the lhs.  */
      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
      j = VEC_length (vn_reference_op_s, lhs_ops) - 1;
      while (j >= 0 && i >= 0
	     && vn_reference_op_eq (&VEC_index (vn_reference_op_s,
						vr->operands, i),
				    &VEC_index (vn_reference_op_s, lhs_ops, j)))
	{
	  i--;
	  j--;
	}

      /* ??? The innermost op should always be a MEM_REF and we already
	 checked that the assignment to the lhs kills vr.  Thus for
	 aggregate copies using char[] types the vn_reference_op_eq
	 may fail when comparing types for compatibility.  But we really
	 don't care here - further lookups with the rewritten operands
	 will simply fail if we messed up types too badly.  */
      if (j == 0 && i >= 0
	  && VEC_index (vn_reference_op_s, lhs_ops, 0).opcode == MEM_REF
	  && VEC_index (vn_reference_op_s, lhs_ops, 0).off != -1
	  && (VEC_index (vn_reference_op_s, lhs_ops, 0).off
	      == VEC_index (vn_reference_op_s, vr->operands, i).off))
	i--, j--;

      /* i now points to the first additional op.
	 ??? LHS may not be completely contained in VR, one or more
	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
	 try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
	return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
	  > VEC_length (vn_reference_op_s, vr->operands))
	{
	  VEC (vn_reference_op_s, heap) *old = vr->operands;
	  VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
			 i + 1 + VEC_length (vn_reference_op_s, rhs));
	  if (old == shared_lookup_references
	      && vr->operands != old)
	    shared_lookup_references = NULL;
	}
      else
	VEC_truncate (vn_reference_op_s, vr->operands,
		      i + 1 + VEC_length (vn_reference_op_s, rhs));
      FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
	VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, *vro);
      VEC_free (vn_reference_op_s, heap, rhs);
      vr->operands = valueize_refs (vr->operands);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* 6) For memcpy copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
	   && is_gimple_reg_type (vr->type)
	   /* ??? Handle BCOPY as well.  */
	   && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
	   && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
	   && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
	   && host_integerp (gimple_call_arg (def_stmt, 2), 1))
    {
      tree lhs, rhs;
      ao_ref r;
      HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
      vn_reference_op_s op;
      HOST_WIDE_INT at;

      /* Only handle non-variable, addressable refs.  */
      if (ref->size != maxsize
	  || offset % BITS_PER_UNIT != 0
	  || ref->size % BITS_PER_UNIT != 0)
	return (void *)-1;

      /* Extract a pointer base and an offset for the destination.  */
      lhs = gimple_call_arg (def_stmt, 0);
      lhs_offset = 0;
      if (TREE_CODE (lhs) == SSA_NAME)
	lhs = SSA_VAL (lhs);
      if (TREE_CODE (lhs) == ADDR_EXPR)
	{
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
						    &lhs_offset);
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
	      && host_integerp (TREE_OPERAND (tem, 1), 1))
	    {
	      lhs = TREE_OPERAND (tem, 0);
	      lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
	    }
	  else if (DECL_P (tem))
	    lhs = build_fold_addr_expr (tem);
	  else
	    return (void *)-1;
	}
      if (TREE_CODE (lhs) != SSA_NAME
	  && TREE_CODE (lhs) != ADDR_EXPR)
	return (void *)-1;

      /* Extract a pointer base and an offset for the source.  */
      rhs = gimple_call_arg (def_stmt, 1);
      rhs_offset = 0;
      if (TREE_CODE (rhs) == SSA_NAME)
	rhs = SSA_VAL (rhs);
      if (TREE_CODE (rhs) == ADDR_EXPR)
	{
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
						    &rhs_offset);
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
	      && host_integerp (TREE_OPERAND (tem, 1), 1))
	    {
	      rhs = TREE_OPERAND (tem, 0);
	      rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
	    }
	  else if (DECL_P (tem))
	    rhs = build_fold_addr_expr (tem);
	  else
	    return (void *)-1;
	}
      if (TREE_CODE (rhs) != SSA_NAME
	  && TREE_CODE (rhs) != ADDR_EXPR)
	return (void *)-1;

      copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));

      /* The bases of the destination and the references have to agree.  */
      if ((TREE_CODE (base) != MEM_REF
	   && !DECL_P (base))
	  || (TREE_CODE (base) == MEM_REF
	      && (TREE_OPERAND (base, 0) != lhs
		  || !host_integerp (TREE_OPERAND (base, 1), 1)))
	  || (DECL_P (base)
	      && (TREE_CODE (lhs) != ADDR_EXPR
		  || TREE_OPERAND (lhs, 0) != base)))
	return (void *)-1;

      /* And the access has to be contained within the memcpy destination.  */
      at = offset / BITS_PER_UNIT;
      if (TREE_CODE (base) == MEM_REF)
	at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
      if (lhs_offset > at
	  || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
	return (void *)-1;

      /* Make room for 2 operands in the new reference.  */
      if (VEC_length (vn_reference_op_s, vr->operands) < 2)
	{
	  VEC (vn_reference_op_s, heap) *old = vr->operands;
	  VEC_safe_grow (vn_reference_op_s, heap, vr->operands, 2);
	  if (old == shared_lookup_references
	      && vr->operands != old)
	    shared_lookup_references = NULL;
	}
      else
	VEC_truncate (vn_reference_op_s, vr->operands, 2);

      /* The looked-through reference is a simple MEM_REF.  */
      memset (&op, 0, sizeof (op));
      op.type = vr->type;
      op.opcode = MEM_REF;
      op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
      op.off = at - lhs_offset + rhs_offset;
      VEC_replace (vn_reference_op_s, vr->operands, 0, op);
      op.type = TREE_TYPE (rhs);
      op.opcode = TREE_CODE (rhs);
      op.op0 = rhs;
      op.off = -1;
      VEC_replace (vn_reference_op_s, vr->operands, 1, op);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
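/* Example (illustrative): for
     memset (&a, 0, sizeof (a));  ...;  x_1 = a.b.c;
   case 1 above derives the value 0 for the load without walking any
   further, recording it via vn_reference_lookup_or_insert_for_pieces.  */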
1839 /* Lookup a reference operation by it's parts, in the current hash table.
1840 Returns the resulting value number if it exists in the hash table,
1841 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1842 vn_reference_t stored in the hashtable if something is found. */
1844 tree
1845 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
1846 VEC (vn_reference_op_s, heap) *operands,
1847 vn_reference_t *vnresult, vn_lookup_kind kind)
1849 struct vn_reference_s vr1;
1850 vn_reference_t tmp;
1851 tree cst;
1853 if (!vnresult)
1854 vnresult = &tmp;
1855 *vnresult = NULL;
1857 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1858 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
1859 VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
1860 VEC_length (vn_reference_op_s, operands));
1861 memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
1862 VEC_address (vn_reference_op_s, operands),
1863 sizeof (vn_reference_op_s)
1864 * VEC_length (vn_reference_op_s, operands));
1865 vr1.operands = operands = shared_lookup_references
1866 = valueize_refs (shared_lookup_references);
1867 vr1.type = type;
1868 vr1.set = set;
1869 vr1.hashcode = vn_reference_compute_hash (&vr1);
1870 if ((cst = fully_constant_vn_reference_p (&vr1)))
1871 return cst;
1873 vn_reference_lookup_1 (&vr1, vnresult);
1874 if (!*vnresult
1875 && kind != VN_NOWALK
1876 && vr1.vuse)
1878 ao_ref r;
1879 vn_walk_kind = kind;
1880 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
1881 *vnresult =
1882 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1883 vn_reference_lookup_2,
1884 vn_reference_lookup_3, &vr1);
1885 if (vr1.operands != operands)
1886 VEC_free (vn_reference_op_s, heap, vr1.operands);
1889 if (*vnresult)
1890 return (*vnresult)->result;
1892 return NULL_TREE;
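/* A minimal usage sketch (illustrative only; everything except the
   function itself is an assumption). A client such as PRE that already
   has a reference decomposed into operands can ask for its value:

     vn_reference_t res;
     tree val = vn_reference_lookup_pieces (vuse, set, type, operands,
                                            &res, VN_WALK);
     if (val)
       ... reuse VAL instead of re-computing the reference ...  */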
1895 /* Lookup OP in the current hash table, and return the resulting value
1896 number if it exists in the hash table. Return NULL_TREE if it does
1897 not exist in the hash table or if the result field of the structure
1898 was NULL. VNRESULT will be filled in with the vn_reference_t
1899 stored in the hashtable if one exists. */
1901 tree
1902 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
1903 vn_reference_t *vnresult)
1905 VEC (vn_reference_op_s, heap) *operands;
1906 struct vn_reference_s vr1;
1907 tree cst;
1908 bool valueized_anything;
1910 if (vnresult)
1911 *vnresult = NULL;
1913 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1914 vr1.operands = operands
1915 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
1916 vr1.type = TREE_TYPE (op);
1917 vr1.set = get_alias_set (op);
1918 vr1.hashcode = vn_reference_compute_hash (&vr1);
1919 if ((cst = fully_constant_vn_reference_p (&vr1)))
1920 return cst;
1922 if (kind != VN_NOWALK
1923 && vr1.vuse)
1925 vn_reference_t wvnresult;
1926 ao_ref r;
1927 /* Make sure to use a valueized reference if we valueized anything.
1928 Otherwise preserve the full reference for advanced TBAA. */
1929 if (!valueized_anything
1930 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
1931 vr1.operands))
1932 ao_ref_init (&r, op);
1933 vn_walk_kind = kind;
1934 wvnresult =
1935 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1936 vn_reference_lookup_2,
1937 vn_reference_lookup_3, &vr1);
1938 if (vr1.operands != operands)
1939 VEC_free (vn_reference_op_s, heap, vr1.operands);
1940 if (wvnresult)
1942 if (vnresult)
1943 *vnresult = wvnresult;
1944 return wvnresult->result;
1947 return NULL_TREE;
1950 return vn_reference_lookup_1 (&vr1, vnresult);
1954 /* Insert OP into the current hash table with a value number of
1955 RESULT, and return the resulting reference structure we created. */
1957 vn_reference_t
1958 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
1960 void **slot;
1961 vn_reference_t vr1;
1963 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1964 if (TREE_CODE (result) == SSA_NAME)
1965 vr1->value_id = VN_INFO (result)->value_id;
1966 else
1967 vr1->value_id = get_or_alloc_constant_value_id (result);
1968 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1969 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
1970 vr1->type = TREE_TYPE (op);
1971 vr1->set = get_alias_set (op);
1972 vr1->hashcode = vn_reference_compute_hash (vr1);
1973 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
1974 vr1->result_vdef = vdef;
1976 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1977 INSERT);
1979 /* Because we lookup stores using vuses, and value number failures
1980 using the vdefs (see visit_reference_op_store for how and why),
1981 it's possible that on failure we may try to insert an already
1982 inserted store. This is not wrong; there is no SSA name for a
1983 store that we could use as a differentiator anyway. Thus, unlike
1984 the other lookup functions, you cannot gcc_assert (!*slot)
1985 here. */
1987 /* But free the old slot in case of a collision. */
1988 if (*slot)
1989 free_reference (*slot);
1991 *slot = vr1;
1992 return vr1;
1995 /* Insert a reference by its pieces into the current hash table with
1996 a value number of RESULT. Return the resulting reference
1997 structure we created. */
1999 vn_reference_t
2000 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2001 VEC (vn_reference_op_s, heap) *operands,
2002 tree result, unsigned int value_id)
2005 void **slot;
2006 vn_reference_t vr1;
2008 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2009 vr1->value_id = value_id;
2010 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2011 vr1->operands = valueize_refs (operands);
2012 vr1->type = type;
2013 vr1->set = set;
2014 vr1->hashcode = vn_reference_compute_hash (vr1);
2015 if (result && TREE_CODE (result) == SSA_NAME)
2016 result = SSA_VAL (result);
2017 vr1->result = result;
2019 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
2020 INSERT);
2022 /* At this point we should have all the things inserted that we have
2023 seen before, and we should never try inserting something that
2024 already exists. */
2025 gcc_assert (!*slot);
2026 if (*slot)
2027 free_reference (*slot);
2029 *slot = vr1;
2030 return vr1;
2033 /* Compute and return the hash value for nary operation VBO1. */
2035 hashval_t
2036 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2038 hashval_t hash;
2039 unsigned i;
2041 for (i = 0; i < vno1->length; ++i)
2042 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2043 vno1->op[i] = SSA_VAL (vno1->op[i]);
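/* Canonicalize the operand order of commutative operations so that,
   for example, a_1 + b_2 and b_2 + a_1 receive the same hash and
   compare equal.  */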
2045 if (vno1->length == 2
2046 && commutative_tree_code (vno1->opcode)
2047 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2049 tree temp = vno1->op[0];
2050 vno1->op[0] = vno1->op[1];
2051 vno1->op[1] = temp;
2054 hash = iterative_hash_hashval_t (vno1->opcode, 0);
2055 for (i = 0; i < vno1->length; ++i)
2056 hash = iterative_hash_expr (vno1->op[i], hash);
2058 return hash;
2061 /* Return the computed hashcode for nary operation P1. */
2063 static hashval_t
2064 vn_nary_op_hash (const void *p1)
2066 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2067 return vno1->hashcode;
2070 /* Compare nary operations P1 and P2 and return true if they are
2071 equivalent. */
2073 int
2074 vn_nary_op_eq (const void *p1, const void *p2)
2076 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2077 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
2078 unsigned i;
2080 if (vno1->hashcode != vno2->hashcode)
2081 return false;
2083 if (vno1->length != vno2->length)
2084 return false;
2086 if (vno1->opcode != vno2->opcode
2087 || !types_compatible_p (vno1->type, vno2->type))
2088 return false;
2090 for (i = 0; i < vno1->length; ++i)
2091 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2092 return false;
2094 return true;
2097 /* Initialize VNO from the pieces provided. */
2099 static void
2100 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2101 enum tree_code code, tree type, tree *ops)
2103 vno->opcode = code;
2104 vno->length = length;
2105 vno->type = type;
2106 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2109 /* Initialize VNO from OP. */
2111 static void
2112 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2114 unsigned i;
2116 vno->opcode = TREE_CODE (op);
2117 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2118 vno->type = TREE_TYPE (op);
2119 for (i = 0; i < vno->length; ++i)
2120 vno->op[i] = TREE_OPERAND (op, i);
2123 /* Return the number of operands for a vn_nary ops structure from STMT. */
2125 static unsigned int
2126 vn_nary_length_from_stmt (gimple stmt)
2128 switch (gimple_assign_rhs_code (stmt))
2130 case REALPART_EXPR:
2131 case IMAGPART_EXPR:
2132 case VIEW_CONVERT_EXPR:
2133 return 1;
2135 case CONSTRUCTOR:
2136 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2138 default:
2139 return gimple_num_ops (stmt) - 1;
2143 /* Initialize VNO from STMT. */
2145 static void
2146 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2148 unsigned i;
2150 vno->opcode = gimple_assign_rhs_code (stmt);
2151 vno->type = gimple_expr_type (stmt);
2152 switch (vno->opcode)
2154 case REALPART_EXPR:
2155 case IMAGPART_EXPR:
2156 case VIEW_CONVERT_EXPR:
2157 vno->length = 1;
2158 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2159 break;
2161 case CONSTRUCTOR:
2162 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2163 for (i = 0; i < vno->length; ++i)
2164 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2165 break;
2167 default:
2168 vno->length = gimple_num_ops (stmt) - 1;
2169 for (i = 0; i < vno->length; ++i)
2170 vno->op[i] = gimple_op (stmt, i + 1);
2174 /* Compute the hashcode for VNO and look for it in the hash table;
2175 return the resulting value number if it exists in the hash table.
2176 Return NULL_TREE if it does not exist in the hash table or if the
2177 result field of the operation is NULL. VNRESULT will contain the
2178 vn_nary_op_t from the hashtable if it exists. */
2180 static tree
2181 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2183 void **slot;
2185 if (vnresult)
2186 *vnresult = NULL;
2188 vno->hashcode = vn_nary_op_compute_hash (vno);
2189 slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
2190 NO_INSERT);
2191 if (!slot && current_info == optimistic_info)
2192 slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
2193 NO_INSERT);
2194 if (!slot)
2195 return NULL_TREE;
2196 if (vnresult)
2197 *vnresult = (vn_nary_op_t)*slot;
2198 return ((vn_nary_op_t)*slot)->result;
2201 /* Lookup an n-ary operation by its pieces and return the resulting value
2202 number if it exists in the hash table. Return NULL_TREE if it does
2203 not exist in the hash table or if the result field of the operation
2204 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2205 if it exists. */
2207 tree
2208 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2209 tree type, tree *ops, vn_nary_op_t *vnresult)
2211 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2212 sizeof_vn_nary_op (length));
2213 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2214 return vn_nary_op_lookup_1 (vno1, vnresult);
2217 /* Lookup OP in the current hash table, and return the resulting value
2218 number if it exists in the hash table. Return NULL_TREE if it does
2219 not exist in the hash table or if the result field of the operation
2220 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2221 if it exists. */
2223 tree
2224 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2226 vn_nary_op_t vno1
2227 = XALLOCAVAR (struct vn_nary_op_s,
2228 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2229 init_vn_nary_op_from_op (vno1, op);
2230 return vn_nary_op_lookup_1 (vno1, vnresult);
2233 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2234 value number if it exists in the hash table. Return NULL_TREE if
2235 it does not exist in the hash table. VNRESULT will contain the
2236 vn_nary_op_t from the hashtable if it exists. */
2238 tree
2239 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2241 vn_nary_op_t vno1
2242 = XALLOCAVAR (struct vn_nary_op_s,
2243 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2244 init_vn_nary_op_from_stmt (vno1, stmt);
2245 return vn_nary_op_lookup_1 (vno1, vnresult);
2248 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2250 static vn_nary_op_t
2251 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2253 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2256 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2257 obstack. */
2259 static vn_nary_op_t
2260 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2262 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2263 &current_info->nary_obstack);
2265 vno1->value_id = value_id;
2266 vno1->length = length;
2267 vno1->result = result;
2269 return vno1;
2272 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2273 VNO->HASHCODE first. */
2275 static vn_nary_op_t
2276 vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash)
2278 void **slot;
2280 if (compute_hash)
2281 vno->hashcode = vn_nary_op_compute_hash (vno);
2283 slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT);
2284 gcc_assert (!*slot);
2286 *slot = vno;
2287 return vno;
2290 /* Insert an n-ary operation into the current hash table using its
2291 pieces. Return the vn_nary_op_t structure we created and put in
2292 the hashtable. */
2294 vn_nary_op_t
2295 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2296 tree type, tree *ops,
2297 tree result, unsigned int value_id)
2299 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2300 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2301 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2304 /* Insert OP into the current hash table with a value number of
2305 RESULT. Return the vn_nary_op_t structure we created and put in
2306 the hashtable. */
2308 vn_nary_op_t
2309 vn_nary_op_insert (tree op, tree result)
2311 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2312 vn_nary_op_t vno1;
2314 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2315 init_vn_nary_op_from_op (vno1, op);
2316 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2319 /* Insert the rhs of STMT into the current hash table with a value number of
2320 RESULT. */
2322 vn_nary_op_t
2323 vn_nary_op_insert_stmt (gimple stmt, tree result)
2325 vn_nary_op_t vno1
2326 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2327 result, VN_INFO (result)->value_id);
2328 init_vn_nary_op_from_stmt (vno1, stmt);
2329 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2332 /* Compute a hashcode for PHI operation VP1 and return it. */
2334 static inline hashval_t
2335 vn_phi_compute_hash (vn_phi_t vp1)
2337 hashval_t result;
2338 int i;
2339 tree phi1op;
2340 tree type;
2342 result = vp1->block->index;
2344 /* If all PHI arguments are constants we need to distinguish
2345 the PHI node via its type. */
2346 type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
2347 result += (INTEGRAL_TYPE_P (type)
2348 + (INTEGRAL_TYPE_P (type)
2349 ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
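/* For example, two PHIs <1(2), 2(3)> in the same block, one of type
   int and one of type long, must not be unified; mixing in the
   precision and signedness keeps their hashes apart on typical
   targets (illustrative).  */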
2351 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2353 if (phi1op == VN_TOP)
2354 continue;
2355 result = iterative_hash_expr (phi1op, result);
2358 return result;
2361 /* Return the computed hashcode for phi operation P1. */
2363 static hashval_t
2364 vn_phi_hash (const void *p1)
2366 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2367 return vp1->hashcode;
2370 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2372 static int
2373 vn_phi_eq (const void *p1, const void *p2)
2375 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2376 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
2378 if (vp1->hashcode != vp2->hashcode)
2379 return false;
2381 if (vp1->block == vp2->block)
2383 int i;
2384 tree phi1op;
2386 /* If the PHI nodes do not have compatible types
2387 they are not the same. */
2388 if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
2389 TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
2390 return false;
2392 /* Any phi in the same block will have its arguments in the
2393 same edge order, because of how we store phi nodes. */
2394 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2396 tree phi2op = VEC_index (tree, vp2->phiargs, i);
2397 if (phi1op == VN_TOP || phi2op == VN_TOP)
2398 continue;
2399 if (!expressions_equal_p (phi1op, phi2op))
2400 return false;
2402 return true;
2404 return false;
2407 static VEC(tree, heap) *shared_lookup_phiargs;
2409 /* Lookup PHI in the current hash table, and return the resulting
2410 value number if it exists in the hash table. Return NULL_TREE if
2411 it does not exist in the hash table. */
2413 static tree
2414 vn_phi_lookup (gimple phi)
2416 void **slot;
2417 struct vn_phi_s vp1;
2418 unsigned i;
2420 VEC_truncate (tree, shared_lookup_phiargs, 0);
2422 /* Canonicalize the SSA_NAMEs to their value numbers. */
2423 for (i = 0; i < gimple_phi_num_args (phi); i++)
2425 tree def = PHI_ARG_DEF (phi, i);
2426 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2427 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
2429 vp1.phiargs = shared_lookup_phiargs;
2430 vp1.block = gimple_bb (phi);
2431 vp1.hashcode = vn_phi_compute_hash (&vp1);
2432 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
2433 NO_INSERT);
2434 if (!slot && current_info == optimistic_info)
2435 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
2436 NO_INSERT);
2437 if (!slot)
2438 return NULL_TREE;
2439 return ((vn_phi_t)*slot)->result;
2442 /* Insert PHI into the current hash table with a value number of
2443 RESULT. */
2445 static vn_phi_t
2446 vn_phi_insert (gimple phi, tree result)
2448 void **slot;
2449 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2450 unsigned i;
2451 VEC (tree, heap) *args = NULL;
2453 /* Canonicalize the SSA_NAMEs to their value numbers. */
2454 for (i = 0; i < gimple_phi_num_args (phi); i++)
2456 tree def = PHI_ARG_DEF (phi, i);
2457 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2458 VEC_safe_push (tree, heap, args, def);
2460 vp1->value_id = VN_INFO (result)->value_id;
2461 vp1->phiargs = args;
2462 vp1->block = gimple_bb (phi);
2463 vp1->result = result;
2464 vp1->hashcode = vn_phi_compute_hash (vp1);
2466 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
2467 INSERT);
2469 /* Because we iterate over phi operations more than once, it's
2470 possible the slot might already exist here, hence no assert.  */
2471 *slot = vp1;
2472 return vp1;
2476 /* Print set of components in strongly connected component SCC to OUT. */
2478 static void
2479 print_scc (FILE *out, VEC (tree, heap) *scc)
2481 tree var;
2482 unsigned int i;
2484 fprintf (out, "SCC consists of:");
2485 FOR_EACH_VEC_ELT (tree, scc, i, var)
2487 fprintf (out, " ");
2488 print_generic_expr (out, var, 0);
2490 fprintf (out, "\n");
2493 /* Set the value number of FROM to TO, return true if it has changed
2494 as a result. */
2496 static inline bool
2497 set_ssa_val_to (tree from, tree to)
2499 tree currval = SSA_VAL (from);
2501 if (from != to)
2503 if (currval == from)
2505 if (dump_file && (dump_flags & TDF_DETAILS))
2507 fprintf (dump_file, "Not changing value number of ");
2508 print_generic_expr (dump_file, from, 0);
2509 fprintf (dump_file, " from VARYING to ");
2510 print_generic_expr (dump_file, to, 0);
2511 fprintf (dump_file, "\n");
2513 return false;
2515 else if (TREE_CODE (to) == SSA_NAME
2516 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2517 to = from;
2520 /* The only thing we allow as value numbers are VN_TOP, ssa_names
2521 and invariants. So assert that here. */
2522 gcc_assert (to != NULL_TREE
2523 && (to == VN_TOP
2524 || TREE_CODE (to) == SSA_NAME
2525 || is_gimple_min_invariant (to)));
2527 if (dump_file && (dump_flags & TDF_DETAILS))
2529 fprintf (dump_file, "Setting value number of ");
2530 print_generic_expr (dump_file, from, 0);
2531 fprintf (dump_file, " to ");
2532 print_generic_expr (dump_file, to, 0);
2535 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
2537 VN_INFO (from)->valnum = to;
2538 if (dump_file && (dump_flags & TDF_DETAILS))
2539 fprintf (dump_file, " (changed)\n");
2540 return true;
2542 if (dump_file && (dump_flags & TDF_DETAILS))
2543 fprintf (dump_file, "\n");
2544 return false;
2547 /* Mark as processed all the definitions in the defining stmt of USE, or
2548 the USE itself. */
2550 static void
2551 mark_use_processed (tree use)
2553 ssa_op_iter iter;
2554 def_operand_p defp;
2555 gimple stmt = SSA_NAME_DEF_STMT (use);
2557 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
2559 VN_INFO (use)->use_processed = true;
2560 return;
2563 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2565 tree def = DEF_FROM_PTR (defp);
2567 VN_INFO (def)->use_processed = true;
2571 /* Set all definitions in STMT to value number to themselves.
2572 Return true if a value number changed. */
2574 static bool
2575 defs_to_varying (gimple stmt)
2577 bool changed = false;
2578 ssa_op_iter iter;
2579 def_operand_p defp;
2581 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2583 tree def = DEF_FROM_PTR (defp);
2584 changed |= set_ssa_val_to (def, def);
2586 return changed;
2589 static bool expr_has_constants (tree expr);
2590 static tree valueize_expr (tree expr);
2592 /* Visit a copy between LHS and RHS, return true if the value number
2593 changed. */
2595 static bool
2596 visit_copy (tree lhs, tree rhs)
2598 /* Follow chains of copies to their destination. */
2599 while (TREE_CODE (rhs) == SSA_NAME
2600 && SSA_VAL (rhs) != rhs)
2601 rhs = SSA_VAL (rhs);
2603 /* The RHS may have a more interesting constant-filled expression
2604 than we do (ours is just an SSA name), so inherit its info. */
2605 if (TREE_CODE (rhs) == SSA_NAME)
2607 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2608 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2611 return set_ssa_val_to (lhs, rhs);
2614 /* Visit a nary operator RHS, value number it, and return true if the
2615 value number of LHS has changed as a result. */
2617 static bool
2618 visit_nary_op (tree lhs, gimple stmt)
2620 bool changed = false;
2621 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2623 if (result)
2624 changed = set_ssa_val_to (lhs, result);
2625 else
2627 changed = set_ssa_val_to (lhs, lhs);
2628 vn_nary_op_insert_stmt (stmt, lhs);
2631 return changed;
2634 /* Visit a call STMT storing into LHS. Return true if the value number
2635 of the LHS has changed as a result. */
2637 static bool
2638 visit_reference_op_call (tree lhs, gimple stmt)
2640 bool changed = false;
2641 struct vn_reference_s vr1;
2642 vn_reference_t vnresult = NULL;
2643 tree vuse = gimple_vuse (stmt);
2644 tree vdef = gimple_vdef (stmt);
2646 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
2647 if (lhs && TREE_CODE (lhs) != SSA_NAME)
2648 lhs = NULL_TREE;
2650 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2651 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2652 vr1.type = gimple_expr_type (stmt);
2653 vr1.set = 0;
2654 vr1.hashcode = vn_reference_compute_hash (&vr1);
2655 vn_reference_lookup_1 (&vr1, &vnresult);
2657 if (vnresult)
2659 if (vnresult->result_vdef)
2660 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
2662 if (!vnresult->result && lhs)
2663 vnresult->result = lhs;
2665 if (vnresult->result && lhs)
2667 changed |= set_ssa_val_to (lhs, vnresult->result);
2669 if (VN_INFO (vnresult->result)->has_constants)
2670 VN_INFO (lhs)->has_constants = true;
2673 else
2675 void **slot;
2676 vn_reference_t vr2;
2677 if (vdef)
2678 changed |= set_ssa_val_to (vdef, vdef);
2679 if (lhs)
2680 changed |= set_ssa_val_to (lhs, lhs);
2681 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2682 vr2->vuse = vr1.vuse;
2683 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2684 vr2->type = vr1.type;
2685 vr2->set = vr1.set;
2686 vr2->hashcode = vr1.hashcode;
2687 vr2->result = lhs;
2688 vr2->result_vdef = vdef;
2689 slot = htab_find_slot_with_hash (current_info->references,
2690 vr2, vr2->hashcode, INSERT);
2691 if (*slot)
2692 free_reference (*slot);
2693 *slot = vr2;
2696 return changed;
2699 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2700 and return true if the value number of the LHS has changed as a result. */
2702 static bool
2703 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2705 bool changed = false;
2706 tree last_vuse;
2707 tree result;
2709 last_vuse = gimple_vuse (stmt);
2710 last_vuse_ptr = &last_vuse;
2711 result = vn_reference_lookup (op, gimple_vuse (stmt),
2712 default_vn_walk_kind, NULL);
2713 last_vuse_ptr = NULL;
2715 /* If we have a VCE (VIEW_CONVERT_EXPR), try looking up its operand
2716 as it might be stored in a different type. */
2717 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2718 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2719 default_vn_walk_kind, NULL);
2721 /* We handle type-punning through unions by value-numbering based
2722 on offset and size of the access. Be prepared to handle a
2723 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
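/* An illustrative source-level sketch:

     union { int i; float f; } u;
     u.f = 1.0f;
     ... = u.i;

   The load of u.i can be value numbered to the stored value based on
   the matching offset and size of the access, with the type mismatch
   patched up as VIEW_CONVERT_EXPR <int> (1.0e+0).  */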
2724 if (result
2725 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2727 /* We will be setting the value number of lhs to the value number
2728 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2729 So first simplify and lookup this expression to see if it
2730 is already available. */
2731 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2732 if ((CONVERT_EXPR_P (val)
2733 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2734 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2736 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2737 if ((CONVERT_EXPR_P (tem)
2738 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2739 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2740 TREE_TYPE (val), tem)))
2741 val = tem;
2743 result = val;
2744 if (!is_gimple_min_invariant (val)
2745 && TREE_CODE (val) != SSA_NAME)
2746 result = vn_nary_op_lookup (val, NULL);
2747 /* If the expression is not yet available, value-number lhs to
2748 a new SSA_NAME we create. */
2749 if (!result)
2751 result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (),
2752 "vntemp");
2753 /* Initialize value-number information properly. */
2754 VN_INFO_GET (result)->valnum = result;
2755 VN_INFO (result)->value_id = get_next_value_id ();
2756 VN_INFO (result)->expr = val;
2757 VN_INFO (result)->has_constants = expr_has_constants (val);
2758 VN_INFO (result)->needs_insertion = true;
2759 /* As all "inserted" statements are singleton SCCs, insert
2760 to the valid table. This is strictly needed to
2761 avoid re-generating new value SSA_NAMEs for the same
2762 expression during SCC iteration over and over (the
2763 optimistic table gets cleared after each iteration).
2764 We do not need to insert into the optimistic table, as
2765 lookups there will fall back to the valid table. */
2766 if (current_info == optimistic_info)
2768 current_info = valid_info;
2769 vn_nary_op_insert (val, result);
2770 current_info = optimistic_info;
2772 else
2773 vn_nary_op_insert (val, result);
2774 if (dump_file && (dump_flags & TDF_DETAILS))
2776 fprintf (dump_file, "Inserting name ");
2777 print_generic_expr (dump_file, result, 0);
2778 fprintf (dump_file, " for expression ");
2779 print_generic_expr (dump_file, val, 0);
2780 fprintf (dump_file, "\n");
2785 if (result)
2787 changed = set_ssa_val_to (lhs, result);
2788 if (TREE_CODE (result) == SSA_NAME
2789 && VN_INFO (result)->has_constants)
2791 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2792 VN_INFO (lhs)->has_constants = true;
2795 else
2797 changed = set_ssa_val_to (lhs, lhs);
2798 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
2801 return changed;
2805 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2806 and return true if the value number of the LHS has changed as a result. */
2808 static bool
2809 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2811 bool changed = false;
2812 vn_reference_t vnresult = NULL;
2813 tree result, assign;
2814 bool resultsame = false;
2815 tree vuse = gimple_vuse (stmt);
2816 tree vdef = gimple_vdef (stmt);
2818 /* First we want to look up using the *vuses* from the store and see
2819 whether the last store to this location with the same address
2820 had the same value.
2822 The vuses represent the memory state before the store. If the
2823 memory state, address, and value of the store are the same as the
2824 last store to this location, then this store will produce the
2825 same memory state as that store.
2827 In this case the vdef versions for this store are value numbered to those
2828 vuse versions, since they represent the same memory state after
2829 this store.
2831 Otherwise, the vdefs for the store are used when inserting into
2832 the table, since the store generates a new memory state. */
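/* An illustrative GIMPLE sketch (not from a real dump):

     # .MEM_2 = VDEF <.MEM_1>
     a = x_1;
     ...
     # .MEM_4 = VDEF <.MEM_3>
     a = x_1;

   If nothing clobbered A in between, the vuse .MEM_3 has value
   number .MEM_2, the lookup below finds the first store, and the
   vdef .MEM_4 is value numbered to the value of its vuse.  */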
2834 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);
2836 if (result)
2838 if (TREE_CODE (result) == SSA_NAME)
2839 result = SSA_VAL (result);
2840 if (TREE_CODE (op) == SSA_NAME)
2841 op = SSA_VAL (op);
2842 resultsame = expressions_equal_p (result, op);
2845 if (!result || !resultsame)
2847 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
2848 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
2849 if (vnresult)
2851 VN_INFO (vdef)->use_processed = true;
2852 return set_ssa_val_to (vdef, vnresult->result_vdef);
2856 if (!result || !resultsame)
2858 if (dump_file && (dump_flags & TDF_DETAILS))
2860 fprintf (dump_file, "No store match\n");
2861 fprintf (dump_file, "Value numbering store ");
2862 print_generic_expr (dump_file, lhs, 0);
2863 fprintf (dump_file, " to ");
2864 print_generic_expr (dump_file, op, 0);
2865 fprintf (dump_file, "\n");
2867 /* Have to set value numbers before insert, since insert is
2868 going to valueize the references in-place. */
2869 if (vdef)
2871 changed |= set_ssa_val_to (vdef, vdef);
2874 /* Do not insert structure copies into the tables. */
2875 if (is_gimple_min_invariant (op)
2876 || is_gimple_reg (op))
2877 vn_reference_insert (lhs, op, vdef, NULL);
2879 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
2880 vn_reference_insert (assign, lhs, vuse, vdef);
2882 else
2884 /* We had a match, so value number the vdef to have the value
2885 number of the vuse it came from. */
2887 if (dump_file && (dump_flags & TDF_DETAILS))
2888 fprintf (dump_file, "Store matched earlier value, "
2889 "value numbering store vdefs to matching vuses.\n");
2891 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
2894 return changed;
2897 /* Visit and value number PHI, return true if the value number
2898 changed. */
2900 static bool
2901 visit_phi (gimple phi)
2903 bool changed = false;
2904 tree result;
2905 tree sameval = VN_TOP;
2906 bool allsame = true;
2907 unsigned i;
2909 /* TODO: We could check for this in init_scc_vn, and replace this
2910 with a gcc_assert. */
2911 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2912 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2914 /* See if all non-TOP arguments have the same value. TOP is
2915 equivalent to everything, so we can ignore it. */
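/* For example, in

     x_4 = PHI <x_1(2), x_1(3), x_2(4)>

   where x_2 still has value VN_TOP (not yet visited in this SCC
   iteration), the remaining arguments agree on x_1, so x_4 is value
   numbered to x_1 (illustrative).  */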
2916 for (i = 0; i < gimple_phi_num_args (phi); i++)
2918 tree def = PHI_ARG_DEF (phi, i);
2920 if (TREE_CODE (def) == SSA_NAME)
2921 def = SSA_VAL (def);
2922 if (def == VN_TOP)
2923 continue;
2924 if (sameval == VN_TOP)
2926 sameval = def;
2928 else
2930 if (!expressions_equal_p (def, sameval))
2932 allsame = false;
2933 break;
2938 /* If all value numbered to the same value, the phi node has that
2939 value. */
2940 if (allsame)
2942 if (is_gimple_min_invariant (sameval))
2944 VN_INFO (PHI_RESULT (phi))->has_constants = true;
2945 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2947 else
2949 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2950 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2953 if (TREE_CODE (sameval) == SSA_NAME)
2954 return visit_copy (PHI_RESULT (phi), sameval);
2956 return set_ssa_val_to (PHI_RESULT (phi), sameval);
2959 /* Otherwise, see if it is equivalent to a phi node in this block. */
2960 result = vn_phi_lookup (phi);
2961 if (result)
2963 if (TREE_CODE (result) == SSA_NAME)
2964 changed = visit_copy (PHI_RESULT (phi), result);
2965 else
2966 changed = set_ssa_val_to (PHI_RESULT (phi), result);
2968 else
2970 vn_phi_insert (phi, PHI_RESULT (phi));
2971 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2972 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2973 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2976 return changed;
2979 /* Return true if EXPR contains constants. */
2981 static bool
2982 expr_has_constants (tree expr)
2984 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2986 case tcc_unary:
2987 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2989 case tcc_binary:
2990 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2991 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2992 /* Constants inside reference ops are rarely interesting, but
2993 it can take a lot of looking to find them. */
2994 case tcc_reference:
2995 case tcc_declaration:
2996 return false;
2997 default:
2998 return is_gimple_min_invariant (expr);
3000 return false;
3003 /* Return true if STMT contains constants. */
3005 static bool
3006 stmt_has_constants (gimple stmt)
3008 if (gimple_code (stmt) != GIMPLE_ASSIGN)
3009 return false;
3011 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
3013 case GIMPLE_UNARY_RHS:
3014 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
3016 case GIMPLE_BINARY_RHS:
3017 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
3018 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
3019 case GIMPLE_TERNARY_RHS:
3020 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
3021 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
3022 || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
3023 case GIMPLE_SINGLE_RHS:
3024 /* Constants inside reference ops are rarely interesting, but
3025 it can take a lot of looking to find them. */
3026 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
3027 default:
3028 gcc_unreachable ();
3030 return false;
3033 /* Replace SSA_NAMES in expr with their value numbers, and return the
3034 result.
3035 This is performed in place. */
3037 static tree
3038 valueize_expr (tree expr)
3040 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3042 case tcc_binary:
3043 TREE_OPERAND (expr, 1) = vn_valueize (TREE_OPERAND (expr, 1));
3044 /* Fallthru. */
3045 case tcc_unary:
3046 TREE_OPERAND (expr, 0) = vn_valueize (TREE_OPERAND (expr, 0));
3047 break;
3048 default:;
3050 return expr;
3053 /* Simplify the binary expression RHS, and return the result if
3054 simplified. */
3056 static tree
3057 simplify_binary_expression (gimple stmt)
3059 tree result = NULL_TREE;
3060 tree op0 = gimple_assign_rhs1 (stmt);
3061 tree op1 = gimple_assign_rhs2 (stmt);
3062 enum tree_code code = gimple_assign_rhs_code (stmt);
3064 /* This will not catch every single case we could combine, but will
3065 catch those with constants. The goal here is to simultaneously
3066 combine constants between expressions, but avoid infinite
3067 expansion of expressions during simplification. */
3068 if (TREE_CODE (op0) == SSA_NAME)
3070 if (VN_INFO (op0)->has_constants
3071 || TREE_CODE_CLASS (code) == tcc_comparison
3072 || code == COMPLEX_EXPR)
3073 op0 = valueize_expr (vn_get_expr_for (op0));
3074 else
3075 op0 = vn_valueize (op0);
3078 if (TREE_CODE (op1) == SSA_NAME)
3080 if (VN_INFO (op1)->has_constants
3081 || code == COMPLEX_EXPR)
3082 op1 = valueize_expr (vn_get_expr_for (op1));
3083 else
3084 op1 = vn_valueize (op1);
3087 /* Pointer plus constant can be represented as an invariant address.
3088 Do so to allow further propagation, see also tree forwprop. */
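/* E.g. the RHS &a p+ 4 can be folded to the invariant address
   &MEM[(void *)&a + 4B] (a sketch of the dump syntax).  */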
3089 if (code == POINTER_PLUS_EXPR
3090 && host_integerp (op1, 1)
3091 && TREE_CODE (op0) == ADDR_EXPR
3092 && is_gimple_min_invariant (op0))
3093 return build_invariant_address (TREE_TYPE (op0),
3094 TREE_OPERAND (op0, 0),
3095 TREE_INT_CST_LOW (op1));
3097 /* Avoid folding if nothing changed. */
3098 if (op0 == gimple_assign_rhs1 (stmt)
3099 && op1 == gimple_assign_rhs2 (stmt))
3100 return NULL_TREE;
3102 fold_defer_overflow_warnings ();
3104 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3105 if (result)
3106 STRIP_USELESS_TYPE_CONVERSION (result);
3108 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3109 stmt, 0);
3111 /* Make sure result is not a complex expression consisting
3112 of operations nested in operations (i.e. (a + b) + (a + c)).
3113 Otherwise, we will end up with unbounded expressions if
3114 fold does anything at all. */
3115 if (result && valid_gimple_rhs_p (result))
3116 return result;
3118 return NULL_TREE;
3121 /* Simplify the unary expression RHS, and return the result if
3122 simplified. */
3124 static tree
3125 simplify_unary_expression (gimple stmt)
3127 tree result = NULL_TREE;
3128 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3129 enum tree_code code = gimple_assign_rhs_code (stmt);
3131 /* We handle some tcc_reference codes here that are all
3132 GIMPLE_ASSIGN_SINGLE codes. */
3133 if (code == REALPART_EXPR
3134 || code == IMAGPART_EXPR
3135 || code == VIEW_CONVERT_EXPR
3136 || code == BIT_FIELD_REF)
3137 op0 = TREE_OPERAND (op0, 0);
3139 if (TREE_CODE (op0) != SSA_NAME)
3140 return NULL_TREE;
3142 orig_op0 = op0;
3143 if (VN_INFO (op0)->has_constants)
3144 op0 = valueize_expr (vn_get_expr_for (op0));
3145 else if (CONVERT_EXPR_CODE_P (code)
3146 || code == REALPART_EXPR
3147 || code == IMAGPART_EXPR
3148 || code == VIEW_CONVERT_EXPR
3149 || code == BIT_FIELD_REF)
3151 /* We want to do tree-combining on conversion-like expressions.
3152 Make sure we feed only SSA_NAMEs or constants to fold though. */
3153 tree tem = valueize_expr (vn_get_expr_for (op0));
3154 if (UNARY_CLASS_P (tem)
3155 || BINARY_CLASS_P (tem)
3156 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3157 || TREE_CODE (tem) == SSA_NAME
3158 || TREE_CODE (tem) == CONSTRUCTOR
3159 || is_gimple_min_invariant (tem))
3160 op0 = tem;
3163 /* Avoid folding if nothing changed, but remember the expression. */
3164 if (op0 == orig_op0)
3165 return NULL_TREE;
3167 if (code == BIT_FIELD_REF)
3169 tree rhs = gimple_assign_rhs1 (stmt);
3170 result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3171 op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3173 else
3174 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3175 if (result)
3177 STRIP_USELESS_TYPE_CONVERSION (result);
3178 if (valid_gimple_rhs_p (result))
3179 return result;
3182 return NULL_TREE;
3185 /* Try to simplify RHS using equivalences and constant folding. */
3187 static tree
3188 try_to_simplify (gimple stmt)
3190 enum tree_code code = gimple_assign_rhs_code (stmt);
3191 tree tem;
3193 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
3194 in this case; there is no point in doing extra work. */
3195 if (code == SSA_NAME)
3196 return NULL_TREE;
3198 /* First try constant folding based on our current lattice. */
3199 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize);
3200 if (tem
3201 && (TREE_CODE (tem) == SSA_NAME
3202 || is_gimple_min_invariant (tem)))
3203 return tem;
3205 /* If that didn't work, try combining multiple statements. */
3206 switch (TREE_CODE_CLASS (code))
3208 case tcc_reference:
3209 /* Fallthrough for some unary codes that can operate on registers. */
3210 if (!(code == REALPART_EXPR
3211 || code == IMAGPART_EXPR
3212 || code == VIEW_CONVERT_EXPR
3213 || code == BIT_FIELD_REF))
3214 break;
3215 /* We could do a little more with unary ops, if they expand
3216 into binary ops, but it's debatable whether it is worth it. */
3217 case tcc_unary:
3218 return simplify_unary_expression (stmt);
3220 case tcc_comparison:
3221 case tcc_binary:
3222 return simplify_binary_expression (stmt);
3224 default:
3225 break;
3228 return NULL_TREE;
3231 /* Visit and value number USE, return true if the value number
3232 changed. */
3234 static bool
3235 visit_use (tree use)
3237 bool changed = false;
3238 gimple stmt = SSA_NAME_DEF_STMT (use);
3240 mark_use_processed (use);
3242 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3243 if (dump_file && (dump_flags & TDF_DETAILS)
3244 && !SSA_NAME_IS_DEFAULT_DEF (use))
3246 fprintf (dump_file, "Value numbering ");
3247 print_generic_expr (dump_file, use, 0);
3248 fprintf (dump_file, " stmt = ");
3249 print_gimple_stmt (dump_file, stmt, 0, 0);
3252 /* Handle uninitialized uses. */
3253 if (SSA_NAME_IS_DEFAULT_DEF (use))
3254 changed = set_ssa_val_to (use, use);
3255 else
3257 if (gimple_code (stmt) == GIMPLE_PHI)
3258 changed = visit_phi (stmt);
3259 else if (gimple_has_volatile_ops (stmt))
3260 changed = defs_to_varying (stmt);
3261 else if (is_gimple_assign (stmt))
3263 enum tree_code code = gimple_assign_rhs_code (stmt);
3264 tree lhs = gimple_assign_lhs (stmt);
3265 tree rhs1 = gimple_assign_rhs1 (stmt);
3266 tree simplified;
3268 /* Shortcut for copies. Simplifying copies is pointless,
3269 since we copy the expression and value they represent. */
3270 if (code == SSA_NAME
3271 && TREE_CODE (lhs) == SSA_NAME)
3273 changed = visit_copy (lhs, rhs1);
3274 goto done;
3276 simplified = try_to_simplify (stmt);
3277 if (simplified)
3279 if (dump_file && (dump_flags & TDF_DETAILS))
3281 fprintf (dump_file, "RHS ");
3282 print_gimple_expr (dump_file, stmt, 0, 0);
3283 fprintf (dump_file, " simplified to ");
3284 print_generic_expr (dump_file, simplified, 0);
3285 if (TREE_CODE (lhs) == SSA_NAME)
3286 fprintf (dump_file, " has constants %d\n",
3287 expr_has_constants (simplified));
3288 else
3289 fprintf (dump_file, "\n");
3292 /* Setting value numbers to constants will occasionally
3293 screw up phi congruence because constants are not
3294 uniquely associated with a single ssa name that can be
3295 looked up. */
3296 if (simplified
3297 && is_gimple_min_invariant (simplified)
3298 && TREE_CODE (lhs) == SSA_NAME)
3300 VN_INFO (lhs)->expr = simplified;
3301 VN_INFO (lhs)->has_constants = true;
3302 changed = set_ssa_val_to (lhs, simplified);
3303 goto done;
3305 else if (simplified
3306 && TREE_CODE (simplified) == SSA_NAME
3307 && TREE_CODE (lhs) == SSA_NAME)
3309 changed = visit_copy (lhs, simplified);
3310 goto done;
3312 else if (simplified)
3314 if (TREE_CODE (lhs) == SSA_NAME)
3316 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3317 /* We have to unshare the expression or else
3318 valueizing may change the IL stream. */
3319 VN_INFO (lhs)->expr = unshare_expr (simplified);
3322 else if (stmt_has_constants (stmt)
3323 && TREE_CODE (lhs) == SSA_NAME)
3324 VN_INFO (lhs)->has_constants = true;
3325 else if (TREE_CODE (lhs) == SSA_NAME)
3327 /* We reset expr and constantness here because we may
3328 have been value numbering optimistically, and
3329 iterating. They may become non-constant in this case,
3330 even if they were optimistically constant. */
3332 VN_INFO (lhs)->has_constants = false;
3333 VN_INFO (lhs)->expr = NULL_TREE;
3336 if ((TREE_CODE (lhs) == SSA_NAME
3337 /* We can substitute SSA_NAMEs that are live over
3338 abnormal edges with their constant value. */
3339 && !(gimple_assign_copy_p (stmt)
3340 && is_gimple_min_invariant (rhs1))
3341 && !(simplified
3342 && is_gimple_min_invariant (simplified))
3343 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3344 /* Stores or copies from SSA_NAMEs that are live over
3345 abnormal edges are a problem. */
3346 || (code == SSA_NAME
3347 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3348 changed = defs_to_varying (stmt);
3349 else if (REFERENCE_CLASS_P (lhs)
3350 || DECL_P (lhs))
3351 changed = visit_reference_op_store (lhs, rhs1, stmt);
3352 else if (TREE_CODE (lhs) == SSA_NAME)
3354 if ((gimple_assign_copy_p (stmt)
3355 && is_gimple_min_invariant (rhs1))
3356 || (simplified
3357 && is_gimple_min_invariant (simplified)))
3359 VN_INFO (lhs)->has_constants = true;
3360 if (simplified)
3361 changed = set_ssa_val_to (lhs, simplified);
3362 else
3363 changed = set_ssa_val_to (lhs, rhs1);
3365 else
3367 switch (get_gimple_rhs_class (code))
3369 case GIMPLE_UNARY_RHS:
3370 case GIMPLE_BINARY_RHS:
3371 case GIMPLE_TERNARY_RHS:
3372 changed = visit_nary_op (lhs, stmt);
3373 break;
3374 case GIMPLE_SINGLE_RHS:
3375 switch (TREE_CODE_CLASS (code))
3377 case tcc_reference:
3378 /* VOP-less references can go through the unary case. */
3379 if ((code == REALPART_EXPR
3380 || code == IMAGPART_EXPR
3381 || code == VIEW_CONVERT_EXPR
3382 || code == BIT_FIELD_REF)
3383 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
3385 changed = visit_nary_op (lhs, stmt);
3386 break;
3388 /* Fallthrough. */
3389 case tcc_declaration:
3390 changed = visit_reference_op_load (lhs, rhs1, stmt);
3391 break;
3392 default:
3393 if (code == ADDR_EXPR)
3395 changed = visit_nary_op (lhs, stmt);
3396 break;
3398 else if (code == CONSTRUCTOR)
3400 changed = visit_nary_op (lhs, stmt);
3401 break;
3403 changed = defs_to_varying (stmt);
3405 break;
3406 default:
3407 changed = defs_to_varying (stmt);
3408 break;
3412 else
3413 changed = defs_to_varying (stmt);
3415 else if (is_gimple_call (stmt))
3417 tree lhs = gimple_call_lhs (stmt);
3419 /* ??? We could try to simplify calls. */
3421 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3423 if (stmt_has_constants (stmt))
3424 VN_INFO (lhs)->has_constants = true;
3425 else
3427 /* We reset expr and constantness here because we may
3428 have been value numbering optimistically, and
3429 iterating. They may become non-constant in this case,
3430 even if they were optimistically constant. */
3431 VN_INFO (lhs)->has_constants = false;
3432 VN_INFO (lhs)->expr = NULL_TREE;
3435 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3437 changed = defs_to_varying (stmt);
3438 goto done;
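/* An illustrative sketch for the pure/const case below:

     t_1 = pure_f (a_2);   # VUSE <.MEM_5>
     ...
     t_3 = pure_f (a_2);   # VUSE <.MEM_5>

   Same function, same operands and the same memory state, so t_3
   can be value numbered to t_1 by the reference machinery.  */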
3442 if (!gimple_call_internal_p (stmt)
3443 && (/* Calls to the same function with the same vuse
3444 and the same operands do not necessarily return the same
3445 value, unless they're pure or const. */
3446 gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
3447 /* If calls have a vdef, subsequent calls won't have
3448 the same incoming vuse. So, if 2 calls with vdef have the
3449 same vuse, we know they're not subsequent.
3450 We can value number 2 such non-subsequent calls to the same
3451 function with the same vuse and the same operands as equal,
3452 because there is no code in the program that can compare
3453 the 2 values. */
3454 || gimple_vdef (stmt)))
3455 changed = visit_reference_op_call (lhs, stmt);
3456 else
3457 changed = defs_to_varying (stmt);
3459 else
3460 changed = defs_to_varying (stmt);
3462 done:
3463 return changed;
3466 /* Compare two operands by reverse postorder index. */
3468 static int
3469 compare_ops (const void *pa, const void *pb)
3471 const tree opa = *((const tree *)pa);
3472 const tree opb = *((const tree *)pb);
3473 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3474 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3475 basic_block bba;
3476 basic_block bbb;
3478 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3479 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3480 else if (gimple_nop_p (opstmta))
3481 return -1;
3482 else if (gimple_nop_p (opstmtb))
3483 return 1;
3485 bba = gimple_bb (opstmta);
3486 bbb = gimple_bb (opstmtb);
3488 if (!bba && !bbb)
3489 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3490 else if (!bba)
3491 return -1;
3492 else if (!bbb)
3493 return 1;
3495 if (bba == bbb)
3497 if (gimple_code (opstmta) == GIMPLE_PHI
3498 && gimple_code (opstmtb) == GIMPLE_PHI)
3499 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3500 else if (gimple_code (opstmta) == GIMPLE_PHI)
3501 return -1;
3502 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3503 return 1;
3504 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3505 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3506 else
3507 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3509 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3512 /* Sort an array containing members of a strongly connected component
3513 SCC so that the members are ordered by RPO number.
3514 This means that when the sort is complete, iterating through the
3515 array will give you the members in RPO order. */
3517 static void
3518 sort_scc (VEC (tree, heap) *scc)
3520 VEC_qsort (tree, scc, compare_ops);
3523 /* Insert the no longer used nary ONARY into the hash table of INFO. */
3525 static void
3526 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3528 size_t size = sizeof_vn_nary_op (onary->length);
3529 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3530 &info->nary_obstack);
3531 memcpy (nary, onary, size);
3532 vn_nary_op_insert_into (nary, info->nary, false);
3535 /* Insert the no longer used phi OPHI into the hash table of INFO. */
3537 static void
3538 copy_phi (vn_phi_t ophi, vn_tables_t info)
3540 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3541 void **slot;
3542 memcpy (phi, ophi, sizeof (*phi));
3543 ophi->phiargs = NULL;
3544 slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
3545 gcc_assert (!*slot);
3546 *slot = phi;
3549 /* Insert the no longer used reference OREF into the hash table of INFO. */
3551 static void
3552 copy_reference (vn_reference_t oref, vn_tables_t info)
3554 vn_reference_t ref;
3555 void **slot;
3556 ref = (vn_reference_t) pool_alloc (info->references_pool);
3557 memcpy (ref, oref, sizeof (*ref));
3558 oref->operands = NULL;
3559 slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
3560 INSERT);
3561 if (*slot)
3562 free_reference (*slot);
3563 *slot = ref;
3566 /* Process a strongly connected component in the SSA graph. */
3568 static void
3569 process_scc (VEC (tree, heap) *scc)
3571 tree var;
3572 unsigned int i;
3573 unsigned int iterations = 0;
3574 bool changed = true;
3575 htab_iterator hi;
3576 vn_nary_op_t nary;
3577 vn_phi_t phi;
3578 vn_reference_t ref;
3580 /* If the SCC has a single member, just visit it. */
3581 if (VEC_length (tree, scc) == 1)
3583 tree use = VEC_index (tree, scc, 0);
3584 if (VN_INFO (use)->use_processed)
3585 return;
3586 /* We need to make sure it doesn't form a cycle itself, which can
3587 happen for self-referential PHI nodes. In that case we would
3588 end up inserting an expression with VN_TOP operands into the
3589 valid table which makes us derive bogus equivalences later.
3590 The cheapest way to check this is to assume it for all PHI nodes. */
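/* For example the degenerate loop PHI

     i_1 = PHI <0(2), i_1(3)>

   is a single-member SCC that references itself and therefore goes
   through the iteration below (illustrative).  */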
3591 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3592 /* Fallthru to iteration. */ ;
3593 else
3595 visit_use (use);
3596 return;
3600 /* Iterate over the SCC with the optimistic table until it stops
3601 changing. */
3602 current_info = optimistic_info;
3603 while (changed)
3605 changed = false;
3606 iterations++;
3607 if (dump_file && (dump_flags & TDF_DETAILS))
3608 fprintf (dump_file, "Starting iteration %d\n", iterations);
3609 /* As we are value-numbering optimistically we have to
3610 clear the expression tables and the simplified expressions
3611 in each iteration until we converge. */
3612 htab_empty (optimistic_info->nary);
3613 htab_empty (optimistic_info->phis);
3614 htab_empty (optimistic_info->references);
3615 obstack_free (&optimistic_info->nary_obstack, NULL);
3616 gcc_obstack_init (&optimistic_info->nary_obstack);
3617 empty_alloc_pool (optimistic_info->phis_pool);
3618 empty_alloc_pool (optimistic_info->references_pool);
3619 FOR_EACH_VEC_ELT (tree, scc, i, var)
3620 VN_INFO (var)->expr = NULL_TREE;
3621 FOR_EACH_VEC_ELT (tree, scc, i, var)
3622 changed |= visit_use (var);
3625 statistics_histogram_event (cfun, "SCC iterations", iterations);
3627 /* Finally, copy the contents of the no longer used optimistic
3628 table to the valid table. */
3629 FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
3630 copy_nary (nary, valid_info);
3631 FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
3632 copy_phi (phi, valid_info);
3633 FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
3634 copy_reference (ref, valid_info);
3636 current_info = valid_info;
3639 DEF_VEC_O(ssa_op_iter);
3640 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
3642 /* Pop the components of the found SCC for NAME off the SCC stack
3643 and process them. Returns true if all went well, false if
3644 we run into resource limits. */
3646 static bool
3647 extract_and_process_scc_for_name (tree name)
3649 VEC (tree, heap) *scc = NULL;
3650 tree x;
3652 /* Found an SCC, pop the components off the SCC stack and
3653 process them. */
3654 do
3656 x = VEC_pop (tree, sccstack);
3658 VN_INFO (x)->on_sccstack = false;
3659 VEC_safe_push (tree, heap, scc, x);
3660 } while (x != name);
3662 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
3663 if (VEC_length (tree, scc)
3664 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3666 if (dump_file)
3667 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3668 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
3669 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3671 VEC_free (tree, heap, scc);
3672 return false;
3675 if (VEC_length (tree, scc) > 1)
3676 sort_scc (scc);
3678 if (dump_file && (dump_flags & TDF_DETAILS))
3679 print_scc (dump_file, scc);
3681 process_scc (scc);
3683 VEC_free (tree, heap, scc);
3685 return true;
3688 /* Depth first search on NAME to discover and process SCCs in the SSA
3689 graph (essentially Tarjan's SCC algorithm, implemented iteratively).
3690 Execution of this algorithm relies on the fact that the SCCs are
3691 popped off the stack in topological order.
3692 Returns true if successful, false if we stopped processing SCCs due
3693 to resource constraints.
3695 static bool
3696 DFS (tree name)
3698 VEC(ssa_op_iter, heap) *itervec = NULL;
3699 VEC(tree, heap) *namevec = NULL;
3700 use_operand_p usep = NULL;
3701 gimple defstmt;
3702 tree use;
3703 ssa_op_iter iter;
3705 start_over:
3706 /* SCC info */
3707 VN_INFO (name)->dfsnum = next_dfs_num++;
3708 VN_INFO (name)->visited = true;
3709 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3711 VEC_safe_push (tree, heap, sccstack, name);
3712 VN_INFO (name)->on_sccstack = true;
3713 defstmt = SSA_NAME_DEF_STMT (name);
3715 /* Recursively DFS on our operands, looking for SCC's. */
3716 if (!gimple_nop_p (defstmt))
3718 /* Push a new iterator. */
3719 if (gimple_code (defstmt) == GIMPLE_PHI)
3720 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3721 else
3722 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3724 else
3725 clear_and_done_ssa_iter (&iter);
3727 while (1)
3729 /* If we are done processing uses of a name, go up the stack
3730 of iterators and process SCCs as we found them. */
3731 if (op_iter_done (&iter))
3733 /* See if we found an SCC. */
3734 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3735 if (!extract_and_process_scc_for_name (name))
3737 VEC_free (tree, heap, namevec);
3738 VEC_free (ssa_op_iter, heap, itervec);
3739 return false;
3742 /* Check if we are done. */
3743 if (VEC_empty (tree, namevec))
3745 VEC_free (tree, heap, namevec);
3746 VEC_free (ssa_op_iter, heap, itervec);
3747 return true;
3750 /* Restore the last use walker and continue walking there. */
3751 use = name;
3752 name = VEC_pop (tree, namevec);
3753 memcpy (&iter, &VEC_last (ssa_op_iter, itervec),
3754 sizeof (ssa_op_iter));
3755 VEC_pop (ssa_op_iter, itervec);
3756 goto continue_walking;
3759 use = USE_FROM_PTR (usep);
3761 /* Since we handle phi nodes, we will sometimes get
3762 invariants in the use expression. */
3763 if (TREE_CODE (use) == SSA_NAME)
3765 if (! (VN_INFO (use)->visited))
3767 /* Recurse by pushing the current use walking state on
3768 the stack and starting over. */
3769 VEC_safe_push(ssa_op_iter, heap, itervec, iter);
3770 VEC_safe_push(tree, heap, namevec, name);
3771 name = use;
3772 goto start_over;
3774 continue_walking:
3775 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3776 VN_INFO (use)->low);
3778 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3779 && VN_INFO (use)->on_sccstack)
3781 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3782 VN_INFO (name)->low);
3786 usep = op_iter_next_use (&iter);
3790 /* Allocate a value number table. */
3792 static void
3793 allocate_vn_table (vn_tables_t table)
3795 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3796 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3797 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3798 free_reference);
3800 gcc_obstack_init (&table->nary_obstack);
3801 table->phis_pool = create_alloc_pool ("VN phis",
3802 sizeof (struct vn_phi_s),
3803 30);
3804 table->references_pool = create_alloc_pool ("VN references",
3805 sizeof (struct vn_reference_s),
3806 30);
3809 /* Free a value number table. */
3811 static void
3812 free_vn_table (vn_tables_t table)
3814 htab_delete (table->phis);
3815 htab_delete (table->nary);
3816 htab_delete (table->references);
3817 obstack_free (&table->nary_obstack, NULL);
3818 free_alloc_pool (table->phis_pool);
3819 free_alloc_pool (table->references_pool);
3822 static void
3823 init_scc_vn (void)
3825 size_t i;
3826 int j;
3827 int *rpo_numbers_temp;
3829 calculate_dominance_info (CDI_DOMINATORS);
3830 sccstack = NULL;
3831 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3832 free);
3834 constant_value_ids = BITMAP_ALLOC (NULL);
3836 next_dfs_num = 1;
3837 next_value_id = 1;
3839 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
3840 /* VEC_alloc doesn't actually grow it to the right size; it just
3841 preallocates the space to do so. */
3842 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table,
3843 num_ssa_names + 1);
3844 gcc_obstack_init (&vn_ssa_aux_obstack);
3846 shared_lookup_phiargs = NULL;
3847 shared_lookup_references = NULL;
3848 rpo_numbers = XNEWVEC (int, last_basic_block);
3849 rpo_numbers_temp = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
3850 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3852 /* rpo_numbers_temp is an array in RPO order: rpo[i] = bb means that
3853 the i'th block in RPO order is bb. We want to map bb indices to RPO
3854 numbers, so we need to invert this array. */
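/* E.g. if rpo_numbers_temp were { 2, 4, 3 }, block 2 comes first in
   RPO, so rpo_numbers[2] = 0, rpo_numbers[4] = 1 and
   rpo_numbers[3] = 2 (illustrative values).  */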
3855 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3856 rpo_numbers[rpo_numbers_temp[j]] = j;
3858 XDELETE (rpo_numbers_temp);
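/* Editorial note, not part of the original source: the loop above
   inverts a permutation.  E.g. if rpo_numbers_temp holds {4, 2, 3},
   block 4 comes first in RPO order, so afterwards rpo_numbers[4] == 0,
   rpo_numbers[2] == 1 and rpo_numbers[3] == 2.  */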
3860 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3862 /* Create the VN_INFO structures, and initialize value numbers to
3863 TOP. */
3864 for (i = 0; i < num_ssa_names; i++)
3865 {
3866 tree name = ssa_name (i);
3867 if (name)
3868 {
3869 VN_INFO_GET (name)->valnum = VN_TOP;
3870 VN_INFO (name)->expr = NULL_TREE;
3871 VN_INFO (name)->value_id = 0;
3872 }
3873 }
3875 renumber_gimple_stmt_uids ();
3877 /* Create the valid and optimistic value numbering tables. */
3878 valid_info = XCNEW (struct vn_tables_s);
3879 allocate_vn_table (valid_info);
3880 optimistic_info = XCNEW (struct vn_tables_s);
3881 allocate_vn_table (optimistic_info);
3882 }
3884 void
3885 free_scc_vn (void)
3886 {
3887 size_t i;
3889 htab_delete (constant_to_value_id);
3890 BITMAP_FREE (constant_value_ids);
3891 VEC_free (tree, heap, shared_lookup_phiargs);
3892 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
3893 XDELETEVEC (rpo_numbers);
3895 for (i = 0; i < num_ssa_names; i++)
3896 {
3897 tree name = ssa_name (i);
3898 if (name
3899 && VN_INFO (name)->needs_insertion)
3900 release_ssa_name (name);
3901 }
3902 obstack_free (&vn_ssa_aux_obstack, NULL);
3903 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
3905 VEC_free (tree, heap, sccstack);
3906 free_vn_table (valid_info);
3907 XDELETE (valid_info);
3908 free_vn_table (optimistic_info);
3909 XDELETE (optimistic_info);
3910 }
3912 /* Set *ID if we computed something useful in RESULT. */
3914 static void
3915 set_value_id_for_result (tree result, unsigned int *id)
3916 {
3917 if (result)
3918 {
3919 if (TREE_CODE (result) == SSA_NAME)
3920 *id = VN_INFO (result)->value_id;
3921 else if (is_gimple_min_invariant (result))
3922 *id = get_or_alloc_constant_value_id (result);
3923 }
3924 }
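/* Editorial note, not part of the original source: if RESULT is NULL,
   or is neither an SSA name nor a gimple invariant, *ID is left
   untouched, so whatever value id the hash table entry already
   carries survives.  */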
3926 /* Set the value ids in the valid hash tables. */
3928 static void
3929 set_hashtable_value_ids (void)
3930 {
3931 htab_iterator hi;
3932 vn_nary_op_t vno;
3933 vn_reference_t vr;
3934 vn_phi_t vp;
3936 /* Now set the value ids of the entries we put in the hash
3937 tables.  */
3939 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
3940 vno, vn_nary_op_t, hi)
3941 set_value_id_for_result (vno->result, &vno->value_id);
3943 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
3944 vp, vn_phi_t, hi)
3945 set_value_id_for_result (vp->result, &vp->value_id);
3947 FOR_EACH_HTAB_ELEMENT (valid_info->references,
3948 vr, vn_reference_t, hi)
3949 set_value_id_for_result (vr->result, &vr->value_id);
3950 }
3952 /* Do SCCVN.  Returns true if it finished, false if we bailed out
3953 due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
3954 how the alias oracle is consulted when walking memory references
during the VN process.  */
3956 bool
3957 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
3958 {
3959 size_t i;
3960 tree param;
3961 bool changed = true;
3963 default_vn_walk_kind = default_vn_walk_kind_;
3965 init_scc_vn ();
3966 current_info = valid_info;
3968 for (param = DECL_ARGUMENTS (current_function_decl);
3969 param;
3970 param = DECL_CHAIN (param))
3971 {
3972 tree def = ssa_default_def (cfun, param);
3973 if (def)
3974 VN_INFO (def)->valnum = def;
3975 }
3977 for (i = 1; i < num_ssa_names; ++i)
3978 {
3979 tree name = ssa_name (i);
3980 if (name
3981 && VN_INFO (name)->visited == false
3982 && !has_zero_uses (name))
3983 if (!DFS (name))
3984 {
3985 free_scc_vn ();
3986 return false;
3987 }
3988 }
3990 /* Initialize the value ids. */
3992 for (i = 1; i < num_ssa_names; ++i)
3993 {
3994 tree name = ssa_name (i);
3995 vn_ssa_aux_t info;
3996 if (!name)
3997 continue;
3998 info = VN_INFO (name);
3999 if (info->valnum == name
4000 || info->valnum == VN_TOP)
4001 info->value_id = get_next_value_id ();
4002 else if (is_gimple_min_invariant (info->valnum))
4003 info->value_id = get_or_alloc_constant_value_id (info->valnum);
4004 }
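/* Editorial note, not part of the original source: at this point only
   class leaders and constants have ids.  With hypothetical names a_1
   (valnum a_1) and b_2 (valnum a_1), a_1 just received a fresh id
   from get_next_value_id while b_2 still has value_id 0; the
   propagation loop below copies a_1's id over to b_2.  */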
4006 /* Propagate value ids until they stop changing.  */
4007 while (changed)
4008 {
4009 changed = false;
4010 for (i = 1; i < num_ssa_names; ++i)
4011 {
4012 tree name = ssa_name (i);
4013 vn_ssa_aux_t info;
4014 if (!name)
4015 continue;
4016 info = VN_INFO (name);
4017 if (TREE_CODE (info->valnum) == SSA_NAME
4018 && info->valnum != name
4019 && info->value_id != VN_INFO (info->valnum)->value_id)
4020 {
4021 changed = true;
4022 info->value_id = VN_INFO (info->valnum)->value_id;
4023 }
4024 }
4025 }
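/* Editorial note, not part of the original source: one pass is not
   always enough because an id moves along one valnum edge per pass.
   For a hypothetical chain c_3 -> b_2 -> a_1 scanned in the order
   c_3, b_2, the first pass leaves c_3 alone (b_2's id is still stale
   and equal to c_3's) but updates b_2 from a_1; only the second pass
   moves the id on to c_3, and a third pass sees no change and
   stops.  */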
4027 set_hashtable_value_ids ();
4029 if (dump_file && (dump_flags & TDF_DETAILS))
4030 {
4031 fprintf (dump_file, "Value numbers:\n");
4032 for (i = 0; i < num_ssa_names; i++)
4033 {
4034 tree name = ssa_name (i);
4035 if (name
4036 && VN_INFO (name)->visited
4037 && SSA_VAL (name) != name)
4038 {
4039 print_generic_expr (dump_file, name, 0);
4040 fprintf (dump_file, " = ");
4041 print_generic_expr (dump_file, SSA_VAL (name), 0);
4042 fprintf (dump_file, "\n");
4043 }
4044 }
4045 }
4047 return true;
4048 }
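/* Editorial sketch, not part of the original source: a client pass
   would typically drive SCCVN along the lines of

     if (!run_scc_vn (VN_WALKREWRITE))
       return 0;                      // bailed out, leave the IL alone
     ... consult SSA_VAL (name) and VN_INFO (name)->value_id ...
     free_scc_vn ();

   where the vn_lookup_kind argument picks how aggressively memory
   references are walked (and possibly rewritten) during lookups.  */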
4050 /* Return the maximum value id we have ever seen. */
4052 unsigned int
4053 get_max_value_id (void)
4054 {
4055 return next_value_id;
4056 }
4058 /* Return the next unique value id.  */
4060 unsigned int
4061 get_next_value_id (void)
4062 {
4063 return next_value_id++;
4064 }
4067 /* Compare two expressions E1 and E2 and return true if they are equal. */
4069 bool
4070 expressions_equal_p (tree e1, tree e2)
4071 {
4072 /* The obvious case. */
4073 if (e1 == e2)
4074 return true;
4076 /* If only one of them is null, they cannot be equal. */
4077 if (!e1 || !e2)
4078 return false;
4080 /* Now perform the actual comparison. */
4081 if (TREE_CODE (e1) == TREE_CODE (e2)
4082 && operand_equal_p (e1, e2, OEP_PURE_SAME))
4083 return true;
4085 return false;
4086 }
4089 /* Return true if the nary operation NARY may trap. This is a copy
4090 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
4092 bool
4093 vn_nary_may_trap (vn_nary_op_t nary)
4094 {
4095 tree type;
4096 tree rhs2 = NULL_TREE;
4097 bool honor_nans = false;
4098 bool honor_snans = false;
4099 bool fp_operation = false;
4100 bool honor_trapv = false;
4101 bool handled, ret;
4102 unsigned i;
4104 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4105 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4106 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4107 {
4108 type = nary->type;
4109 fp_operation = FLOAT_TYPE_P (type);
4110 if (fp_operation)
4111 {
4112 honor_nans = flag_trapping_math && !flag_finite_math_only;
4113 honor_snans = flag_signaling_nans != 0;
4114 }
4115 else if (INTEGRAL_TYPE_P (type)
4116 && TYPE_OVERFLOW_TRAPS (type))
4117 honor_trapv = true;
4118 }
4119 if (nary->length >= 2)
4120 rhs2 = nary->op[1];
4121 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4122 honor_trapv,
4123 honor_nans, honor_snans, rhs2,
4124 &handled);
4125 if (handled
4126 && ret)
4127 return true;
4129 for (i = 0; i < nary->length; ++i)
4130 if (tree_could_trap_p (nary->op[i]))
4131 return true;
4133 return false;
4134 }
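/* Editorial note, not part of the original source: e.g. a signed
   PLUS_EXPR with -ftrapv sets HONOR_TRAPV and is reported as possibly
   trapping, as is an ordered floating comparison such as < when NaNs
   must be honored; a plain bitwise AND falls through to the operand
   loop and may trap only if tree_could_trap_p holds for one of its
   operands.  */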