/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "dumpfile.h"
#include "hashtab.h"
#include "alloc-pool.h"
#include "flags.h"
#include "bitmap.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "gimple-fold.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash-based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
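/* As an illustrative sketch (hypothetical GIMPLE, not from the paper),
   consider the SSA cycle formed by a simple loop:

     i_1 = PHI <0(entry), i_2(latch)>
     i_2 = i_1 + 0;

   The SCC {i_1, i_2} is iterated with the optimistic table: assuming
   i_1 == 0 makes i_2 = 0 + 0 == 0, which confirms the assumption, so
   both names value-number to the constant 0.  A plain RPO walk would
   instead have to re-iterate all blocks to reach the same fixpoint.  */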
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;


DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
                                SSA_NAME_VERSION (name));
  gcc_checking_assert (res);
  return res;
}
/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), value);
}
/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}
/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;
  enum tree_code code;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is not defined by an assignment, use it directly.  */
  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  code = gimple_assign_rhs_code (def_stmt);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      if ((code == REALPART_EXPR
           || code == IMAGPART_EXPR
           || code == VIEW_CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
                                      0)) == SSA_NAME)
        expr = fold_build1 (code,
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (code,
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (code,
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    case tcc_exceptional:
      if (code == CONSTRUCTOR
          && TREE_CODE
               (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
        expr = gimple_assign_rhs1 (def_stmt);
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
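/* For example (illustrative): if x_1 value-numbers to y_2 and y_2 is
   defined by y_2 = a_3 + b_4, the representative expression returned
   for x_1 is the folded tree a_3 + b_4 built by the tcc_binary case
   above, which callers can use for further combining.  */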
/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}
/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist, return 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, creating a new one if none exists
   yet.  Return the value id.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, INSERT);
  if (*slot)
    return ((vn_constant_t)*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = (void *) vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
          /* We do not care for differences in type qualification.  */
          && (vro1->type == vro2->type
              || (vro1->type && vro2->type
                  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
                                         TYPE_MAIN_VARIANT (vro2->type))))
          && expressions_equal_p (vro1->op0, vro2->op0)
          && expressions_equal_p (vro1->op1, vro2->op1)
          && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}
/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vn_reference_op_s, vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
        deref = true;
      else if (vro->opcode != ADDR_EXPR)
        deref = false;
      if (vro->off != -1)
        {
          if (off == -1)
            off = 0;
          off += vro->off;
        }
      else
        {
          if (off != -1
              && off != 0)
            result = iterative_hash_hashval_t (off, result);
          off = -1;
          if (deref
              && vro->opcode == ADDR_EXPR)
            {
              if (vro->op0)
                {
                  tree op = TREE_OPERAND (vro->op0, 0);
                  result = iterative_hash_hashval_t (TREE_CODE (op), result);
                  result = iterative_hash_expr (op, result);
                }
            }
          else
            result = vn_reference_op_compute_hash (vro, result);
        }
    }
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
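/* Illustrative note: the running OFF accumulator folds runs of
   constant-offset operands into a single hash input, so e.g. the
   operand strings for a.b.c and for MEM[&a + 8] hash identically
   when .b.c amounts to a constant byte offset of 8, giving the
   equality function a chance to prove them equivalent.  */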
/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  unsigned i, j;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
        return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
           && (TYPE_PRECISION (vr1->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
           && (TYPE_PRECISION (vr2->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
        {
          if (vro1->opcode == MEM_REF)
            deref1 = true;
          if (vro1->off == -1)
            break;
          off1 += vro1->off;
        }
      for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
        {
          if (vro2->opcode == MEM_REF)
            deref2 = true;
          if (vro2->off == -1)
            break;
          off2 += vro2->off;
        }
      if (off1 != off2)
        return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
        {
          memset (&tem1, 0, sizeof (tem1));
          tem1.op0 = TREE_OPERAND (vro1->op0, 0);
          tem1.type = TREE_TYPE (tem1.op0);
          tem1.opcode = TREE_CODE (tem1.op0);
          vro1 = &tem1;
          deref1 = false;
        }
      if (deref2 && vro2->opcode == ADDR_EXPR)
        {
          memset (&tem2, 0, sizeof (tem2));
          tem2.op0 = TREE_OPERAND (vro2->op0, 0);
          tem2.type = TREE_TYPE (tem2.op0);
          tem2.opcode = TREE_CODE (tem2.op0);
          vro2 = &tem2;
          deref2 = false;
        }
      if (deref1 != deref2)
        return false;
      if (!vn_reference_op_eq (vro1, vro2))
        return false;
      ++j;
      ++i;
    }
  while (VEC_length (vn_reference_op_s, vr1->operands) != i
         || VEC_length (vn_reference_op_s, vr2->operands) != j);

  return true;
}
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
        {
        case MODIFY_EXPR:
          temp.op0 = TREE_OPERAND (ref, 1);
          break;
        case WITH_SIZE_EXPR:
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.off = 0;
          break;
        case MEM_REF:
          /* The base address gets its own vn_reference_op_s structure.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          if (host_integerp (TREE_OPERAND (ref, 1), 0))
            temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          {
            tree this_offset = component_ref_field_offset (ref);
            if (this_offset
                && TREE_CODE (this_offset) == INTEGER_CST)
              {
                tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
                if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
                  {
                    double_int off
                      = double_int_add (tree_to_double_int (this_offset),
                                        double_int_rshift
                                          (tree_to_double_int (bit_offset),
                                           BITS_PER_UNIT == 8
                                           ? 3 : exact_log2 (BITS_PER_UNIT),
                                           HOST_BITS_PER_DOUBLE_INT, true));
                    if (double_int_fits_in_shwi_p (off))
                      temp.off = off.low;
                  }
              }
          }
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
          if (TREE_CODE (temp.op0) == INTEGER_CST
              && TREE_CODE (temp.op1) == INTEGER_CST
              && TREE_CODE (temp.op2) == INTEGER_CST)
            {
              double_int off = tree_to_double_int (temp.op0);
              off = double_int_add (off,
                                    double_int_neg
                                      (tree_to_double_int (temp.op1)));
              off = double_int_mul (off, tree_to_double_int (temp.op2));
              if (double_int_fits_in_shwi_p (off))
                temp.off = off.low;
            }
          break;
        case VAR_DECL:
          if (DECL_HARD_REGISTER (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthru.  */
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
          /* Canonicalize decls to MEM[&decl] which is what we end up with
             when valueizing MEM[ptr] with ptr = &decl.  */
          temp.opcode = MEM_REF;
          temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
          temp.off = 0;
          VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
          temp.opcode = ADDR_EXPR;
          temp.op0 = build_fold_addr_expr (ref);
          temp.type = TREE_TYPE (temp.op0);
          temp.off = -1;
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case FIXED_CST:
        case CONSTRUCTOR:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (i.e. they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration.  */
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          temp.off = 0;
          break;
        case IMAGPART_EXPR:
          /* This is only interesting for its constant offset.  */
          temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
          break;
        default:
          gcc_unreachable ();
        }
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
          || TREE_CODE (ref) == MODIFY_EXPR
          || TREE_CODE (ref) == WITH_SIZE_EXPR
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
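/* Illustrative example: for a load from a.b[i] the loop above
   records, outermost first,

     { ARRAY_REF, op0 = i, op1 = low bound, op2 = element size }
     { COMPONENT_REF, op0 = FIELD_DECL b }
     { MEM_REF, op0 = 0 }
     { ADDR_EXPR, op0 = &a }

   where the last two entries come from the decl canonicalization to
   MEM[&a].  Constant indexes additionally set the OFF fields so later
   phases can work with plain byte offsets.  */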
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
        size = -1;
      else
        size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case ADDR_EXPR:
          /* Apart from ADDR_EXPR arguments to MEM_REF.  */
          if (base != NULL_TREE
              && TREE_CODE (base) == MEM_REF
              && op->op0
              && DECL_P (TREE_OPERAND (op->op0, 0)))
            {
              vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
              base = TREE_OPERAND (op->op0, 0);
              if (pop->off == -1)
                {
                  max_size = -1;
                  offset = 0;
                }
              else
                offset += pop->off * BITS_PER_UNIT;
              op0_p = NULL;
              break;
            }
          /* Fallthru.  */
        case CALL_EXPR:
          return false;

        /* Record the base objects.  */
        case MEM_REF:
          base_alias_set = get_deref_alias_set (op->op0);
          *op0_p = build2 (MEM_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          op0_p = NULL;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += tree_low_cst (op->op1, 0);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */

            if (op->op1
                || !host_integerp (DECL_FIELD_OFFSET (field), 1))
              max_size = -1;
            else
              {
                offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
                           * BITS_PER_UNIT);
                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (!host_integerp (op->op0, 0)
              || !host_integerp (op->op1, 0)
              || !host_integerp (op->op2, 0))
            max_size = -1;
          else
            {
              HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
              hindex -= TREE_INT_CST_LOW (op->op1);
              hindex *= TREE_INT_CST_LOW (op->op2);
              hindex *= BITS_PER_UNIT;
              offset += hindex;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  return true;
}
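/* A worked example (illustrative): for the ops of s.a[2], with int
   array a at byte offset 4 in s and 4-byte elements, the walk above
   accumulates offset = 2*4*8 + 4*8 = 96 bits with size == max_size
   == 32 bits, giving the alias oracle an exact extent to
   disambiguate against.  */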
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);

  /* If two calls have a different non-SSA LHS, vdef value numbers should be
     different.  By adding the LHS here to the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
    }

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  temp.off = -1;
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}
/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
                            unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  tree addr_base;
  HOST_WIDE_INT addr_offset;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
                                             &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != op->op0)
    {
      double_int off = tree_to_double_int (mem_op->op0);
      off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
      off = double_int_add (off, shwi_to_double_int (addr_offset));
      mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (host_integerp (mem_op->op0, 0))
        mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
      else
        mem_op->off = -1;
    }
}
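/* For instance (illustrative): MEM[&s.f + 4] with field f at byte
   offset 8 is rewritten to MEM[&s + 12]; the ADDR_EXPR operand now
   points at the containing object and the constant offsets have been
   merged into the MEM_REF operand.  */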
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
                                     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  gimple def_stmt;
  enum tree_code code;
  double_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = tree_to_double_int (mem_op->op0);
  off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
                                                 &addr_offset);
      if (!addr_base
          || TREE_CODE (addr_base) != MEM_REF)
        return;

      off = double_int_add (off, shwi_to_double_int (addr_offset));
      off = double_int_add (off, mem_ref_offset (addr_base));
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
          || TREE_CODE (ptroff) != INTEGER_CST)
        return;

      off = double_int_add (off, tree_to_double_int (ptroff));
      op->op0 = ptr;
    }

  mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (host_integerp (mem_op->op0, 0))
    mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}
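/* For instance (illustrative GIMPLE): given

     q_1 = p_2 + 4;
     ... = MEM[q_1 + 8];

   the POINTER_PLUS_EXPR branch rewrites the reference to
   MEM[p_2 + 12], and the final recursion keeps simplifying as long
   as the replacement address is itself defined by an ADDR_EXPR or
   another constant pointer addition.  */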
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  VEC (vn_reference_op_s, heap) *operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = VEC_index (vn_reference_op_s, operands, 0);
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && VEC_length (vn_reference_op_s, operands) >= 2
      && VEC_length (vn_reference_op_s, operands) <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (VEC_length (vn_reference_op_s, operands) > 2)
        arg1 = VEC_index (vn_reference_op_s, operands, 2);
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
          || (arg0->opcode == ADDR_EXPR
              && is_gimple_min_invariant (arg0->op0)))
        anyconst = true;
      if (arg1
          && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
              || (arg1->opcode == ADDR_EXPR
                  && is_gimple_min_invariant (arg1->op0))))
        anyconst = true;
      if (anyconst)
        {
          tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
                                         arg1 ? 2 : 1,
                                         arg0->op0,
                                         arg1 ? arg1->op0 : NULL);
          if (folded
              && TREE_CODE (folded) == NOP_EXPR)
            folded = TREE_OPERAND (folded, 0);
          if (folded
              && is_gimple_min_invariant (folded))
            return folded;
        }
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
           && TREE_CODE (op->op0) == INTEGER_CST
           && integer_zerop (op->op1)
           && VEC_length (vn_reference_op_s, operands) == 2)
    {
      vn_reference_op_t arg0;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (arg0->opcode == STRING_CST
          && (TYPE_MODE (op->type)
              == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
          && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
          && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
          && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
        return build_int_cst_type (op->type,
                                   (TREE_STRING_POINTER (arg0->op0)
                                    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}
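/* For example (illustrative): a reference whose operands describe
   "abc"[1] satisfies the second arm and folds to the character
   constant 'b', while a call reference for a builtin such as
   __builtin_strlen with an invariant &"abc" argument may fold to 3
   through build_call_expr in the first arm.  */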
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs_1 (VEC (vn_reference_op_s, heap) *orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (vn_reference_op_s, orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          tree tem = SSA_VAL (vro->op0);
          if (tem != vro->op0)
            {
              *valueized_anything = true;
              vro->op0 = tem;
            }
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        {
          tree tem = SSA_VAL (vro->op1);
          if (tem != vro->op1)
            {
              *valueized_anything = true;
              vro->op1 = tem;
            }
        }
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        {
          tree tem = SSA_VAL (vro->op2);
          if (tem != vro->op2)
            {
              *valueized_anything = true;
              vro->op2 = tem;
            }
        }
      /* If it transforms from an SSA_NAME to an address, fold with
         a preceding indirect reference.  */
      if (i > 0
          && vro->op0
          && TREE_CODE (vro->op0) == ADDR_EXPR
          && VEC_index (vn_reference_op_s,
                        orig, i - 1)->opcode == MEM_REF)
        vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
               && vro->opcode == SSA_NAME
               && VEC_index (vn_reference_op_s,
                             orig, i - 1)->opcode == MEM_REF)
        vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
         one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
               && vro->off == -1
               && TREE_CODE (vro->op0) == INTEGER_CST
               && TREE_CODE (vro->op1) == INTEGER_CST
               && TREE_CODE (vro->op2) == INTEGER_CST)
        {
          double_int off = tree_to_double_int (vro->op0);
          off = double_int_add (off,
                                double_int_neg
                                  (tree_to_double_int (vro->op1)));
          off = double_int_mul (off, tree_to_double_int (vro->op2));
          if (double_int_fits_in_shwi_p (off))
            vro->off = off.low;
        }
    }

  return orig;
}
static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}

static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
                                              valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;
/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
                                          alias_set_type set,
                                          tree type,
                                          VEC (vn_reference_op_s,
                                               heap) *operands,
                                          tree value)
{
  struct vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
                                     VEC_copy (vn_reference_op_s, heap,
                                               operands), value, value_id);
}
/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static VEC (vn_reference_op_s, heap) *lhs_ops = NULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definition's LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      VEC (vn_reference_op_s, heap) *tem;
      tree lhs = gimple_assign_lhs (def_stmt);
      bool valueized_anything = false;
      /* Avoid re-allocation overhead.  */
      VEC_truncate (vn_reference_op_s, lhs_ops, 0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      tem = lhs_ops;
      lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
      gcc_assert (lhs_ops == tem);
      if (valueized_anything)
        {
          lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
                                                      get_alias_set (lhs),
                                                      TREE_TYPE (lhs), lhs_ops);
          if (lhs_ref_ok
              && !refs_may_alias_p_1 (ref, &lhs_ref, true))
            return NULL;
        }
      else
        {
          ao_ref_init (&lhs_ref, lhs);
          lhs_ref_ok = true;
        }
    }

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* We can't deduce anything useful from clobbers.  */
  if (gimple_clobber_p (def_stmt))
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
            == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
          && maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          return vn_reference_lookup_or_insert_for_pieces
                   (vuse, vr->set, vr->type, vr->operands, val);
        }
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          return vn_reference_lookup_or_insert_for_pieces
                   (vuse, vr->set, vr->type, vr->operands, val);
        }
    }

  /* 3) Assignment from a constant.  We can use folds native encode/interpret
     routines to extract the assigned bits.  */
  else if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
           && ref->size == maxsize
           && maxsize % BITS_PER_UNIT == 0
           && offset % BITS_PER_UNIT == 0
           && is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
          && maxsize2 == size2
          && size2 % BITS_PER_UNIT == 0
          && offset2 % BITS_PER_UNIT == 0
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          /* We support up to 512-bit values (for V8DFmode).  */
          unsigned char buffer[64];
          int len;

          len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
                                    buffer, sizeof (buffer));
          if (len > 0)
            {
              tree val = native_interpret_expr (vr->type,
                                                buffer
                                                + ((offset - offset2)
                                                   / BITS_PER_UNIT),
                                                ref->size / BITS_PER_UNIT);
              if (val)
                return vn_reference_lookup_or_insert_for_pieces
                         (vuse, vr->set, vr->type, vr->operands, val);
            }
        }
    }

  /* 4) Assignment from an SSA name which definition we may be able
     to access pieces from.  */
  else if (ref->size == maxsize
           && is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
    {
      tree rhs1 = gimple_assign_rhs1 (def_stmt);
      gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt2)
          && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
              || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
          && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
        {
          tree base2;
          HOST_WIDE_INT offset2, size2, maxsize2, off;
          base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                           &offset2, &size2, &maxsize2);
          off = offset - offset2;
          if (maxsize2 != -1
              && maxsize2 == size2
              && operand_equal_p (base, base2, 0)
              && offset2 <= offset
              && offset2 + size2 >= offset + maxsize)
            {
              tree val = NULL_TREE;
              HOST_WIDE_INT elsz
                = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
              if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
                {
                  if (off == 0)
                    val = gimple_assign_rhs1 (def_stmt2);
                  else if (off == elsz)
                    val = gimple_assign_rhs2 (def_stmt2);
                }
              else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
                       && off % elsz == 0)
                {
                  tree ctor = gimple_assign_rhs1 (def_stmt2);
                  unsigned i = off / elsz;
                  if (i < CONSTRUCTOR_NELTS (ctor))
                    {
                      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
                      if (compare_tree_int (elt->index, i) == 0)
                        val = elt->value;
                    }
                }
              if (val)
                return vn_reference_lookup_or_insert_for_pieces
                         (vuse, vr->set, vr->type, vr->operands, val);
            }
        }
    }

  /* 5) For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      VEC (vn_reference_op_s, heap) *rhs = NULL;
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
        return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      if (maxsize2 == -1
          || (base != base2 && !operand_equal_p (base, base2, 0))
          || offset2 > offset
          || offset2 + size2 < offset + maxsize)
        return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
         contains valueized operands for the lhs.  */
      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
      j = VEC_length (vn_reference_op_s, lhs_ops) - 1;
      while (j >= 0 && i >= 0
             && vn_reference_op_eq (VEC_index (vn_reference_op_s,
                                               vr->operands, i),
                                    VEC_index (vn_reference_op_s, lhs_ops, j)))
        {
          i--;
          j--;
        }

      /* ??? The innermost op should always be a MEM_REF and we already
         checked that the assignment to the lhs kills vr.  Thus for
         aggregate copies using char[] types the vn_reference_op_eq
         may fail when comparing types for compatibility.  But we really
         don't care here - further lookups with the rewritten operands
         will simply fail if we messed up types too badly.  */
      if (j == 0 && i >= 0
          && VEC_index (vn_reference_op_s, lhs_ops, 0)->opcode == MEM_REF
          && VEC_index (vn_reference_op_s, lhs_ops, 0)->off != -1
          && (VEC_index (vn_reference_op_s, lhs_ops, 0)->off
              == VEC_index (vn_reference_op_s, vr->operands, i)->off))
        i--, j--;

      /* i now points to the first additional op.
         ??? LHS may not be completely contained in VR, one or more
         VIEW_CONVERT_EXPRs could be in its way.  We could at least
         try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
        return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
          > VEC_length (vn_reference_op_s, vr->operands))
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
                         i + 1 + VEC_length (vn_reference_op_s, rhs));
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands,
                      i + 1 + VEC_length (vn_reference_op_s, rhs));
      FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
        VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
      VEC_free (vn_reference_op_s, heap, rhs);
      vr->operands = valueize_refs (vr->operands);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* 6) For memcpy copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && is_gimple_reg_type (vr->type)
           /* ??? Handle BCOPY as well.  */
           && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
               || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
               || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
           && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
               || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
           && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
               || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
           && host_integerp (gimple_call_arg (def_stmt, 2), 1))
    {
      tree lhs, rhs;
      ao_ref r;
      HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
      vn_reference_op_s op;
      HOST_WIDE_INT at;

      /* Only handle non-variable, addressable refs.  */
      if (ref->size != maxsize
          || offset % BITS_PER_UNIT != 0
          || ref->size % BITS_PER_UNIT != 0)
        return (void *)-1;

      /* Extract a pointer base and an offset for the destination.  */
      lhs = gimple_call_arg (def_stmt, 0);
      lhs_offset = 0;
      if (TREE_CODE (lhs) == SSA_NAME)
        lhs = SSA_VAL (lhs);
      if (TREE_CODE (lhs) == ADDR_EXPR)
        {
          tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
                                                    &lhs_offset);
          if (!tem)
            return (void *)-1;
          if (TREE_CODE (tem) == MEM_REF
              && host_integerp (TREE_OPERAND (tem, 1), 1))
            {
              lhs = TREE_OPERAND (tem, 0);
              lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
            }
          else if (DECL_P (tem))
            lhs = build_fold_addr_expr (tem);
          else
            return (void *)-1;
        }
      if (TREE_CODE (lhs) != SSA_NAME
          && TREE_CODE (lhs) != ADDR_EXPR)
        return (void *)-1;

      /* Extract a pointer base and an offset for the source.  */
      rhs = gimple_call_arg (def_stmt, 1);
      rhs_offset = 0;
      if (TREE_CODE (rhs) == SSA_NAME)
        rhs = SSA_VAL (rhs);
      if (TREE_CODE (rhs) == ADDR_EXPR)
        {
          tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
                                                    &rhs_offset);
          if (!tem)
            return (void *)-1;
          if (TREE_CODE (tem) == MEM_REF
              && host_integerp (TREE_OPERAND (tem, 1), 1))
            {
              rhs = TREE_OPERAND (tem, 0);
              rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
            }
          else if (DECL_P (tem))
            rhs = build_fold_addr_expr (tem);
          else
            return (void *)-1;
        }
      if (TREE_CODE (rhs) != SSA_NAME
          && TREE_CODE (rhs) != ADDR_EXPR)
        return (void *)-1;

      copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));

      /* The bases of the destination and the references have to agree.  */
      if ((TREE_CODE (base) != MEM_REF
           && !DECL_P (base))
          || (TREE_CODE (base) == MEM_REF
              && (TREE_OPERAND (base, 0) != lhs
                  || !host_integerp (TREE_OPERAND (base, 1), 1)))
          || (DECL_P (base)
              && (TREE_CODE (lhs) != ADDR_EXPR
                  || TREE_OPERAND (lhs, 0) != base)))
        return (void *)-1;

      /* And the access has to be contained within the memcpy destination.  */
      at = offset / BITS_PER_UNIT;
      if (TREE_CODE (base) == MEM_REF)
        at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
      if (lhs_offset > at
          || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
        return (void *)-1;

      /* Make room for 2 operands in the new reference.  */
      if (VEC_length (vn_reference_op_s, vr->operands) < 2)
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands, 2);
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands, 2);

      /* The looked-through reference is a simple MEM_REF.  The MEM_REF
         constant operand and the recorded OFF must agree; both encode
         the source byte offset at - lhs_offset + rhs_offset.  */
      memset (&op, 0, sizeof (op));
      op.type = vr->type;
      op.opcode = MEM_REF;
      op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
      op.off = at - lhs_offset + rhs_offset;
      VEC_replace (vn_reference_op_s, vr->operands, 0, &op);
      op.type = TREE_TYPE (rhs);
      op.opcode = TREE_CODE (rhs);
      op.op0 = rhs;
      op.off = -1;
      VEC_replace (vn_reference_op_s, vr->operands, 1, &op);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
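/* Illustrative summary of case 1 above (hypothetical GIMPLE): after

     memset (&a, 0, sizeof (a));
     x_1 = a.f;

   the walk from x_1's VUSE reaches the memset, proves the read is
   covered by the zeroed extent, and records build_zero_cst for the
   reference, so x_1 value-numbers to 0 without ever expanding the
   call.  */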
1837 /* Lookup a reference operation by it's parts, in the current hash table.
1838 Returns the resulting value number if it exists in the hash table,
1839 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1840 vn_reference_t stored in the hashtable if something is found. */
1842 tree
1843 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
1844 VEC (vn_reference_op_s, heap) *operands,
1845 vn_reference_t *vnresult, vn_lookup_kind kind)
1847 struct vn_reference_s vr1;
1848 vn_reference_t tmp;
1849 tree cst;
1851 if (!vnresult)
1852 vnresult = &tmp;
1853 *vnresult = NULL;
1855 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1856 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
1857 VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
1858 VEC_length (vn_reference_op_s, operands));
1859 memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
1860 VEC_address (vn_reference_op_s, operands),
1861 sizeof (vn_reference_op_s)
1862 * VEC_length (vn_reference_op_s, operands));
1863 vr1.operands = operands = shared_lookup_references
1864 = valueize_refs (shared_lookup_references);
1865 vr1.type = type;
1866 vr1.set = set;
1867 vr1.hashcode = vn_reference_compute_hash (&vr1);
1868 if ((cst = fully_constant_vn_reference_p (&vr1)))
1869 return cst;
1871 vn_reference_lookup_1 (&vr1, vnresult);
1872 if (!*vnresult
1873 && kind != VN_NOWALK
1874 && vr1.vuse)
1876 ao_ref r;
1877 vn_walk_kind = kind;
1878 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
1879 *vnresult =
1880 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1881 vn_reference_lookup_2,
1882 vn_reference_lookup_3, &vr1);
1883 if (vr1.operands != operands)
1884 VEC_free (vn_reference_op_s, heap, vr1.operands);
1887 if (*vnresult)
1888 return (*vnresult)->result;
1890 return NULL_TREE;
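/* For illustration, a hypothetical caller of the above:

     vn_reference_t res;
     tree val = vn_reference_lookup_pieces (vuse, set, type, operands,
                                            &res, VN_WALK);

   VAL is NULL_TREE if no equivalent reference is known; otherwise it is
   the value number and RES points at the hash table entry.  */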
1893 /* Lookup OP in the current hash table, and return the resulting value
1894 number if it exists in the hash table. Return NULL_TREE if it does
1895 not exist in the hash table or if the result field of the structure
1896 was NULL.  VNRESULT will be filled in with the vn_reference_t
1897 stored in the hashtable if one exists. */
1899 tree
1900 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
1901 vn_reference_t *vnresult)
1903 VEC (vn_reference_op_s, heap) *operands;
1904 struct vn_reference_s vr1;
1905 tree cst;
1906 bool valueized_anything;
1908 if (vnresult)
1909 *vnresult = NULL;
1911 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1912 vr1.operands = operands
1913 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
1914 vr1.type = TREE_TYPE (op);
1915 vr1.set = get_alias_set (op);
1916 vr1.hashcode = vn_reference_compute_hash (&vr1);
1917 if ((cst = fully_constant_vn_reference_p (&vr1)))
1918 return cst;
1920 if (kind != VN_NOWALK
1921 && vr1.vuse)
1923 vn_reference_t wvnresult;
1924 ao_ref r;
1925 /* Make sure to use a valueized reference if we valueized anything.
1926 Otherwise preserve the full reference for advanced TBAA. */
1927 if (!valueized_anything
1928 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
1929 vr1.operands))
1930 ao_ref_init (&r, op);
1931 vn_walk_kind = kind;
1932 wvnresult =
1933 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1934 vn_reference_lookup_2,
1935 vn_reference_lookup_3, &vr1);
1936 if (vr1.operands != operands)
1937 VEC_free (vn_reference_op_s, heap, vr1.operands);
1938 if (wvnresult)
1940 if (vnresult)
1941 *vnresult = wvnresult;
1942 return wvnresult->result;
1945 return NULL_TREE;
1948 return vn_reference_lookup_1 (&vr1, vnresult);
1952 /* Insert OP into the current hash table with a value number of
1953 RESULT, and return the resulting reference structure we created. */
1955 vn_reference_t
1956 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
1958 void **slot;
1959 vn_reference_t vr1;
1961 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1962 if (TREE_CODE (result) == SSA_NAME)
1963 vr1->value_id = VN_INFO (result)->value_id;
1964 else
1965 vr1->value_id = get_or_alloc_constant_value_id (result);
1966 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1967 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
1968 vr1->type = TREE_TYPE (op);
1969 vr1->set = get_alias_set (op);
1970 vr1->hashcode = vn_reference_compute_hash (vr1);
1971 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
1972 vr1->result_vdef = vdef;
1974 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1975 INSERT);
1977 /* Because we look up stores using vuses, and value number failures
1978 using the vdefs (see visit_reference_op_store for how and why),
1979 it's possible that on failure we may try to insert an already
1980 inserted store. This is not wrong; there is no SSA name for a
1981 store that we could use as a differentiator anyway. Thus, unlike
1982 the other lookup functions, you cannot gcc_assert (!*slot)
1983 here. */
1985 /* But free the old slot in case of a collision. */
1986 if (*slot)
1987 free_reference (*slot);
1989 *slot = vr1;
1990 return vr1;
1993 /* Insert a reference by its pieces into the current hash table with
1994 a value number of RESULT. Return the resulting reference
1995 structure we created. */
1997 vn_reference_t
1998 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
1999 VEC (vn_reference_op_s, heap) *operands,
2000 tree result, unsigned int value_id)
2003 void **slot;
2004 vn_reference_t vr1;
2006 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2007 vr1->value_id = value_id;
2008 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2009 vr1->operands = valueize_refs (operands);
2010 vr1->type = type;
2011 vr1->set = set;
2012 vr1->hashcode = vn_reference_compute_hash (vr1);
2013 if (result && TREE_CODE (result) == SSA_NAME)
2014 result = SSA_VAL (result);
2015 vr1->result = result;
2017 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
2018 INSERT);
2020 /* At this point we should have all the things inserted that we have
2021 seen before, and we should never try inserting something that
2022 already exists. */
2023 gcc_assert (!*slot);
2024 if (*slot)
2025 free_reference (*slot);
2027 *slot = vr1;
2028 return vr1;
2031 /* Compute and return the hash value for nary operation VNO1. */
2033 hashval_t
2034 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2036 hashval_t hash;
2037 unsigned i;
2039 for (i = 0; i < vno1->length; ++i)
2040 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2041 vno1->op[i] = SSA_VAL (vno1->op[i]);
2043 if (vno1->length == 2
2044 && commutative_tree_code (vno1->opcode)
2045 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2047 tree temp = vno1->op[0];
2048 vno1->op[0] = vno1->op[1];
2049 vno1->op[1] = temp;
2052 hash = iterative_hash_hashval_t (vno1->opcode, 0);
2053 for (i = 0; i < vno1->length; ++i)
2054 hash = iterative_hash_expr (vno1->op[i], hash);
2056 return hash;
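/* For illustration (hypothetical SSA names): because of the operand
   swap above, the right-hand sides of

     tmp_1 = a_2 + b_3;
     tmp_4 = b_3 + a_2;

   hash their operands in the same canonical order, so both receive the
   same hash code and can be unified by vn_nary_op_eq.  */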
2059 /* Return the computed hashcode for nary operation P1. */
2061 static hashval_t
2062 vn_nary_op_hash (const void *p1)
2064 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2065 return vno1->hashcode;
2068 /* Compare nary operations P1 and P2 and return true if they are
2069 equivalent. */
2071 int
2072 vn_nary_op_eq (const void *p1, const void *p2)
2074 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2075 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
2076 unsigned i;
2078 if (vno1->hashcode != vno2->hashcode)
2079 return false;
2081 if (vno1->length != vno2->length)
2082 return false;
2084 if (vno1->opcode != vno2->opcode
2085 || !types_compatible_p (vno1->type, vno2->type))
2086 return false;
2088 for (i = 0; i < vno1->length; ++i)
2089 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2090 return false;
2092 return true;
2095 /* Initialize VNO from the pieces provided. */
2097 static void
2098 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2099 enum tree_code code, tree type, tree *ops)
2101 vno->opcode = code;
2102 vno->length = length;
2103 vno->type = type;
2104 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2107 /* Initialize VNO from OP. */
2109 static void
2110 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2112 unsigned i;
2114 vno->opcode = TREE_CODE (op);
2115 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2116 vno->type = TREE_TYPE (op);
2117 for (i = 0; i < vno->length; ++i)
2118 vno->op[i] = TREE_OPERAND (op, i);
2121 /* Return the number of operands for a vn_nary ops structure from STMT. */
2123 static unsigned int
2124 vn_nary_length_from_stmt (gimple stmt)
2126 switch (gimple_assign_rhs_code (stmt))
2128 case REALPART_EXPR:
2129 case IMAGPART_EXPR:
2130 case VIEW_CONVERT_EXPR:
2131 return 1;
2133 case CONSTRUCTOR:
2134 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2136 default:
2137 return gimple_num_ops (stmt) - 1;
2141 /* Initialize VNO from STMT. */
2143 static void
2144 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2146 unsigned i;
2148 vno->opcode = gimple_assign_rhs_code (stmt);
2149 vno->type = gimple_expr_type (stmt);
2150 switch (vno->opcode)
2152 case REALPART_EXPR:
2153 case IMAGPART_EXPR:
2154 case VIEW_CONVERT_EXPR:
2155 vno->length = 1;
2156 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2157 break;
2159 case CONSTRUCTOR:
2160 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2161 for (i = 0; i < vno->length; ++i)
2162 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2163 break;
2165 default:
2166 vno->length = gimple_num_ops (stmt) - 1;
2167 for (i = 0; i < vno->length; ++i)
2168 vno->op[i] = gimple_op (stmt, i + 1);
2172 /* Compute the hashcode for VNO and look for it in the hash table;
2173 return the resulting value number if it exists in the hash table.
2174 Return NULL_TREE if it does not exist in the hash table or if the
2175 result field of the operation is NULL. VNRESULT will contain the
2176 vn_nary_op_t from the hashtable if it exists. */
2178 static tree
2179 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2181 void **slot;
2183 if (vnresult)
2184 *vnresult = NULL;
2186 vno->hashcode = vn_nary_op_compute_hash (vno);
2187 slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
2188 NO_INSERT);
2189 if (!slot && current_info == optimistic_info)
2190 slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
2191 NO_INSERT);
2192 if (!slot)
2193 return NULL_TREE;
2194 if (vnresult)
2195 *vnresult = (vn_nary_op_t)*slot;
2196 return ((vn_nary_op_t)*slot)->result;
2199 /* Lookup an n-ary operation by its pieces and return the resulting value
2200 number if it exists in the hash table. Return NULL_TREE if it does
2201 not exist in the hash table or if the result field of the operation
2202 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2203 if it exists. */
2205 tree
2206 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2207 tree type, tree *ops, vn_nary_op_t *vnresult)
2209 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2210 sizeof_vn_nary_op (length));
2211 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2212 return vn_nary_op_lookup_1 (vno1, vnresult);
2215 /* Lookup OP in the current hash table, and return the resulting value
2216 number if it exists in the hash table. Return NULL_TREE if it does
2217 not exist in the hash table or if the result field of the operation
2218 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2219 if it exists. */
2221 tree
2222 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2224 vn_nary_op_t vno1
2225 = XALLOCAVAR (struct vn_nary_op_s,
2226 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2227 init_vn_nary_op_from_op (vno1, op);
2228 return vn_nary_op_lookup_1 (vno1, vnresult);
2231 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2232 value number if it exists in the hash table. Return NULL_TREE if
2233 it does not exist in the hash table. VNRESULT will contain the
2234 vn_nary_op_t from the hashtable if it exists. */
2236 tree
2237 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2239 vn_nary_op_t vno1
2240 = XALLOCAVAR (struct vn_nary_op_s,
2241 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2242 init_vn_nary_op_from_stmt (vno1, stmt);
2243 return vn_nary_op_lookup_1 (vno1, vnresult);
2246 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2248 static vn_nary_op_t
2249 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2251 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2254 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2255 obstack. */
2257 static vn_nary_op_t
2258 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2260 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2261 &current_info->nary_obstack);
2263 vno1->value_id = value_id;
2264 vno1->length = length;
2265 vno1->result = result;
2267 return vno1;
2270 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2271 VNO->HASHCODE first. */
2273 static vn_nary_op_t
2274 vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash)
2276 void **slot;
2278 if (compute_hash)
2279 vno->hashcode = vn_nary_op_compute_hash (vno);
2281 slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT);
2282 gcc_assert (!*slot);
2284 *slot = vno;
2285 return vno;
2288 /* Insert an n-ary operation into the current hash table using its
2289 pieces. Return the vn_nary_op_t structure we created and put in
2290 the hashtable. */
2292 vn_nary_op_t
2293 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2294 tree type, tree *ops,
2295 tree result, unsigned int value_id)
2297 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2298 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2299 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2302 /* Insert OP into the current hash table with a value number of
2303 RESULT. Return the vn_nary_op_t structure we created and put in
2304 the hashtable. */
2306 vn_nary_op_t
2307 vn_nary_op_insert (tree op, tree result)
2309 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2310 vn_nary_op_t vno1;
2312 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2313 init_vn_nary_op_from_op (vno1, op);
2314 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2317 /* Insert the rhs of STMT into the current hash table with a value number of
2318 RESULT. */
2320 vn_nary_op_t
2321 vn_nary_op_insert_stmt (gimple stmt, tree result)
2323 vn_nary_op_t vno1
2324 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2325 result, VN_INFO (result)->value_id);
2326 init_vn_nary_op_from_stmt (vno1, stmt);
2327 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2330 /* Compute a hashcode for PHI operation VP1 and return it. */
2332 static inline hashval_t
2333 vn_phi_compute_hash (vn_phi_t vp1)
2335 hashval_t result;
2336 int i;
2337 tree phi1op;
2338 tree type;
2340 result = vp1->block->index;
2342 /* If all PHI arguments are constants we need to distinguish
2343 the PHI node via its type. */
2344 type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
2345 result += (INTEGRAL_TYPE_P (type)
2346 + (INTEGRAL_TYPE_P (type)
2347 ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
2349 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2351 if (phi1op == VN_TOP)
2352 continue;
2353 result = iterative_hash_expr (phi1op, result);
2356 return result;
2359 /* Return the computed hashcode for phi operation P1. */
2361 static hashval_t
2362 vn_phi_hash (const void *p1)
2364 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2365 return vp1->hashcode;
2368 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2370 static int
2371 vn_phi_eq (const void *p1, const void *p2)
2373 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2374 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
2376 if (vp1->hashcode != vp2->hashcode)
2377 return false;
2379 if (vp1->block == vp2->block)
2381 int i;
2382 tree phi1op;
2384 /* If the PHI nodes do not have compatible types,
2385 they are not the same. */
2386 if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
2387 TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
2388 return false;
2390 /* Any phi in the same block will have its arguments in the
2391 same edge order, because of how we store phi nodes. */
2392 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2394 tree phi2op = VEC_index (tree, vp2->phiargs, i);
2395 if (phi1op == VN_TOP || phi2op == VN_TOP)
2396 continue;
2397 if (!expressions_equal_p (phi1op, phi2op))
2398 return false;
2400 return true;
2402 return false;
2405 static VEC(tree, heap) *shared_lookup_phiargs;
2407 /* Lookup PHI in the current hash table, and return the resulting
2408 value number if it exists in the hash table. Return NULL_TREE if
2409 it does not exist in the hash table. */
2411 static tree
2412 vn_phi_lookup (gimple phi)
2414 void **slot;
2415 struct vn_phi_s vp1;
2416 unsigned i;
2418 VEC_truncate (tree, shared_lookup_phiargs, 0);
2420 /* Canonicalize the SSA_NAMEs to their value numbers. */
2421 for (i = 0; i < gimple_phi_num_args (phi); i++)
2423 tree def = PHI_ARG_DEF (phi, i);
2424 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2425 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
2427 vp1.phiargs = shared_lookup_phiargs;
2428 vp1.block = gimple_bb (phi);
2429 vp1.hashcode = vn_phi_compute_hash (&vp1);
2430 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
2431 NO_INSERT);
2432 if (!slot && current_info == optimistic_info)
2433 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
2434 NO_INSERT);
2435 if (!slot)
2436 return NULL_TREE;
2437 return ((vn_phi_t)*slot)->result;
2440 /* Insert PHI into the current hash table with a value number of
2441 RESULT. */
2443 static vn_phi_t
2444 vn_phi_insert (gimple phi, tree result)
2446 void **slot;
2447 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2448 unsigned i;
2449 VEC (tree, heap) *args = NULL;
2451 /* Canonicalize the SSA_NAMEs to their value numbers. */
2452 for (i = 0; i < gimple_phi_num_args (phi); i++)
2454 tree def = PHI_ARG_DEF (phi, i);
2455 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2456 VEC_safe_push (tree, heap, args, def);
2458 vp1->value_id = VN_INFO (result)->value_id;
2459 vp1->phiargs = args;
2460 vp1->block = gimple_bb (phi);
2461 vp1->result = result;
2462 vp1->hashcode = vn_phi_compute_hash (vp1);
2464 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
2465 INSERT);
2467 /* Because we iterate over phi operations more than once, it's
2468 possible the slot might already exist here, hence no assert.  */
2469 *slot = vp1;
2470 return vp1;
2474 /* Print set of components in strongly connected component SCC to OUT. */
2476 static void
2477 print_scc (FILE *out, VEC (tree, heap) *scc)
2479 tree var;
2480 unsigned int i;
2482 fprintf (out, "SCC consists of:");
2483 FOR_EACH_VEC_ELT (tree, scc, i, var)
2485 fprintf (out, " ");
2486 print_generic_expr (out, var, 0);
2488 fprintf (out, "\n");
2491 /* Set the value number of FROM to TO; return true if it has changed
2492 as a result. */
2494 static inline bool
2495 set_ssa_val_to (tree from, tree to)
2497 tree currval = SSA_VAL (from);
2499 if (from != to)
2501 if (currval == from)
2503 if (dump_file && (dump_flags & TDF_DETAILS))
2505 fprintf (dump_file, "Not changing value number of ");
2506 print_generic_expr (dump_file, from, 0);
2507 fprintf (dump_file, " from VARYING to ");
2508 print_generic_expr (dump_file, to, 0);
2509 fprintf (dump_file, "\n");
2511 return false;
2513 else if (TREE_CODE (to) == SSA_NAME
2514 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2515 to = from;
2518 /* The only things we allow as value numbers are VN_TOP, SSA names
2519 and invariants, so assert that here. */
2520 gcc_assert (to != NULL_TREE
2521 && (to == VN_TOP
2522 || TREE_CODE (to) == SSA_NAME
2523 || is_gimple_min_invariant (to)));
2525 if (dump_file && (dump_flags & TDF_DETAILS))
2527 fprintf (dump_file, "Setting value number of ");
2528 print_generic_expr (dump_file, from, 0);
2529 fprintf (dump_file, " to ");
2530 print_generic_expr (dump_file, to, 0);
2533 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
2535 VN_INFO (from)->valnum = to;
2536 if (dump_file && (dump_flags & TDF_DETAILS))
2537 fprintf (dump_file, " (changed)\n");
2538 return true;
2540 if (dump_file && (dump_flags & TDF_DETAILS))
2541 fprintf (dump_file, "\n");
2542 return false;
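/* For illustration: once set_ssa_val_to (x_1, x_1) has made a
   hypothetical name x_1 VARYING (its value number is itself), a later
   set_ssa_val_to (x_1, y_2) hits the currval == from check above and
   returns false, leaving x_1 varying.  */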
2545 /* Mark as processed all the definitions in the defining stmt of USE, or
2546 the USE itself. */
2548 static void
2549 mark_use_processed (tree use)
2551 ssa_op_iter iter;
2552 def_operand_p defp;
2553 gimple stmt = SSA_NAME_DEF_STMT (use);
2555 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
2557 VN_INFO (use)->use_processed = true;
2558 return;
2561 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2563 tree def = DEF_FROM_PTR (defp);
2565 VN_INFO (def)->use_processed = true;
2569 /* Value number all definitions in STMT to themselves.
2570 Return true if a value number changed. */
2572 static bool
2573 defs_to_varying (gimple stmt)
2575 bool changed = false;
2576 ssa_op_iter iter;
2577 def_operand_p defp;
2579 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2581 tree def = DEF_FROM_PTR (defp);
2582 changed |= set_ssa_val_to (def, def);
2584 return changed;
2587 static bool expr_has_constants (tree expr);
2588 static tree valueize_expr (tree expr);
2590 /* Visit a copy between LHS and RHS; return true if the value number
2591 changed. */
2593 static bool
2594 visit_copy (tree lhs, tree rhs)
2596 /* Follow chains of copies to their destination. */
2597 while (TREE_CODE (rhs) == SSA_NAME
2598 && SSA_VAL (rhs) != rhs)
2599 rhs = SSA_VAL (rhs);
2601 /* The copy may have a more interesting constant-filled expression
2602 (we don't, since we know our RHS is just an SSA name). */
2603 if (TREE_CODE (rhs) == SSA_NAME)
2605 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2606 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2609 return set_ssa_val_to (lhs, rhs);
2612 /* Visit a nary operator RHS, value number it, and return true if the
2613 value number of LHS has changed as a result. */
2615 static bool
2616 visit_nary_op (tree lhs, gimple stmt)
2618 bool changed = false;
2619 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2621 if (result)
2622 changed = set_ssa_val_to (lhs, result);
2623 else
2625 changed = set_ssa_val_to (lhs, lhs);
2626 vn_nary_op_insert_stmt (stmt, lhs);
2629 return changed;
2632 /* Visit a call STMT storing into LHS. Return true if the value number
2633 of the LHS has changed as a result. */
2635 static bool
2636 visit_reference_op_call (tree lhs, gimple stmt)
2638 bool changed = false;
2639 struct vn_reference_s vr1;
2640 vn_reference_t vnresult = NULL;
2641 tree vuse = gimple_vuse (stmt);
2642 tree vdef = gimple_vdef (stmt);
2644 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
2645 if (lhs && TREE_CODE (lhs) != SSA_NAME)
2646 lhs = NULL_TREE;
2648 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2649 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2650 vr1.type = gimple_expr_type (stmt);
2651 vr1.set = 0;
2652 vr1.hashcode = vn_reference_compute_hash (&vr1);
2653 vn_reference_lookup_1 (&vr1, &vnresult);
2655 if (vnresult)
2657 if (vnresult->result_vdef)
2658 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
2660 if (!vnresult->result && lhs)
2661 vnresult->result = lhs;
2663 if (vnresult->result && lhs)
2665 changed |= set_ssa_val_to (lhs, vnresult->result);
2667 if (VN_INFO (vnresult->result)->has_constants)
2668 VN_INFO (lhs)->has_constants = true;
2671 else
2673 void **slot;
2674 vn_reference_t vr2;
2675 if (vdef)
2676 changed |= set_ssa_val_to (vdef, vdef);
2677 if (lhs)
2678 changed |= set_ssa_val_to (lhs, lhs);
2679 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2680 vr2->vuse = vr1.vuse;
2681 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2682 vr2->type = vr1.type;
2683 vr2->set = vr1.set;
2684 vr2->hashcode = vr1.hashcode;
2685 vr2->result = lhs;
2686 vr2->result_vdef = vdef;
2687 slot = htab_find_slot_with_hash (current_info->references,
2688 vr2, vr2->hashcode, INSERT);
2689 if (*slot)
2690 free_reference (*slot);
2691 *slot = vr2;
2694 return changed;
2697 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2698 and return true if the value number of the LHS has changed as a result. */
2700 static bool
2701 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2703 bool changed = false;
2704 tree last_vuse;
2705 tree result;
2707 last_vuse = gimple_vuse (stmt);
2708 last_vuse_ptr = &last_vuse;
2709 result = vn_reference_lookup (op, gimple_vuse (stmt),
2710 default_vn_walk_kind, NULL);
2711 last_vuse_ptr = NULL;
2713 /* If we have a VCE, try looking up its operand as it might be stored in
2714 a different type. */
2715 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2716 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2717 default_vn_walk_kind, NULL);
2719 /* We handle type-punning through unions by value-numbering based
2720 on offset and size of the access. Be prepared to handle a
2721 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
2722 if (result
2723 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2725 /* We will be setting the value number of lhs to the value number
2726 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2727 So first simplify and look up this expression to see if it
2728 is already available. */
2729 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2730 if ((CONVERT_EXPR_P (val)
2731 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2732 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2734 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2735 if ((CONVERT_EXPR_P (tem)
2736 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2737 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2738 TREE_TYPE (val), tem)))
2739 val = tem;
2741 result = val;
2742 if (!is_gimple_min_invariant (val)
2743 && TREE_CODE (val) != SSA_NAME)
2744 result = vn_nary_op_lookup (val, NULL);
2745 /* If the expression is not yet available, value-number lhs to
2746 a new SSA_NAME we create. */
2747 if (!result)
2749 result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
2750 /* Initialize value-number information properly. */
2751 VN_INFO_GET (result)->valnum = result;
2752 VN_INFO (result)->value_id = get_next_value_id ();
2753 VN_INFO (result)->expr = val;
2754 VN_INFO (result)->has_constants = expr_has_constants (val);
2755 VN_INFO (result)->needs_insertion = true;
2756 /* As all "inserted" statements are singleton SCCs, insert
2757 into the valid table. This is strictly needed to
2758 avoid re-generating new value SSA_NAMEs for the same
2759 expression during SCC iteration over and over (the
2760 optimistic table gets cleared after each iteration).
2761 We do not need to insert into the optimistic table, as
2762 lookups there will fall back to the valid table. */
2763 if (current_info == optimistic_info)
2765 current_info = valid_info;
2766 vn_nary_op_insert (val, result);
2767 current_info = optimistic_info;
2769 else
2770 vn_nary_op_insert (val, result);
2771 if (dump_file && (dump_flags & TDF_DETAILS))
2773 fprintf (dump_file, "Inserting name ");
2774 print_generic_expr (dump_file, result, 0);
2775 fprintf (dump_file, " for expression ");
2776 print_generic_expr (dump_file, val, 0);
2777 fprintf (dump_file, "\n");
2782 if (result)
2784 changed = set_ssa_val_to (lhs, result);
2785 if (TREE_CODE (result) == SSA_NAME
2786 && VN_INFO (result)->has_constants)
2788 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2789 VN_INFO (lhs)->has_constants = true;
2792 else
2794 changed = set_ssa_val_to (lhs, lhs);
2795 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
2798 return changed;
2802 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2803 and return true if the value number of the LHS has changed as a result. */
2805 static bool
2806 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2808 bool changed = false;
2809 vn_reference_t vnresult = NULL;
2810 tree result, assign;
2811 bool resultsame = false;
2812 tree vuse = gimple_vuse (stmt);
2813 tree vdef = gimple_vdef (stmt);
2815 /* First we want to look up using the *vuses* from the store and see
2816 whether the last store to this location with the same address
2817 had the same value.
2819 The vuses represent the memory state before the store. If the
2820 memory state, address, and value of the store are the same as those
2821 of the last store to this location, then this store will produce the
2822 same memory state as that store.
2824 In this case the vdef versions for this store are value numbered to those
2825 vuse versions, since they represent the same memory state after
2826 this store.
2828 Otherwise, the vdefs for the store are used when inserting into
2829 the table, since the store generates a new memory state. */
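/* For illustration (hypothetical GIMPLE):

     # .MEM_3 = VDEF <.MEM_2>
     a = x_1;
     ...
     # .MEM_5 = VDEF <.MEM_4>
     a = x_1;

   If the lookup below, done with vuse .MEM_4, shows that A already
   holds x_1, the second store changes nothing and its vdef .MEM_5 is
   value numbered to its vuse .MEM_4.  */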
2831 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);
2833 if (result)
2835 if (TREE_CODE (result) == SSA_NAME)
2836 result = SSA_VAL (result);
2837 if (TREE_CODE (op) == SSA_NAME)
2838 op = SSA_VAL (op);
2839 resultsame = expressions_equal_p (result, op);
2842 if (!result || !resultsame)
2844 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
2845 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
2846 if (vnresult)
2848 VN_INFO (vdef)->use_processed = true;
2849 return set_ssa_val_to (vdef, vnresult->result_vdef);
2853 if (!result || !resultsame)
2855 if (dump_file && (dump_flags & TDF_DETAILS))
2857 fprintf (dump_file, "No store match\n");
2858 fprintf (dump_file, "Value numbering store ");
2859 print_generic_expr (dump_file, lhs, 0);
2860 fprintf (dump_file, " to ");
2861 print_generic_expr (dump_file, op, 0);
2862 fprintf (dump_file, "\n");
2864 /* Have to set value numbers before insert, since insert is
2865 going to valueize the references in-place. */
2866 if (vdef)
2868 changed |= set_ssa_val_to (vdef, vdef);
2871 /* Do not insert structure copies into the tables. */
2872 if (is_gimple_min_invariant (op)
2873 || is_gimple_reg (op))
2874 vn_reference_insert (lhs, op, vdef, NULL);
2876 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
2877 vn_reference_insert (assign, lhs, vuse, vdef);
2879 else
2881 /* We had a match, so value number the vdef to have the value
2882 number of the vuse it came from. */
2884 if (dump_file && (dump_flags & TDF_DETAILS))
2885 fprintf (dump_file, "Store matched earlier value,"
2886 "value numbering store vdefs to matching vuses.\n");
2888 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
2891 return changed;
2894 /* Visit and value number PHI; return true if the value number
2895 changed. */
2897 static bool
2898 visit_phi (gimple phi)
2900 bool changed = false;
2901 tree result;
2902 tree sameval = VN_TOP;
2903 bool allsame = true;
2904 unsigned i;
2906 /* TODO: We could check for this in init_scc_vn, and replace this
2907 with a gcc_assert. */
2908 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2909 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2911 /* See if all non-TOP arguments have the same value. TOP is
2912 equivalent to everything, so we can ignore it. */
2913 for (i = 0; i < gimple_phi_num_args (phi); i++)
2915 tree def = PHI_ARG_DEF (phi, i);
2917 if (TREE_CODE (def) == SSA_NAME)
2918 def = SSA_VAL (def);
2919 if (def == VN_TOP)
2920 continue;
2921 if (sameval == VN_TOP)
2923 sameval = def;
2925 else
2927 if (!expressions_equal_p (def, sameval))
2929 allsame = false;
2930 break;
2935 /* If all arguments value numbered to the same value, the phi node
2936 has that value. */
2937 if (allsame)
2939 if (is_gimple_min_invariant (sameval))
2941 VN_INFO (PHI_RESULT (phi))->has_constants = true;
2942 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2944 else
2946 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2947 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2950 if (TREE_CODE (sameval) == SSA_NAME)
2951 return visit_copy (PHI_RESULT (phi), sameval);
2953 return set_ssa_val_to (PHI_RESULT (phi), sameval);
2956 /* Otherwise, see if it is equivalent to a phi node in this block. */
2957 result = vn_phi_lookup (phi);
2958 if (result)
2960 if (TREE_CODE (result) == SSA_NAME)
2961 changed = visit_copy (PHI_RESULT (phi), result);
2962 else
2963 changed = set_ssa_val_to (PHI_RESULT (phi), result);
2965 else
2967 vn_phi_insert (phi, PHI_RESULT (phi));
2968 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2969 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2970 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2973 return changed;
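/* For illustration (hypothetical GIMPLE): in

     x_3 = PHI <y_1(2), y_1(3)>

   both arguments value number to y_1, so ALLSAME holds and x_3 is
   handled as a copy of y_1 rather than given a fresh value.  */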
2976 /* Return true if EXPR contains constants. */
2978 static bool
2979 expr_has_constants (tree expr)
2981 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2983 case tcc_unary:
2984 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2986 case tcc_binary:
2987 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2988 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2989 /* Constants inside reference ops are rarely interesting, but
2990 it can take a lot of looking to find them. */
2991 case tcc_reference:
2992 case tcc_declaration:
2993 return false;
2994 default:
2995 return is_gimple_min_invariant (expr);
2997 return false;
3000 /* Return true if STMT contains constants. */
3002 static bool
3003 stmt_has_constants (gimple stmt)
3005 if (gimple_code (stmt) != GIMPLE_ASSIGN)
3006 return false;
3008 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
3010 case GIMPLE_UNARY_RHS:
3011 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
3013 case GIMPLE_BINARY_RHS:
3014 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
3015 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
3016 case GIMPLE_TERNARY_RHS:
3017 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
3018 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
3019 || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
3020 case GIMPLE_SINGLE_RHS:
3021 /* Constants inside reference ops are rarely interesting, but
3022 it can take a lot of looking to find them. */
3023 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
3024 default:
3025 gcc_unreachable ();
3027 return false;
3030 /* Replace SSA_NAMEs in expr with their value numbers, and return the
3031 result.
3032 This is performed in place. */
3034 static tree
3035 valueize_expr (tree expr)
3037 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3039 case tcc_binary:
3040 TREE_OPERAND (expr, 1) = vn_valueize (TREE_OPERAND (expr, 1));
3041 /* Fallthru. */
3042 case tcc_unary:
3043 TREE_OPERAND (expr, 0) = vn_valueize (TREE_OPERAND (expr, 0));
3044 break;
3045 default:;
3047 return expr;
3050 /* Simplify the binary expression RHS, and return the result if
3051 simplified. */
3053 static tree
3054 simplify_binary_expression (gimple stmt)
3056 tree result = NULL_TREE;
3057 tree op0 = gimple_assign_rhs1 (stmt);
3058 tree op1 = gimple_assign_rhs2 (stmt);
3059 enum tree_code code = gimple_assign_rhs_code (stmt);
3061 /* This will not catch every single case we could combine, but will
3062 catch those with constants. The goal here is to simultaneously
3063 combine constants between expressions, but avoid infinite
3064 expansion of expressions during simplification. */
3065 if (TREE_CODE (op0) == SSA_NAME)
3067 if (VN_INFO (op0)->has_constants
3068 || TREE_CODE_CLASS (code) == tcc_comparison
3069 || code == COMPLEX_EXPR)
3070 op0 = valueize_expr (vn_get_expr_for (op0));
3071 else
3072 op0 = vn_valueize (op0);
3075 if (TREE_CODE (op1) == SSA_NAME)
3077 if (VN_INFO (op1)->has_constants
3078 || code == COMPLEX_EXPR)
3079 op1 = valueize_expr (vn_get_expr_for (op1));
3080 else
3081 op1 = vn_valueize (op1);
3084 /* Pointer plus constant can be represented as invariant address.
3085 Do so to allow further propagation; see also tree forwprop. */
3086 if (code == POINTER_PLUS_EXPR
3087 && host_integerp (op1, 1)
3088 && TREE_CODE (op0) == ADDR_EXPR
3089 && is_gimple_min_invariant (op0))
3090 return build_invariant_address (TREE_TYPE (op0),
3091 TREE_OPERAND (op0, 0),
3092 TREE_INT_CST_LOW (op1));
3094 /* Avoid folding if nothing changed. */
3095 if (op0 == gimple_assign_rhs1 (stmt)
3096 && op1 == gimple_assign_rhs2 (stmt))
3097 return NULL_TREE;
3099 fold_defer_overflow_warnings ();
3101 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3102 if (result)
3103 STRIP_USELESS_TYPE_CONVERSION (result);
3105 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3106 stmt, 0);
3108 /* Make sure result is not a complex expression consisting
3109 of operators applied to operators (i.e. (a + b) + (a + c)).
3110 Otherwise, we will end up with unbounded expressions if
3111 fold does anything at all. */
3112 if (result && valid_gimple_rhs_p (result))
3113 return result;
3115 return NULL_TREE;
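/* For illustration (hypothetical SSA names): given

     a_1 = b_2 + 1;
     c_3 = a_1 + 2;

   valueizing the first operand of c_3's RHS to the expression b_2 + 1
   lets fold_binary combine the constants into b_2 + 3, which passes
   valid_gimple_rhs_p and is returned as the simplification.  */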
3118 /* Simplify the unary expression RHS, and return the result if
3119 simplified. */
3121 static tree
3122 simplify_unary_expression (gimple stmt)
3124 tree result = NULL_TREE;
3125 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3126 enum tree_code code = gimple_assign_rhs_code (stmt);
3128 /* We handle some tcc_reference codes here that are all
3129 GIMPLE_ASSIGN_SINGLE codes. */
3130 if (code == REALPART_EXPR
3131 || code == IMAGPART_EXPR
3132 || code == VIEW_CONVERT_EXPR
3133 || code == BIT_FIELD_REF)
3134 op0 = TREE_OPERAND (op0, 0);
3136 if (TREE_CODE (op0) != SSA_NAME)
3137 return NULL_TREE;
3139 orig_op0 = op0;
3140 if (VN_INFO (op0)->has_constants)
3141 op0 = valueize_expr (vn_get_expr_for (op0));
3142 else if (CONVERT_EXPR_CODE_P (code)
3143 || code == REALPART_EXPR
3144 || code == IMAGPART_EXPR
3145 || code == VIEW_CONVERT_EXPR
3146 || code == BIT_FIELD_REF)
3148 /* We want to do tree-combining on conversion-like expressions.
3149 Make sure we feed only SSA_NAMEs or constants to fold though. */
3150 tree tem = valueize_expr (vn_get_expr_for (op0));
3151 if (UNARY_CLASS_P (tem)
3152 || BINARY_CLASS_P (tem)
3153 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3154 || TREE_CODE (tem) == SSA_NAME
3155 || TREE_CODE (tem) == CONSTRUCTOR
3156 || is_gimple_min_invariant (tem))
3157 op0 = tem;
3160 /* Avoid folding if nothing changed, but remember the expression. */
3161 if (op0 == orig_op0)
3162 return NULL_TREE;
3164 if (code == BIT_FIELD_REF)
3166 tree rhs = gimple_assign_rhs1 (stmt);
3167 result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3168 op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3170 else
3171 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3172 if (result)
3174 STRIP_USELESS_TYPE_CONVERSION (result);
3175 if (valid_gimple_rhs_p (result))
3176 return result;
3179 return NULL_TREE;
3182 /* Try to simplify RHS using equivalences and constant folding. */
3184 static tree
3185 try_to_simplify (gimple stmt)
3187 enum tree_code code = gimple_assign_rhs_code (stmt);
3188 tree tem;
3190 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
3191 in this case; there is no point in doing extra work. */
3192 if (code == SSA_NAME)
3193 return NULL_TREE;
3195 /* First try constant folding based on our current lattice. */
3196 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize);
3197 if (tem
3198 && (TREE_CODE (tem) == SSA_NAME
3199 || is_gimple_min_invariant (tem)))
3200 return tem;
3202 /* If that didn't work, try combining multiple statements. */
3203 switch (TREE_CODE_CLASS (code))
3205 case tcc_reference:
3206 /* Fallthrough for some unary codes that can operate on registers. */
3207 if (!(code == REALPART_EXPR
3208 || code == IMAGPART_EXPR
3209 || code == VIEW_CONVERT_EXPR
3210 || code == BIT_FIELD_REF))
3211 break;
3212 /* We could do a little more with unary ops, if they expand
3213 into binary ops, but it's debatable whether it is worth it. */
3214 case tcc_unary:
3215 return simplify_unary_expression (stmt);
3217 case tcc_comparison:
3218 case tcc_binary:
3219 return simplify_binary_expression (stmt);
3221 default:
3222 break;
3225 return NULL_TREE;
3228 /* Visit and value number USE; return true if the value number
3229 changed. */
3231 static bool
3232 visit_use (tree use)
3234 bool changed = false;
3235 gimple stmt = SSA_NAME_DEF_STMT (use);
3237 mark_use_processed (use);
3239 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3240 if (dump_file && (dump_flags & TDF_DETAILS)
3241 && !SSA_NAME_IS_DEFAULT_DEF (use))
3243 fprintf (dump_file, "Value numbering ");
3244 print_generic_expr (dump_file, use, 0);
3245 fprintf (dump_file, " stmt = ");
3246 print_gimple_stmt (dump_file, stmt, 0, 0);
3249 /* Handle uninitialized uses. */
3250 if (SSA_NAME_IS_DEFAULT_DEF (use))
3251 changed = set_ssa_val_to (use, use);
3252 else
3254 if (gimple_code (stmt) == GIMPLE_PHI)
3255 changed = visit_phi (stmt);
3256 else if (gimple_has_volatile_ops (stmt))
3257 changed = defs_to_varying (stmt);
3258 else if (is_gimple_assign (stmt))
3260 enum tree_code code = gimple_assign_rhs_code (stmt);
3261 tree lhs = gimple_assign_lhs (stmt);
3262 tree rhs1 = gimple_assign_rhs1 (stmt);
3263 tree simplified;
3265 /* Shortcut for copies. Simplifying copies is pointless,
3266 since we copy the expression and value they represent. */
3267 if (code == SSA_NAME
3268 && TREE_CODE (lhs) == SSA_NAME)
3270 changed = visit_copy (lhs, rhs1);
3271 goto done;
3273 simplified = try_to_simplify (stmt);
3274 if (simplified)
3276 if (dump_file && (dump_flags & TDF_DETAILS))
3278 fprintf (dump_file, "RHS ");
3279 print_gimple_expr (dump_file, stmt, 0, 0);
3280 fprintf (dump_file, " simplified to ");
3281 print_generic_expr (dump_file, simplified, 0);
3282 if (TREE_CODE (lhs) == SSA_NAME)
3283 fprintf (dump_file, " has constants %d\n",
3284 expr_has_constants (simplified));
3285 else
3286 fprintf (dump_file, "\n");
3289 /* Setting value numbers to constants will occasionally
3290 screw up phi congruence because constants are not
3291 uniquely associated with a single ssa name that can be
3292 looked up. */
3293 if (simplified
3294 && is_gimple_min_invariant (simplified)
3295 && TREE_CODE (lhs) == SSA_NAME)
3297 VN_INFO (lhs)->expr = simplified;
3298 VN_INFO (lhs)->has_constants = true;
3299 changed = set_ssa_val_to (lhs, simplified);
3300 goto done;
3302 else if (simplified
3303 && TREE_CODE (simplified) == SSA_NAME
3304 && TREE_CODE (lhs) == SSA_NAME)
3306 changed = visit_copy (lhs, simplified);
3307 goto done;
3309 else if (simplified)
3311 if (TREE_CODE (lhs) == SSA_NAME)
3313 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3314 /* We have to unshare the expression or else
3315 valueizing may change the IL stream. */
3316 VN_INFO (lhs)->expr = unshare_expr (simplified);
3319 else if (stmt_has_constants (stmt)
3320 && TREE_CODE (lhs) == SSA_NAME)
3321 VN_INFO (lhs)->has_constants = true;
3322 else if (TREE_CODE (lhs) == SSA_NAME)
3324 /* We reset expr and constantness here because we may
3325 have been value numbering optimistically, and
3326 iterating. They may become non-constant in this case,
3327 even if they were optimistically constant. */
3329 VN_INFO (lhs)->has_constants = false;
3330 VN_INFO (lhs)->expr = NULL_TREE;
3333 if ((TREE_CODE (lhs) == SSA_NAME
3334 /* We can substitute SSA_NAMEs that are live over
3335 abnormal edges with their constant value. */
3336 && !(gimple_assign_copy_p (stmt)
3337 && is_gimple_min_invariant (rhs1))
3338 && !(simplified
3339 && is_gimple_min_invariant (simplified))
3340 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3341 /* Stores or copies from SSA_NAMEs that are live over
3342 abnormal edges are a problem. */
3343 || (code == SSA_NAME
3344 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3345 changed = defs_to_varying (stmt);
3346 else if (REFERENCE_CLASS_P (lhs)
3347 || DECL_P (lhs))
3348 changed = visit_reference_op_store (lhs, rhs1, stmt);
3349 else if (TREE_CODE (lhs) == SSA_NAME)
3351 if ((gimple_assign_copy_p (stmt)
3352 && is_gimple_min_invariant (rhs1))
3353 || (simplified
3354 && is_gimple_min_invariant (simplified)))
3356 VN_INFO (lhs)->has_constants = true;
3357 if (simplified)
3358 changed = set_ssa_val_to (lhs, simplified);
3359 else
3360 changed = set_ssa_val_to (lhs, rhs1);
3362 else
3364 switch (get_gimple_rhs_class (code))
3366 case GIMPLE_UNARY_RHS:
3367 case GIMPLE_BINARY_RHS:
3368 case GIMPLE_TERNARY_RHS:
3369 changed = visit_nary_op (lhs, stmt);
3370 break;
3371 case GIMPLE_SINGLE_RHS:
3372 switch (TREE_CODE_CLASS (code))
3374 case tcc_reference:
3375 /* VOP-less references can go through unary case. */
3376 if ((code == REALPART_EXPR
3377 || code == IMAGPART_EXPR
3378 || code == VIEW_CONVERT_EXPR
3379 || code == BIT_FIELD_REF)
3380 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
3382 changed = visit_nary_op (lhs, stmt);
3383 break;
3385 /* Fallthrough. */
3386 case tcc_declaration:
3387 changed = visit_reference_op_load (lhs, rhs1, stmt);
3388 break;
3389 default:
3390 if (code == ADDR_EXPR)
3392 changed = visit_nary_op (lhs, stmt);
3393 break;
3395 else if (code == CONSTRUCTOR)
3397 changed = visit_nary_op (lhs, stmt);
3398 break;
3400 changed = defs_to_varying (stmt);
3402 break;
3403 default:
3404 changed = defs_to_varying (stmt);
3405 break;
3409 else
3410 changed = defs_to_varying (stmt);
3412 else if (is_gimple_call (stmt))
3414 tree lhs = gimple_call_lhs (stmt);
3416 /* ??? We could try to simplify calls. */
3418 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3420 if (stmt_has_constants (stmt))
3421 VN_INFO (lhs)->has_constants = true;
3422 else
3424 /* We reset expr and constantness here because we may
3425 have been value numbering optimistically, and
3426 iterating. They may become non-constant in this case,
3427 even if they were optimistically constant. */
3428 VN_INFO (lhs)->has_constants = false;
3429 VN_INFO (lhs)->expr = NULL_TREE;
3432 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3434 changed = defs_to_varying (stmt);
3435 goto done;
3439 if (!gimple_call_internal_p (stmt)
3440 && (/* Calls to the same function with the same vuse
3441 and the same operands do not necessarily return the same
3442 value, unless they're pure or const. */
3443 gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
3444 /* If calls have a vdef, subsequent calls won't have
3445 the same incoming vuse. So, if 2 calls with vdef have the
3446 same vuse, we know they're not subsequent.
3447 We can therefore give 2 calls to the same function with the
3448 same vuse and the same operands the same value number even
3449 though they are not subsequent, because there is no code in
3450 the program that can compare the 2 values. */
3451 || gimple_vdef (stmt)))
3452 changed = visit_reference_op_call (lhs, stmt);
3453 else
3454 changed = defs_to_varying (stmt);
3456 else
3457 changed = defs_to_varying (stmt);
3459 done:
3460 return changed;
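/* For illustration (hypothetical GIMPLE): two pure calls

     # VUSE <.MEM_2>
     x_1 = f (a_3);
     ...
     # VUSE <.MEM_2>
     y_4 = f (a_3);

   share their vuse and operands, so the condition above admits them to
   visit_reference_op_call and y_4 is value numbered to x_1.  */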
3463 /* Compare two operands by reverse postorder index.  */
3465 static int
3466 compare_ops (const void *pa, const void *pb)
3468 const tree opa = *((const tree *)pa);
3469 const tree opb = *((const tree *)pb);
3470 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3471 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3472 basic_block bba;
3473 basic_block bbb;
3475 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3476 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3477 else if (gimple_nop_p (opstmta))
3478 return -1;
3479 else if (gimple_nop_p (opstmtb))
3480 return 1;
3482 bba = gimple_bb (opstmta);
3483 bbb = gimple_bb (opstmtb);
3485 if (!bba && !bbb)
3486 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3487 else if (!bba)
3488 return -1;
3489 else if (!bbb)
3490 return 1;
3492 if (bba == bbb)
3494 if (gimple_code (opstmta) == GIMPLE_PHI
3495 && gimple_code (opstmtb) == GIMPLE_PHI)
3496 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3497 else if (gimple_code (opstmta) == GIMPLE_PHI)
3498 return -1;
3499 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3500 return 1;
3501 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3502 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3503 else
3504 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3506 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3509 /* Sort an array containing members of a strongly connected component
3510 SCC so that the members are ordered by RPO number.
3511 This means that when the sort is complete, iterating through the
3512 array will give you the members in RPO order. */
3514 static void
3515 sort_scc (VEC (tree, heap) *scc)
3517 VEC_qsort (tree, scc, compare_ops);
3520 /* Insert the no-longer-used nary ONARY into the hash tables INFO. */
3522 static void
3523 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3525 size_t size = sizeof_vn_nary_op (onary->length);
3526 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3527 &info->nary_obstack);
3528 memcpy (nary, onary, size);
3529 vn_nary_op_insert_into (nary, info->nary, false);
3532 /* Insert the no-longer-used phi OPHI into the hash tables INFO. */
3534 static void
3535 copy_phi (vn_phi_t ophi, vn_tables_t info)
3537 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3538 void **slot;
3539 memcpy (phi, ophi, sizeof (*phi));
3540 ophi->phiargs = NULL;
3541 slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
3542 gcc_assert (!*slot);
3543 *slot = phi;
3546 /* Insert the no-longer-used reference OREF into the hash tables INFO. */
3548 static void
3549 copy_reference (vn_reference_t oref, vn_tables_t info)
3551 vn_reference_t ref;
3552 void **slot;
3553 ref = (vn_reference_t) pool_alloc (info->references_pool);
3554 memcpy (ref, oref, sizeof (*ref));
3555 oref->operands = NULL;
3556 slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
3557 INSERT);
3558 if (*slot)
3559 free_reference (*slot);
3560 *slot = ref;
3563 /* Process a strongly connected component in the SSA graph. */
3565 static void
3566 process_scc (VEC (tree, heap) *scc)
3568 tree var;
3569 unsigned int i;
3570 unsigned int iterations = 0;
3571 bool changed = true;
3572 htab_iterator hi;
3573 vn_nary_op_t nary;
3574 vn_phi_t phi;
3575 vn_reference_t ref;
3577 /* If the SCC has a single member, just visit it. */
3578 if (VEC_length (tree, scc) == 1)
3580 tree use = VEC_index (tree, scc, 0);
3581 if (VN_INFO (use)->use_processed)
3582 return;
3583 /* We need to make sure it doesn't form a cycle itself, which can
3584 happen for self-referential PHI nodes. In that case we would
3585 end up inserting an expression with VN_TOP operands into the
3586 valid table which makes us derive bogus equivalences later.
3587 The cheapest way to check this is to assume it for all PHI nodes. */
3588 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3589 /* Fallthru to iteration. */ ;
3590 else
3592 visit_use (use);
3593 return;
3597 /* Iterate over the SCC with the optimistic table until it stops
3598 changing. */
3599 current_info = optimistic_info;
3600 while (changed)
3602 changed = false;
3603 iterations++;
3604 if (dump_file && (dump_flags & TDF_DETAILS))
3605 fprintf (dump_file, "Starting iteration %d\n", iterations);
3606 /* As we are value-numbering optimistically we have to
3607 clear the expression tables and the simplified expressions
3608 in each iteration until we converge. */
3609 htab_empty (optimistic_info->nary);
3610 htab_empty (optimistic_info->phis);
3611 htab_empty (optimistic_info->references);
3612 obstack_free (&optimistic_info->nary_obstack, NULL);
3613 gcc_obstack_init (&optimistic_info->nary_obstack);
3614 empty_alloc_pool (optimistic_info->phis_pool);
3615 empty_alloc_pool (optimistic_info->references_pool);
3616 FOR_EACH_VEC_ELT (tree, scc, i, var)
3617 VN_INFO (var)->expr = NULL_TREE;
3618 FOR_EACH_VEC_ELT (tree, scc, i, var)
3619 changed |= visit_use (var);
3622 statistics_histogram_event (cfun, "SCC iterations", iterations);
3624 /* Finally, copy the contents of the no longer used optimistic
3625 table to the valid table. */
3626 FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
3627 copy_nary (nary, valid_info);
3628 FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
3629 copy_phi (phi, valid_info);
3630 FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
3631 copy_reference (ref, valid_info);
3633 current_info = valid_info;
3636 DEF_VEC_O(ssa_op_iter);
3637 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
3639 /* Pop the components of the found SCC for NAME off the SCC stack
3640 and process them. Returns true if all went well, false if
3641 we ran into resource limits. */
3643 static bool
3644 extract_and_process_scc_for_name (tree name)
3646 VEC (tree, heap) *scc = NULL;
3647 tree x;
3649 /* Found an SCC; pop the components off the SCC stack and
3650 process them. */
3652 do
3653 x = VEC_pop (tree, sccstack);
3655 VN_INFO (x)->on_sccstack = false;
3656 VEC_safe_push (tree, heap, scc, x);
3657 } while (x != name);
3659 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
3660 if (VEC_length (tree, scc)
3661 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3663 if (dump_file)
3664 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3665 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
3666 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3667 return false;
3670 if (VEC_length (tree, scc) > 1)
3671 sort_scc (scc);
3673 if (dump_file && (dump_flags & TDF_DETAILS))
3674 print_scc (dump_file, scc);
3676 process_scc (scc);
3678 VEC_free (tree, heap, scc);
3680 return true;
3683 /* Depth first search on NAME to discover and process SCC's in the SSA
3684 graph.
3685 Execution of this algorithm relies on the fact that the SCC's are
3686 popped off the stack in topological order.
3687 Returns true if successful, false if we stopped processing SCC's due
3688 to resource constraints. */
3690 static bool
3691 DFS (tree name)
3693 VEC(ssa_op_iter, heap) *itervec = NULL;
3694 VEC(tree, heap) *namevec = NULL;
3695 use_operand_p usep = NULL;
3696 gimple defstmt;
3697 tree use;
3698 ssa_op_iter iter;
3700 start_over:
3701 /* SCC info */
3702 VN_INFO (name)->dfsnum = next_dfs_num++;
3703 VN_INFO (name)->visited = true;
3704 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3706 VEC_safe_push (tree, heap, sccstack, name);
3707 VN_INFO (name)->on_sccstack = true;
3708 defstmt = SSA_NAME_DEF_STMT (name);
3710 /* Recursively DFS on our operands, looking for SCC's. */
3711 if (!gimple_nop_p (defstmt))
3713 /* Push a new iterator. */
3714 if (gimple_code (defstmt) == GIMPLE_PHI)
3715 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3716 else
3717 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3719 else
3720 clear_and_done_ssa_iter (&iter);
3722 while (1)
3724 /* If we are done processing uses of a name, go up the stack
3725 of iterators and process SCCs as we found them. */
3726 if (op_iter_done (&iter))
3728 /* See if we found an SCC. */
3729 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3730 if (!extract_and_process_scc_for_name (name))
3732 VEC_free (tree, heap, namevec);
3733 VEC_free (ssa_op_iter, heap, itervec);
3734 return false;
3737 /* Check if we are done. */
3738 if (VEC_empty (tree, namevec))
3740 VEC_free (tree, heap, namevec);
3741 VEC_free (ssa_op_iter, heap, itervec);
3742 return true;
3745 /* Restore the last use walker and continue walking there. */
3746 use = name;
3747 name = VEC_pop (tree, namevec);
3748 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
3749 sizeof (ssa_op_iter));
3750 VEC_pop (ssa_op_iter, itervec);
3751 goto continue_walking;
3754 use = USE_FROM_PTR (usep);
3756 /* Since we handle phi nodes, we will sometimes get
3757 invariants in the use expression. */
3758 if (TREE_CODE (use) == SSA_NAME)
3760 if (! (VN_INFO (use)->visited))
3762 /* Recurse by pushing the current use walking state on
3763 the stack and starting over. */
3764 VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
3765 VEC_safe_push(tree, heap, namevec, name);
3766 name = use;
3767 goto start_over;
3769 continue_walking:
3770 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3771 VN_INFO (use)->low);
3773 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3774 && VN_INFO (use)->on_sccstack)
3776 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3777 VN_INFO (name)->low);
3781 usep = op_iter_next_use (&iter);
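/* For illustration of the low-link bookkeeping above (hypothetical SSA
   graph): in the cycle

     x_1 = PHI <0(2), y_2(3)>
     y_2 = x_1 + 1;

   the walk from x_1 visits y_2, whose use of x_1 finds x_1 still on the
   SCC stack; y_2's low value drops to x_1's dfsnum, so when the walk
   returns to x_1 with low == dfsnum both names are popped together as a
   single SCC and iterated as one unit.  */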
3785 /* Allocate a value number table. */
3787 static void
3788 allocate_vn_table (vn_tables_t table)
3790 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3791 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3792 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3793 free_reference);
3795 gcc_obstack_init (&table->nary_obstack);
3796 table->phis_pool = create_alloc_pool ("VN phis",
3797 sizeof (struct vn_phi_s),
3798 30);
3799 table->references_pool = create_alloc_pool ("VN references",
3800 sizeof (struct vn_reference_s),
3801 30);
3804 /* Free a value number table. */
3806 static void
3807 free_vn_table (vn_tables_t table)
3809 htab_delete (table->phis);
3810 htab_delete (table->nary);
3811 htab_delete (table->references);
3812 obstack_free (&table->nary_obstack, NULL);
3813 free_alloc_pool (table->phis_pool);
3814 free_alloc_pool (table->references_pool);
3817 static void
3818 init_scc_vn (void)
3820 size_t i;
3821 int j;
3822 int *rpo_numbers_temp;
3824 calculate_dominance_info (CDI_DOMINATORS);
3825 sccstack = NULL;
3826 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3827 free);
3829 constant_value_ids = BITMAP_ALLOC (NULL);
3831 next_dfs_num = 1;
3832 next_value_id = 1;
3834 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
3835 /* VEC_alloc doesn't actually grow it to the right size; it just
3836 preallocates the space to do so. */
3837 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
3838 gcc_obstack_init (&vn_ssa_aux_obstack);
3840 shared_lookup_phiargs = NULL;
3841 shared_lookup_references = NULL;
3842 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3843 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3844 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3846 /* rpo_numbers_temp is an array in RPO order: rpo[i] = bb means that
3847 the i'th block in RPO order is bb. We want to map bb's to RPO
3848 numbers, so we need to rearrange this array. */
3849 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3850 rpo_numbers[rpo_numbers_temp[j]] = j;
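  /* Worked example (illustrative only): if the RPO walk visits the
     blocks with indices 4, 2, 3, then rpo_numbers_temp is { 4, 2, 3 }
     and the loop above produces rpo_numbers[4] = 0, rpo_numbers[2] = 1
     and rpo_numbers[3] = 2, i.e. the inverse permutation.  */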
  XDELETE (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
        {
          VN_INFO_GET (name)->valnum = VN_TOP;
          VN_INFO (name)->expr = NULL_TREE;
          VN_INFO (name)->value_id = 0;
        }
    }

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}
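/* Illustrative note (not part of the original sources): OPTIMISTIC_INFO
   is the table used while an SCC is being iterated, where a back-edge
   PHI argument may still carry the initial value VN_TOP.  Optimistically
   ignoring VN_TOP arguments lets e.g.

     x_1 = PHI <0 (preheader), x_2 (latch)>

   value-number to 0 when x_2 later turns out to equal 0 as well; once
   the SCC stabilizes, the surviving entries are recorded in VALID_INFO.  */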
/* Free the data structures allocated by init_scc_vn and release SSA
   names we inserted during value numbering.  */

void
free_scc_vn (void)
{
  size_t i;

  htab_delete (constant_to_value_id);
  BITMAP_FREE (constant_value_ids);
  VEC_free (tree, heap, shared_lookup_phiargs);
  VEC_free (vn_reference_op_s, heap, shared_lookup_references);
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->needs_insertion)
        release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);

  VEC_free (tree, heap, sccstack);
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}
/* Set *ID if we computed something useful in RESULT: the value id of
   RESULT if it is an SSA name, or the constant value id if it is an
   invariant.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
        *id = VN_INFO (result)->value_id;
      else if (is_gimple_min_invariant (result))
        *id = get_or_alloc_constant_value_id (result);
    }
}
/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  htab_iterator hi;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HTAB_ELEMENT (valid_info->nary,
                         vno, vn_nary_op_t, hi)
    set_value_id_for_result (vno->result, &vno->value_id);

  FOR_EACH_HTAB_ELEMENT (valid_info->phis,
                         vp, vn_phi_t, hi)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HTAB_ELEMENT (valid_info->references,
                         vr, vn_reference_t, hi)
    set_value_id_for_result (vr->result, &vr->value_id);
}
/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
   how we use the alias oracle when walking memory references
   during the VN process.  */

bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  size_t i;
  tree param;
  bool changed = true;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();
  current_info = valid_info;

  /* Parameter default definitions value-number to themselves.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    {
      if (gimple_default_def (cfun, param) != NULL)
        {
          tree def = gimple_default_def (cfun, param);
          VN_INFO (def)->valnum = def;
        }
    }

  /* Walk all SSA names, value numbering SCCs as we find them.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->visited == false
          && !has_zero_uses (name))
        if (!DFS (name))
          {
            free_scc_vn ();
            return false;
          }
    }

  /* Initialize the value ids.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
        continue;
      info = VN_INFO (name);
      if (info->valnum == name
          || info->valnum == VN_TOP)
        info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
        info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate the value ids until they stop changing.  */
  while (changed)
    {
      changed = false;
      for (i = 1; i < num_ssa_names; ++i)
        {
          tree name = ssa_name (i);
          vn_ssa_aux_t info;
          if (!name)
            continue;
          info = VN_INFO (name);
          if (TREE_CODE (info->valnum) == SSA_NAME
              && info->valnum != name
              && info->value_id != VN_INFO (info->valnum)->value_id)
            {
              changed = true;
              info->value_id = VN_INFO (info->valnum)->value_id;
            }
        }
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          if (name
              && VN_INFO (name)->visited
              && SSA_VAL (name) != name)
            {
              print_generic_expr (dump_file, name, 0);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, SSA_VAL (name), 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  return true;
}
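/* Usage sketch (illustrative, not part of the original sources): a
   client pass typically drives SCCVN as

     if (!run_scc_vn (VN_WALK))
       return 0;
     ... query SSA_VAL (name) and VN_INFO (name)->value_id ...
     free_scc_vn ();

   which is roughly how tree-ssa-pre.c uses it for FRE and PRE.  */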
/* Return an upper bound on the value ids we have handed out (the
   next value id that would be allocated).  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}
/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
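/* Illustrative note (not part of the original sources): OEP_PURE_SAME
   additionally lets two calls to the same pure or const function with
   pairwise equal arguments compare equal, so e.g. two occurrences of
   strlen (s) can receive the same value number.  */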
/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (type)
               && TYPE_OVERFLOW_TRAPS (type))
        honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
                                       honor_trapv,
                                       honor_nans, honor_snans, rhs2,
                                       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}
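/* Illustrative example (not part of the original sources): for an
   integer division  x / y  where the divisor is not a nonzero constant,
   operation_could_trap_helper_p reports a possible trap (division by
   zero), so vn_nary_may_trap returns true and a client such as PRE can
   avoid inserting the division on paths where it was not originally
   executed.  */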