/* SCC value numbering for trees
   Copyright (C) 2006-2013 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "dumpfile.h"
#include "hashtab.h"
#include "alloc-pool.h"
#include "flags.h"
#include "bitmap.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "gimple-fold.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight line code, it
   is equivalent to a regular hash based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.
   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
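
/* A small worked illustration of the SCC scheme (an added example,
   not part of the original sources): for a loop such as

       i_1 = PHI <0 (entry), i_3 (latch)>
       i_3 = i_1 + 1;
       j_2 = PHI <0 (entry), j_4 (latch)>
       j_4 = j_2 + 1;

   i_1/i_3 and j_2/j_4 form two separate SSA cycles.  The SCC walk
   visits each cycle on its own, optimistically value numbers the
   members (both look like PHI <0, x + 1>, so i_1 and j_2 can be
   discovered equal), and re-iterates just those SCCs until the
   assumed value numbers stop changing, at which point the proven
   results are recorded in the valid tables.  */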
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;
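
/* A sketch of the intended discipline (an added illustration, not
   from the original sources): while iterating an SCC, CURRENT_INFO
   points at OPTIMISTIC_INFO, whose entries are flushed between
   iterations; only once the SCC has stabilized are results entered
   into VALID_INFO:

     current_info = optimistic_info;
     do
       ...value number the SCC members, clearing optimistic tables...
     while (anything changed);
     current_info = valid_info;
     ...re-visit the SCC to record the proven value numbers...  */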

/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static vec<tree> sccstack;


/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
    vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
  return newinfo;
}

/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;
  enum tree_code code;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is not an assignment use it directly.  */
  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  code = gimple_assign_rhs_code (def_stmt);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      if ((code == REALPART_EXPR
           || code == IMAGPART_EXPR
           || code == VIEW_CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
                                      0)) == SSA_NAME)
        expr = fold_build1 (code,
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (code,
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (code,
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    case tcc_exceptional:
      if (code == CONSTRUCTOR
          && TREE_CODE
               (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
        expr = gimple_assign_rhs1 (def_stmt);
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
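
/* Illustrative example (an added assumption about typical use, not
   from the original sources): if x_1 has value number v_2 and v_2 is
   defined by v_2 = y_3 + z_4, then vn_get_expr_for (x_1) builds and
   caches the tree <y_3 + z_4>, which the simplification code can fold
   against other expressions carrying the same value number.  */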

/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
        enum tree_code code = gimple_assign_rhs_code (stmt);
        tree rhs1 = gimple_assign_rhs1 (stmt);
        switch (get_gimple_rhs_class (code))
          {
          case GIMPLE_UNARY_RHS:
          case GIMPLE_BINARY_RHS:
          case GIMPLE_TERNARY_RHS:
            return VN_NARY;
          case GIMPLE_SINGLE_RHS:
            switch (TREE_CODE_CLASS (code))
              {
              case tcc_reference:
                /* VOP-less references can go through unary case.  */
                if ((code == REALPART_EXPR
                     || code == IMAGPART_EXPR
                     || code == VIEW_CONVERT_EXPR
                     || code == BIT_FIELD_REF)
                    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
                  return VN_NARY;

                /* Fallthrough.  */
              case tcc_declaration:
                return VN_REFERENCE;

              case tcc_constant:
                return VN_CONSTANT;

              default:
                if (code == ADDR_EXPR)
                  return (is_gimple_min_invariant (rhs1)
                          ? VN_CONSTANT : VN_REFERENCE);
                else if (code == CONSTRUCTOR)
                  return VN_NARY;
                return VN_NONE;
              }
          default:
            return VN_NONE;
          }
      }
    default:
      return VN_NONE;
    }
}
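
/* Hypothetical examples of the resulting classification (added
   illustration, not from the original sources):

     x_1 = y_2 + 1;        -> VN_NARY       (unary/binary/ternary rhs)
     x_1 = PHI <a_2, b_3>  -> VN_PHI
     x_1 = a.b[i_2];       -> VN_REFERENCE  (memory reference)
     x_1 = __real y_2;     -> VN_NARY       (VOP-less reference)
     x_1 = 42;             -> VN_CONSTANT  */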

/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  phi->phiargs.release ();
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  vr->operands.release ();
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, INSERT);
  if (*slot)
    return ((vn_constant_t)*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = (void *) vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
          /* We do not care for differences in type qualification.  */
          && (vro1->type == vro2->type
              || (vro1->type && vro2->type
                  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
                                         TYPE_MAIN_VARIANT (vro2->type))))
          && expressions_equal_p (vro1->op0, vro2->op0)
          && expressions_equal_p (vro1->op1, vro2->op1)
          && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
        deref = true;
      else if (vro->opcode != ADDR_EXPR)
        deref = false;
      if (vro->off != -1)
        {
          if (off == -1)
            off = 0;
          off += vro->off;
        }
      else
        {
          if (off != -1
              && off != 0)
            result = iterative_hash_hashval_t (off, result);
          off = -1;
          if (deref
              && vro->opcode == ADDR_EXPR)
            {
              if (vro->op0)
                {
                  tree op = TREE_OPERAND (vro->op0, 0);
                  result = iterative_hash_hashval_t (TREE_CODE (op), result);
                  result = iterative_hash_expr (op, result);
                }
            }
          else
            result = vn_reference_op_compute_hash (vro, result);
        }
    }
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
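
/* Note on the scheme above (added illustration, not from the original
   sources): runs of operands with known constant offsets are folded
   into a single accumulated OFF that is hashed once, so op chains that
   spell the same access differently, e.g. MEM[p_1 + 4].x versus
   MEM[p_1].y where .x and .y land at the same byte offset, hash
   identically, matching the cumulative-offset comparison performed by
   vn_reference_eq below.  */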

/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  unsigned i, j;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
        return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
           && (TYPE_PRECISION (vr1->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
           && (TYPE_PRECISION (vr2->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
        {
          if (vro1->opcode == MEM_REF)
            deref1 = true;
          if (vro1->off == -1)
            break;
          off1 += vro1->off;
        }
      for (; vr2->operands.iterate (j, &vro2); j++)
        {
          if (vro2->opcode == MEM_REF)
            deref2 = true;
          if (vro2->off == -1)
            break;
          off2 += vro2->off;
        }
      if (off1 != off2)
        return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
        {
          memset (&tem1, 0, sizeof (tem1));
          tem1.op0 = TREE_OPERAND (vro1->op0, 0);
          tem1.type = TREE_TYPE (tem1.op0);
          tem1.opcode = TREE_CODE (tem1.op0);
          vro1 = &tem1;
          deref1 = false;
        }
      if (deref2 && vro2->opcode == ADDR_EXPR)
        {
          memset (&tem2, 0, sizeof (tem2));
          tem2.op0 = TREE_OPERAND (vro2->op0, 0);
          tem2.type = TREE_TYPE (tem2.op0);
          tem2.opcode = TREE_CODE (tem2.op0);
          vro2 = &tem2;
          deref2 = false;
        }
      if (deref1 != deref2)
        return false;
      if (!vn_reference_op_eq (vro1, vro2))
        return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
         || vr2->operands.length () != j);

  return true;
}

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      result->safe_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      result->safe_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      result->safe_push (temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
        {
        case MODIFY_EXPR:
          temp.op0 = TREE_OPERAND (ref, 1);
          break;
        case WITH_SIZE_EXPR:
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.off = 0;
          break;
        case MEM_REF:
          /* The base address gets its own vn_reference_op_s structure.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          if (host_integerp (TREE_OPERAND (ref, 1), 0))
            temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          {
            tree this_offset = component_ref_field_offset (ref);
            if (this_offset
                && TREE_CODE (this_offset) == INTEGER_CST)
              {
                tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
                if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
                  {
                    double_int off
                      = tree_to_double_int (this_offset)
                        + tree_to_double_int (bit_offset)
                          .arshift (BITS_PER_UNIT == 8
                                    ? 3 : exact_log2 (BITS_PER_UNIT),
                                    HOST_BITS_PER_DOUBLE_INT);
                    if (off.fits_shwi ()
                        /* Prohibit value-numbering zero offset components
                           of addresses the same before the pass folding
                           __builtin_object_size had a chance to run
                           (checking cfun->after_inlining does the
                           trick here).  */
                        && (TREE_CODE (orig) != ADDR_EXPR
                            || !off.is_zero ()
                            || cfun->after_inlining))
                      temp.off = off.low;
                  }
              }
          }
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
          if (TREE_CODE (temp.op0) == INTEGER_CST
              && TREE_CODE (temp.op1) == INTEGER_CST
              && TREE_CODE (temp.op2) == INTEGER_CST)
            {
              double_int off = tree_to_double_int (temp.op0);
              off += -tree_to_double_int (temp.op1);
              off *= tree_to_double_int (temp.op2);
              if (off.fits_shwi ())
                temp.off = off.low;
            }
          break;
        case VAR_DECL:
          if (DECL_HARD_REGISTER (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthru.  */
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
          /* Canonicalize decls to MEM[&decl] which is what we end up with
             when valueizing MEM[ptr] with ptr = &decl.  */
          temp.opcode = MEM_REF;
          temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
          temp.off = 0;
          result->safe_push (temp);
          temp.opcode = ADDR_EXPR;
          temp.op0 = build_fold_addr_expr (ref);
          temp.type = TREE_TYPE (temp.op0);
          temp.off = -1;
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case FIXED_CST:
        case CONSTRUCTOR:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (IE they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration  */
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          temp.off = 0;
          break;
        case IMAGPART_EXPR:
          /* This is only interesting for its constant offset.  */
          temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
          break;
        default:
          gcc_unreachable ();
        }
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
          || TREE_CODE (ref) == MODIFY_EXPR
          || TREE_CODE (ref) == WITH_SIZE_EXPR
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
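
/* Illustrative decomposition (added example, not from the original
   sources): for a load from a.b[i_1], where A is a local aggregate,
   the ops vector is built outermost access first, roughly:

     { ARRAY_REF      op0=i_1  op1=lower-bound  op2=element-size }
     { COMPONENT_REF  op0=FIELD_DECL b }
     { MEM_REF        op0=0  off=0 }      <- decl canonicalized to MEM[&a]
     { ADDR_EXPR      op0=&a }

   so that an access spelled MEM[ptr_2].b[i_1] with ptr_2 == &a
   valueizes to a comparable vector.  */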

/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
        size = -1;
      else
        size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case ADDR_EXPR:
          /* Apart from ADDR_EXPR arguments to MEM_REF.  */
          if (base != NULL_TREE
              && TREE_CODE (base) == MEM_REF
              && op->op0
              && DECL_P (TREE_OPERAND (op->op0, 0)))
            {
              vn_reference_op_t pop = &ops[i-1];
              base = TREE_OPERAND (op->op0, 0);
              if (pop->off == -1)
                {
                  max_size = -1;
                  offset = 0;
                }
              else
                offset += pop->off * BITS_PER_UNIT;
              op0_p = NULL;
              break;
            }
          /* Fallthru.  */
        case CALL_EXPR:
          return false;

        /* Record the base objects.  */
        case MEM_REF:
          base_alias_set = get_deref_alias_set (op->op0);
          *op0_p = build2 (MEM_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          op0_p = NULL;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += tree_low_cst (op->op1, 0);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */

            if (op->op1
                || !host_integerp (DECL_FIELD_OFFSET (field), 1))
              max_size = -1;
            else
              {
                offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
                           * BITS_PER_UNIT);
                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (!host_integerp (op->op0, 0)
              || !host_integerp (op->op1, 0)
              || !host_integerp (op->op2, 0))
            max_size = -1;
          else
            {
              HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
              hindex -= TREE_INT_CST_LOW (op->op1);
              hindex *= TREE_INT_CST_LOW (op->op2);
              hindex *= BITS_PER_UNIT;
              offset += hindex;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  return true;
}
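
/* A sketch of the intent (added illustration, not from the original
   sources): given the ops for MEM[&a].b[2] this reconstructs base A,
   a cumulative bit OFFSET for .b[2], SIZE from the accessed type and
   MAX_SIZE >= SIZE, which is exactly the shape the alias oracle's
   refs_may_alias_p_1 queries consume.  */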

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  temp.off = -1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static vec<vn_reference_op_s>
create_reference_ops_from_ref (tree ref)
{
  vec<vn_reference_op_s> result = vNULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static vec<vn_reference_op_s>
create_reference_ops_from_call (gimple call)
{
  vec<vn_reference_op_s> result = vNULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
                            unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  HOST_WIDE_INT addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
                                             &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      double_int off = tree_to_double_int (mem_op->op0);
      off = off.sext (TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
      off += double_int::from_shwi (addr_offset);
      mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (host_integerp (mem_op->op0, 0))
        mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
      else
        mem_op->off = -1;
    }
}
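
/* Worked example (added, hypothetical, not from the original
   sources): for ops representing MEM[(&a.b.c) + 8],
   get_addr_base_and_unit_offset returns A with the unit offset of
   .b.c, say 4; the preceding MEM_REF offset is then rewritten from 8
   to 12 and the address operand to &a, yielding the canonical
   MEM[(&a) + 12].  */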

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
                                     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple def_stmt;
  enum tree_code code;
  double_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = tree_to_double_int (mem_op->op0);
  off = off.sext (TYPE_PRECISION (TREE_TYPE (mem_op->op0)));

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
                                                 &addr_offset);
      if (!addr_base
          || TREE_CODE (addr_base) != MEM_REF)
        return;

      off += double_int::from_shwi (addr_offset);
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
          || TREE_CODE (ptroff) != INTEGER_CST)
        return;

      off += tree_to_double_int (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (host_integerp (mem_op->op0, 0))
    mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}

/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
        arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
          || (arg0->opcode == ADDR_EXPR
              && is_gimple_min_invariant (arg0->op0)))
        anyconst = true;
      if (arg1
          && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
              || (arg1->opcode == ADDR_EXPR
                  && is_gimple_min_invariant (arg1->op0))))
        anyconst = true;
      if (anyconst)
        {
          tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
                                         arg1 ? 2 : 1,
                                         arg0->op0,
                                         arg1 ? arg1->op0 : NULL);
          if (folded
              && TREE_CODE (folded) == NOP_EXPR)
            folded = TREE_OPERAND (folded, 0);
          if (folded
              && is_gimple_min_invariant (folded))
            return folded;
        }
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
           && TREE_CODE (op->op0) == INTEGER_CST
           && integer_zerop (op->op1)
           && operands.length () == 2)
    {
      vn_reference_op_t arg0;
      arg0 = &operands[1];
      if (arg0->opcode == STRING_CST
          && (TYPE_MODE (op->type)
              == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
          && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
          && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
          && tree_int_cst_sgn (op->op0) >= 0
          && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
        return build_int_cst_type (op->type,
                                   (TREE_STRING_POINTER (arg0->op0)
                                    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}
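
/* Illustrative folds (added, hypothetical, not from the original
   sources): a CALL_EXPR op chain for pow (2.0, 3.0) with both
   arguments constant folds through build_call_expr to 8.0, and an
   ARRAY_REF op chain reading "abc"[1] with an in-bounds constant
   index folds to the character constant 'b'.  */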

/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static vec<vn_reference_op_s>
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          tree tem = SSA_VAL (vro->op0);
          if (tem != vro->op0)
            {
              *valueized_anything = true;
              vro->op0 = tem;
            }
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        {
          tree tem = SSA_VAL (vro->op1);
          if (tem != vro->op1)
            {
              *valueized_anything = true;
              vro->op1 = tem;
            }
        }
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        {
          tree tem = SSA_VAL (vro->op2);
          if (tem != vro->op2)
            {
              *valueized_anything = true;
              vro->op2 = tem;
            }
        }
      /* If it transforms from an SSA_NAME to an address, fold with
         a preceding indirect reference.  */
      if (i > 0
          && vro->op0
          && TREE_CODE (vro->op0) == ADDR_EXPR
          && orig[i - 1].opcode == MEM_REF)
        vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
               && vro->opcode == SSA_NAME
               && orig[i - 1].opcode == MEM_REF)
        vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
         one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
               && vro->off == -1
               && TREE_CODE (vro->op0) == INTEGER_CST
               && TREE_CODE (vro->op1) == INTEGER_CST
               && TREE_CODE (vro->op2) == INTEGER_CST)
        {
          double_int off = tree_to_double_int (vro->op0);
          off += -tree_to_double_int (vro->op1);
          off *= tree_to_double_int (vro->op2);
          if (off.fits_shwi ())
            vro->off = off.low;
        }
    }

  return orig;
}

static vec<vn_reference_op_s>
valueize_refs (vec<vn_reference_op_s> orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}
static vec<vn_reference_op_s> shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
                                              valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
                       unsigned int cnt, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}

/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
                                          alias_set_type set,
                                          tree type,
                                          vec<vn_reference_op_s,
                                              va_heap> operands,
                                          tree value)
{
  struct vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
                                     operands.copy (), value, value_id);
}

/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static vec<vn_reference_op_s>
    lhs_ops = vNULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definitions LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      vec<vn_reference_op_s> tem;
      tree lhs = gimple_assign_lhs (def_stmt);
      bool valueized_anything = false;
      /* Avoid re-allocation overhead.  */
      lhs_ops.truncate (0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      tem = lhs_ops;
      lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
      gcc_assert (lhs_ops == tem);
      if (valueized_anything)
        {
          lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
                                                      get_alias_set (lhs),
                                                      TREE_TYPE (lhs), lhs_ops);
          if (lhs_ref_ok
              && !refs_may_alias_p_1 (ref, &lhs_ref, true))
            return NULL;
        }
      else
        {
          ao_ref_init (&lhs_ref, lhs);
          lhs_ref_ok = true;
        }
    }

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* We can't deduce anything useful from clobbers.  */
  if (gimple_clobber_p (def_stmt))
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
          == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
          && maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          return vn_reference_lookup_or_insert_for_pieces
                   (vuse, vr->set, vr->type, vr->operands, val);
        }
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          return vn_reference_lookup_or_insert_for_pieces
                   (vuse, vr->set, vr->type, vr->operands, val);
        }
    }

  /* 3) Assignment from a constant.  We can use folds native encode/interpret
     routines to extract the assigned bits.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && CHAR_BIT == 8 && BITS_PER_UNIT == 8
           && ref->size == maxsize
           && maxsize % BITS_PER_UNIT == 0
           && offset % BITS_PER_UNIT == 0
           && is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
          && maxsize2 == size2
          && size2 % BITS_PER_UNIT == 0
          && offset2 % BITS_PER_UNIT == 0
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          /* We support up to 512-bit values (for V8DFmode).  */
          unsigned char buffer[64];
          int len;

          len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
                                    buffer, sizeof (buffer));
          if (len > 0)
            {
              tree val = native_interpret_expr (vr->type,
                                                buffer
                                                + ((offset - offset2)
                                                   / BITS_PER_UNIT),
                                                ref->size / BITS_PER_UNIT);
              if (val)
                return vn_reference_lookup_or_insert_for_pieces
                         (vuse, vr->set, vr->type, vr->operands, val);
            }
        }
    }

  /* 4) Assignment from an SSA name which definition we may be able
     to access pieces from.  */
  else if (ref->size == maxsize
           && is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
    {
      tree rhs1 = gimple_assign_rhs1 (def_stmt);
      gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt2)
          && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
              || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
          && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
        {
          tree base2;
          HOST_WIDE_INT offset2, size2, maxsize2, off;
          base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                           &offset2, &size2, &maxsize2);
          off = offset - offset2;
          if (maxsize2 != -1
              && maxsize2 == size2
              && operand_equal_p (base, base2, 0)
              && offset2 <= offset
              && offset2 + size2 >= offset + maxsize)
            {
              tree val = NULL_TREE;
              HOST_WIDE_INT elsz
                = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
              if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
                {
                  if (off == 0)
                    val = gimple_assign_rhs1 (def_stmt2);
                  else if (off == elsz)
                    val = gimple_assign_rhs2 (def_stmt2);
                }
              else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
                       && off % elsz == 0)
                {
                  tree ctor = gimple_assign_rhs1 (def_stmt2);
                  unsigned i = off / elsz;
                  if (i < CONSTRUCTOR_NELTS (ctor))
                    {
                      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
                      if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
                        {
                          if (TREE_CODE (TREE_TYPE (elt->value))
                              != VECTOR_TYPE)
                            val = elt->value;
                        }
                    }
                }
              if (val)
                return vn_reference_lookup_or_insert_for_pieces
                         (vuse, vr->set, vr->type, vr->operands, val);
            }
        }
    }

  /* 5) For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      vec<vn_reference_op_s>
          rhs = vNULL;
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
        return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      if (maxsize2 == -1
          || (base != base2 && !operand_equal_p (base, base2, 0))
          || offset2 > offset
          || offset2 + size2 < offset + maxsize)
        return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
         contains valueized operands for the lhs.  */
      i = vr->operands.length () - 1;
      j = lhs_ops.length () - 1;
      while (j >= 0 && i >= 0
             && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
        {
          i--;
          j--;
        }

      /* ??? The innermost op should always be a MEM_REF and we already
         checked that the assignment to the lhs kills vr.  Thus for
         aggregate copies using char[] types the vn_reference_op_eq
         may fail when comparing types for compatibility.  But we really
         don't care here - further lookups with the rewritten operands
         will simply fail if we messed up types too badly.  */
      if (j == 0 && i >= 0
          && lhs_ops[0].opcode == MEM_REF
          && lhs_ops[0].off != -1
          && (lhs_ops[0].off == vr->operands[i].off))
        i--, j--;

      /* i now points to the first additional op.
         ??? LHS may not be completely contained in VR, one or more
         VIEW_CONVERT_EXPRs could be in its way.  We could at least
         try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
        return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + rhs.length () > vr->operands.length ())
        {
          vec<vn_reference_op_s> old = vr->operands;
          vr->operands.safe_grow (i + 1 + rhs.length ());
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = vNULL;
        }
      else
        vr->operands.truncate (i + 1 + rhs.length ());
      FOR_EACH_VEC_ELT (rhs, j, vro)
        vr->operands[i + 1 + j] = *vro;
      rhs.release ();
      vr->operands = valueize_refs (vr->operands);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* 6) For memcpy copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && is_gimple_reg_type (vr->type)
           /* ???  Handle BCOPY as well.  */
           && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
               || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
               || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
           && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
               || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
           && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
               || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
           && host_integerp (gimple_call_arg (def_stmt, 2), 1))
    {
      tree lhs, rhs;
      ao_ref r;
      HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
      vn_reference_op_s op;
      HOST_WIDE_INT at;


      /* Only handle non-variable, addressable refs.  */
      if (ref->size != maxsize
          || offset % BITS_PER_UNIT != 0
          || ref->size % BITS_PER_UNIT != 0)
        return (void *)-1;

      /* Extract a pointer base and an offset for the destination.  */
      lhs = gimple_call_arg (def_stmt, 0);
      lhs_offset = 0;
      if (TREE_CODE (lhs) == SSA_NAME)
        lhs = SSA_VAL (lhs);
      if (TREE_CODE (lhs) == ADDR_EXPR)
        {
          tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
                                                    &lhs_offset);
          if (!tem)
            return (void *)-1;
          if (TREE_CODE (tem) == MEM_REF
              && host_integerp (TREE_OPERAND (tem, 1), 1))
            {
              lhs = TREE_OPERAND (tem, 0);
              lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
            }
          else if (DECL_P (tem))
            lhs = build_fold_addr_expr (tem);
          else
            return (void *)-1;
        }
      if (TREE_CODE (lhs) != SSA_NAME
          && TREE_CODE (lhs) != ADDR_EXPR)
        return (void *)-1;

      /* Extract a pointer base and an offset for the source.  */
      rhs = gimple_call_arg (def_stmt, 1);
      rhs_offset = 0;
      if (TREE_CODE (rhs) == SSA_NAME)
        rhs = SSA_VAL (rhs);
      if (TREE_CODE (rhs) == ADDR_EXPR)
        {
          tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
                                                    &rhs_offset);
          if (!tem)
            return (void *)-1;
          if (TREE_CODE (tem) == MEM_REF
              && host_integerp (TREE_OPERAND (tem, 1), 1))
            {
              rhs = TREE_OPERAND (tem, 0);
              rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
            }
          else if (DECL_P (tem))
            rhs = build_fold_addr_expr (tem);
          else
            return (void *)-1;
        }
      if (TREE_CODE (rhs) != SSA_NAME
          && TREE_CODE (rhs) != ADDR_EXPR)
        return (void *)-1;

      copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));

      /* The bases of the destination and the references have to agree.  */
      if ((TREE_CODE (base) != MEM_REF
           && !DECL_P (base))
          || (TREE_CODE (base) == MEM_REF
              && (TREE_OPERAND (base, 0) != lhs
                  || !host_integerp (TREE_OPERAND (base, 1), 1)))
          || (DECL_P (base)
              && (TREE_CODE (lhs) != ADDR_EXPR
                  || TREE_OPERAND (lhs, 0) != base)))
        return (void *)-1;

      /* And the access has to be contained within the memcpy destination.  */
      at = offset / BITS_PER_UNIT;
      if (TREE_CODE (base) == MEM_REF)
        at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
      if (lhs_offset > at
          || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
        return (void *)-1;

      /* Make room for 2 operands in the new reference.  */
      if (vr->operands.length () < 2)
        {
          vec<vn_reference_op_s> old = vr->operands;
          vr->operands.safe_grow_cleared (2);
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references.create (0);
        }
      else
        vr->operands.truncate (2);

      /* The looked-through reference is a simple MEM_REF.  */
      memset (&op, 0, sizeof (op));
      op.type = vr->type;
      op.opcode = MEM_REF;
      op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
      op.off = at - lhs_offset + rhs_offset;
      vr->operands[0] = op;
      op.type = TREE_TYPE (rhs);
      op.opcode = TREE_CODE (rhs);
      op.op0 = rhs;
      op.off = -1;
      vr->operands[1] = op;
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
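
/* End-to-end illustration of case 6 above (added, hypothetical, not
   from the original sources):

     memcpy (&b, &a, 16);
     x_1 = b.f;            <- the lookup for b.f walks to the memcpy

   the reference b.f is rewritten into a MEM_REF based on &a at the
   corresponding offset and the walk continues, so a dominating store
   a.f = 7 lets x_1 be value numbered to 7.  */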

/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
                            vec<vn_reference_op_s> operands,
                            vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  shared_lookup_references.truncate (0);
  shared_lookup_references.safe_grow (operands.length ());
  memcpy (shared_lookup_references.address (),
          operands.address (),
          sizeof (vn_reference_op_s)
          * operands.length ());
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      vn_walk_kind = kind;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
        *vnresult =
          (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        vr1.operands.release ();
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}
1951 /* Lookup OP in the current hash table, and return the resulting value
1952 number if it exists in the hash table. Return NULL_TREE if it does
1953 not exist in the hash table or if the result field of the structure
1954 was NULL.. VNRESULT will be filled in with the vn_reference_t
1955 stored in the hashtable if one exists. */
1957 tree
1958 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
1959 vn_reference_t *vnresult)
1961 vec<vn_reference_op_s> operands;
1962 struct vn_reference_s vr1;
1963 tree cst;
1964 bool valueized_anything;
1966 if (vnresult)
1967 *vnresult = NULL;
1969 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1970 vr1.operands = operands
1971 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
1972 vr1.type = TREE_TYPE (op);
1973 vr1.set = get_alias_set (op);
1974 vr1.hashcode = vn_reference_compute_hash (&vr1);
1975 if ((cst = fully_constant_vn_reference_p (&vr1)))
1976 return cst;
1978 if (kind != VN_NOWALK
1979 && vr1.vuse)
1981 vn_reference_t wvnresult;
1982 ao_ref r;
1983 /* Make sure to use a valueized reference if we valueized anything.
1984 Otherwise preserve the full reference for advanced TBAA. */
1985 if (!valueized_anything
1986 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
1987 vr1.operands))
1988 ao_ref_init (&r, op);
1989 vn_walk_kind = kind;
1990 wvnresult =
1991 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1992 vn_reference_lookup_2,
1993 vn_reference_lookup_3, &vr1);
1994 if (vr1.operands != operands)
1995 vr1.operands.release ();
1996 if (wvnresult)
1998 if (vnresult)
1999 *vnresult = wvnresult;
2000 return wvnresult->result;
2003 return NULL_TREE;
2006 return vn_reference_lookup_1 (&vr1, vnresult);
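/* A minimal usage sketch (illustrative, not part of the pass) for the
   function above: look up the value of reference REF in the memory
   state VUSE, letting the alias walker skip non-aliasing stores.
   The function name is hypothetical. */

static tree ATTRIBUTE_UNUSED
example_lookup_load (tree ref, tree vuse)
{
  vn_reference_t vnresult = NULL;
  tree val = vn_reference_lookup (ref, vuse, VN_WALK, &vnresult);
  /* VAL is NULL_TREE if no equivalent reference is known. */
  return val ? val : ref;
}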
2010 /* Insert OP into the current hash table with a value number of
2011 RESULT, and return the resulting reference structure we created. */
2013 vn_reference_t
2014 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2016 void **slot;
2017 vn_reference_t vr1;
2019 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2020 if (TREE_CODE (result) == SSA_NAME)
2021 vr1->value_id = VN_INFO (result)->value_id;
2022 else
2023 vr1->value_id = get_or_alloc_constant_value_id (result);
2024 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2025 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
2026 vr1->type = TREE_TYPE (op);
2027 vr1->set = get_alias_set (op);
2028 vr1->hashcode = vn_reference_compute_hash (vr1);
2029 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2030 vr1->result_vdef = vdef;
2032 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
2033 INSERT);
2035 /* Because we lookup stores using vuses, and value number failures
2036 using the vdefs (see visit_reference_op_store for how and why),
2037 it's possible that on failure we may try to insert an already
2038 inserted store. This is not wrong; there is no SSA name for a
2039 store that we could use as a differentiator anyway. Thus, unlike
2040 the other lookup functions, you cannot gcc_assert (!*slot)
2041 here. */
2043 /* But free the old slot in case of a collision. */
2044 if (*slot)
2045 free_reference (*slot);
2047 *slot = vr1;
2048 return vr1;
2051 /* Insert a reference by its pieces into the current hash table with
2052 a value number of RESULT. Return the resulting reference
2053 structure we created. */
2055 vn_reference_t
2056 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2057 vec<vn_reference_op_s> operands,
2058 tree result, unsigned int value_id)
2061 void **slot;
2062 vn_reference_t vr1;
2064 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2065 vr1->value_id = value_id;
2066 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2067 vr1->operands = valueize_refs (operands);
2068 vr1->type = type;
2069 vr1->set = set;
2070 vr1->hashcode = vn_reference_compute_hash (vr1);
2071 if (result && TREE_CODE (result) == SSA_NAME)
2072 result = SSA_VAL (result);
2073 vr1->result = result;
2075 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
2076 INSERT);
2078 /* At this point we should have all the things inserted that we have
2079 seen before, and we should never try inserting something that
2080 already exists. */
2081 gcc_assert (!*slot);
2082 if (*slot)
2083 free_reference (*slot);
2085 *slot = vr1;
2086 return vr1;
2089 /* Compute and return the hash value for nary operation VNO1. */
2091 hashval_t
2092 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2094 hashval_t hash;
2095 unsigned i;
2097 for (i = 0; i < vno1->length; ++i)
2098 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2099 vno1->op[i] = SSA_VAL (vno1->op[i]);
2101 if (vno1->length == 2
2102 && commutative_tree_code (vno1->opcode)
2103 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2105 tree temp = vno1->op[0];
2106 vno1->op[0] = vno1->op[1];
2107 vno1->op[1] = temp;
2110 hash = iterative_hash_hashval_t (vno1->opcode, 0);
2111 for (i = 0; i < vno1->length; ++i)
2112 hash = iterative_hash_expr (vno1->op[i], hash);
2114 return hash;
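/* A minimal sketch (illustrative, not part of the pass) of what the
   operand swap above buys us: a lookup of B + A finds an entry
   recorded for A + B, since both are canonicalized to the same
   operand order before hashing. The function name is hypothetical. */

static tree ATTRIBUTE_UNUSED
example_commutative_lookup (tree a, tree b)
{
  tree ops[2];
  ops[0] = b;
  ops[1] = a;
  /* Equivalent to looking up { a, b }; the canonicalization happens
     in vn_nary_op_compute_hash. */
  return vn_nary_op_lookup_pieces (2, PLUS_EXPR, TREE_TYPE (a), ops, NULL);
}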
2117 /* Return the computed hashcode for nary operation P1. */
2119 static hashval_t
2120 vn_nary_op_hash (const void *p1)
2122 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2123 return vno1->hashcode;
2126 /* Compare nary operations P1 and P2 and return true if they are
2127 equivalent. */
2129 int
2130 vn_nary_op_eq (const void *p1, const void *p2)
2132 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2133 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
2134 unsigned i;
2136 if (vno1->hashcode != vno2->hashcode)
2137 return false;
2139 if (vno1->length != vno2->length)
2140 return false;
2142 if (vno1->opcode != vno2->opcode
2143 || !types_compatible_p (vno1->type, vno2->type))
2144 return false;
2146 for (i = 0; i < vno1->length; ++i)
2147 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2148 return false;
2150 return true;
2153 /* Initialize VNO from the pieces provided. */
2155 static void
2156 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2157 enum tree_code code, tree type, tree *ops)
2159 vno->opcode = code;
2160 vno->length = length;
2161 vno->type = type;
2162 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2165 /* Initialize VNO from OP. */
2167 static void
2168 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2170 unsigned i;
2172 vno->opcode = TREE_CODE (op);
2173 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2174 vno->type = TREE_TYPE (op);
2175 for (i = 0; i < vno->length; ++i)
2176 vno->op[i] = TREE_OPERAND (op, i);
2179 /* Return the number of operands for a vn_nary ops structure from STMT. */
2181 static unsigned int
2182 vn_nary_length_from_stmt (gimple stmt)
2184 switch (gimple_assign_rhs_code (stmt))
2186 case REALPART_EXPR:
2187 case IMAGPART_EXPR:
2188 case VIEW_CONVERT_EXPR:
2189 return 1;
2191 case BIT_FIELD_REF:
2192 return 3;
2194 case CONSTRUCTOR:
2195 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2197 default:
2198 return gimple_num_ops (stmt) - 1;
2202 /* Initialize VNO from STMT. */
2204 static void
2205 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2207 unsigned i;
2209 vno->opcode = gimple_assign_rhs_code (stmt);
2210 vno->type = gimple_expr_type (stmt);
2211 switch (vno->opcode)
2213 case REALPART_EXPR:
2214 case IMAGPART_EXPR:
2215 case VIEW_CONVERT_EXPR:
2216 vno->length = 1;
2217 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2218 break;
2220 case BIT_FIELD_REF:
2221 vno->length = 3;
2222 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2223 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2224 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2225 break;
2227 case CONSTRUCTOR:
2228 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2229 for (i = 0; i < vno->length; ++i)
2230 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2231 break;
2233 default:
2234 gcc_checking_assert (!gimple_assign_single_p (stmt));
2235 vno->length = gimple_num_ops (stmt) - 1;
2236 for (i = 0; i < vno->length; ++i)
2237 vno->op[i] = gimple_op (stmt, i + 1);
2241 /* Compute the hashcode for VNO and look for it in the hash table;
2242 return the resulting value number if it exists in the hash table.
2243 Return NULL_TREE if it does not exist in the hash table or if the
2244 result field of the operation is NULL. VNRESULT will contain the
2245 vn_nary_op_t from the hashtable if it exists. */
2247 static tree
2248 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2250 void **slot;
2252 if (vnresult)
2253 *vnresult = NULL;
2255 vno->hashcode = vn_nary_op_compute_hash (vno);
2256 slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
2257 NO_INSERT);
2258 if (!slot && current_info == optimistic_info)
2259 slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
2260 NO_INSERT);
2261 if (!slot)
2262 return NULL_TREE;
2263 if (vnresult)
2264 *vnresult = (vn_nary_op_t)*slot;
2265 return ((vn_nary_op_t)*slot)->result;
2268 /* Lookup a n-ary operation by its pieces and return the resulting value
2269 number if it exists in the hash table. Return NULL_TREE if it does
2270 not exist in the hash table or if the result field of the operation
2271 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2272 if it exists. */
2274 tree
2275 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2276 tree type, tree *ops, vn_nary_op_t *vnresult)
2278 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2279 sizeof_vn_nary_op (length));
2280 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2281 return vn_nary_op_lookup_1 (vno1, vnresult);
2284 /* Lookup OP in the current hash table, and return the resulting value
2285 number if it exists in the hash table. Return NULL_TREE if it does
2286 not exist in the hash table or if the result field of the operation
2287 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2288 if it exists. */
2290 tree
2291 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2293 vn_nary_op_t vno1
2294 = XALLOCAVAR (struct vn_nary_op_s,
2295 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2296 init_vn_nary_op_from_op (vno1, op);
2297 return vn_nary_op_lookup_1 (vno1, vnresult);
2300 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2301 value number if it exists in the hash table. Return NULL_TREE if
2302 it does not exist in the hash table. VNRESULT will contain the
2303 vn_nary_op_t from the hashtable if it exists. */
2305 tree
2306 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2308 vn_nary_op_t vno1
2309 = XALLOCAVAR (struct vn_nary_op_s,
2310 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2311 init_vn_nary_op_from_stmt (vno1, stmt);
2312 return vn_nary_op_lookup_1 (vno1, vnresult);
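/* For example (illustrative), given

     x_1 = a_2 + b_3;
     y_4 = a_2 + b_3;

   the lookup for the second statement finds the entry inserted when
   the first one was visited, and y_4 is value numbered to x_1. */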
2315 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2317 static vn_nary_op_t
2318 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2320 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2323 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2324 obstack. */
2326 static vn_nary_op_t
2327 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2329 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2330 &current_info->nary_obstack);
2332 vno1->value_id = value_id;
2333 vno1->length = length;
2334 vno1->result = result;
2336 return vno1;
2339 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2340 VNO->HASHCODE first. */
2342 static vn_nary_op_t
2343 vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash)
2345 void **slot;
2347 if (compute_hash)
2348 vno->hashcode = vn_nary_op_compute_hash (vno);
2350 slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT);
2351 gcc_assert (!*slot);
2353 *slot = vno;
2354 return vno;
2357 /* Insert an n-ary operation into the current hash table using its
2358 pieces. Return the vn_nary_op_t structure we created and put in
2359 the hashtable. */
2361 vn_nary_op_t
2362 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2363 tree type, tree *ops,
2364 tree result, unsigned int value_id)
2366 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2367 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2368 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2371 /* Insert OP into the current hash table with a value number of
2372 RESULT. Return the vn_nary_op_t structure we created and put in
2373 the hashtable. */
2375 vn_nary_op_t
2376 vn_nary_op_insert (tree op, tree result)
2378 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2379 vn_nary_op_t vno1;
2381 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2382 init_vn_nary_op_from_op (vno1, op);
2383 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2386 /* Insert the rhs of STMT into the current hash table with a value number of
2387 RESULT. */
2389 vn_nary_op_t
2390 vn_nary_op_insert_stmt (gimple stmt, tree result)
2392 vn_nary_op_t vno1
2393 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2394 result, VN_INFO (result)->value_id);
2395 init_vn_nary_op_from_stmt (vno1, stmt);
2396 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2399 /* Compute a hashcode for PHI operation VP1 and return it. */
2401 static inline hashval_t
2402 vn_phi_compute_hash (vn_phi_t vp1)
2404 hashval_t result;
2405 int i;
2406 tree phi1op;
2407 tree type;
2409 result = vp1->block->index;
2411 /* If all PHI arguments are constants we need to distinguish
2412 the PHI node via its type. */
2413 type = vp1->type;
2414 result += vn_hash_type (type);
2416 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2418 if (phi1op == VN_TOP)
2419 continue;
2420 result = iterative_hash_expr (phi1op, result);
2423 return result;
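/* For example (illustrative), the two PHIs

     i_1 = PHI <1(2), 2(3)>   (type int)
     l_2 = PHI <1(2), 2(3)>   (type long)

   have identical argument lists, so only the type (mixed into the
   hash above and checked in vn_phi_eq) keeps them from being given
   the same value number. */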
2426 /* Return the computed hashcode for phi operation P1. */
2428 static hashval_t
2429 vn_phi_hash (const void *p1)
2431 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2432 return vp1->hashcode;
2435 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2437 static int
2438 vn_phi_eq (const void *p1, const void *p2)
2440 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2441 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
2443 if (vp1->hashcode != vp2->hashcode)
2444 return false;
2446 if (vp1->block == vp2->block)
2448 int i;
2449 tree phi1op;
2451 /* If the PHI nodes do not have compatible types
2452 they are not the same. */
2453 if (!types_compatible_p (vp1->type, vp2->type))
2454 return false;
2456 /* Any phi in the same block will have its arguments in the
2457 same edge order, because of how we store phi nodes. */
2458 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2460 tree phi2op = vp2->phiargs[i];
2461 if (phi1op == VN_TOP || phi2op == VN_TOP)
2462 continue;
2463 if (!expressions_equal_p (phi1op, phi2op))
2464 return false;
2466 return true;
2468 return false;
2471 static vec<tree> shared_lookup_phiargs;
2473 /* Lookup PHI in the current hash table, and return the resulting
2474 value number if it exists in the hash table. Return NULL_TREE if
2475 it does not exist in the hash table. */
2477 static tree
2478 vn_phi_lookup (gimple phi)
2480 void **slot;
2481 struct vn_phi_s vp1;
2482 unsigned i;
2484 shared_lookup_phiargs.truncate (0);
2486 /* Canonicalize the SSA_NAME's to their value number. */
2487 for (i = 0; i < gimple_phi_num_args (phi); i++)
2489 tree def = PHI_ARG_DEF (phi, i);
2490 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2491 shared_lookup_phiargs.safe_push (def);
2493 vp1.type = TREE_TYPE (gimple_phi_result (phi));
2494 vp1.phiargs = shared_lookup_phiargs;
2495 vp1.block = gimple_bb (phi);
2496 vp1.hashcode = vn_phi_compute_hash (&vp1);
2497 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
2498 NO_INSERT);
2499 if (!slot && current_info == optimistic_info)
2500 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
2501 NO_INSERT);
2502 if (!slot)
2503 return NULL_TREE;
2504 return ((vn_phi_t)*slot)->result;
2507 /* Insert PHI into the current hash table with a value number of
2508 RESULT. */
2510 static vn_phi_t
2511 vn_phi_insert (gimple phi, tree result)
2513 void **slot;
2514 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2515 unsigned i;
2516 vec<tree> args = vNULL;
2518 /* Canonicalize the SSA_NAME's to their value number. */
2519 for (i = 0; i < gimple_phi_num_args (phi); i++)
2521 tree def = PHI_ARG_DEF (phi, i);
2522 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2523 args.safe_push (def);
2525 vp1->value_id = VN_INFO (result)->value_id;
2526 vp1->type = TREE_TYPE (gimple_phi_result (phi));
2527 vp1->phiargs = args;
2528 vp1->block = gimple_bb (phi);
2529 vp1->result = result;
2530 vp1->hashcode = vn_phi_compute_hash (vp1);
2532 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
2533 INSERT);
2535 /* Because we iterate over phi operations more than once, it's
2536 possible the slot might already exist here, hence no assert. */
2537 *slot = vp1;
2538 return vp1;
2542 /* Print set of components in strongly connected component SCC to OUT. */
2544 static void
2545 print_scc (FILE *out, vec<tree> scc)
2547 tree var;
2548 unsigned int i;
2550 fprintf (out, "SCC consists of:");
2551 FOR_EACH_VEC_ELT (scc, i, var)
2553 fprintf (out, " ");
2554 print_generic_expr (out, var, 0);
2556 fprintf (out, "\n");
2559 /* Set the value number of FROM to TO, return true if it has changed
2560 as a result. */
2562 static inline bool
2563 set_ssa_val_to (tree from, tree to)
2565 tree currval = SSA_VAL (from);
2566 HOST_WIDE_INT toff, coff;
2568 if (from != to)
2570 if (currval == from)
2572 if (dump_file && (dump_flags & TDF_DETAILS))
2574 fprintf (dump_file, "Not changing value number of ");
2575 print_generic_expr (dump_file, from, 0);
2576 fprintf (dump_file, " from VARYING to ");
2577 print_generic_expr (dump_file, to, 0);
2578 fprintf (dump_file, "\n");
2580 return false;
2582 else if (TREE_CODE (to) == SSA_NAME
2583 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2584 to = from;
2587 /* The only things we allow as value numbers are VN_TOP, SSA names
2588 and invariants. So assert that here. */
2589 gcc_assert (to != NULL_TREE
2590 && (to == VN_TOP
2591 || TREE_CODE (to) == SSA_NAME
2592 || is_gimple_min_invariant (to)));
2594 if (dump_file && (dump_flags & TDF_DETAILS))
2596 fprintf (dump_file, "Setting value number of ");
2597 print_generic_expr (dump_file, from, 0);
2598 fprintf (dump_file, " to ");
2599 print_generic_expr (dump_file, to, 0);
2602 if (currval != to
2603 && !operand_equal_p (currval, to, 0)
2604 /* ??? For addresses involving volatile objects or types operand_equal_p
2605 does not reliably detect ADDR_EXPRs as equal. We know we are only
2606 getting invariant gimple addresses here, so we can use
2607 get_addr_base_and_unit_offset to do this comparison. */
2608 && !(TREE_CODE (currval) == ADDR_EXPR
2609 && TREE_CODE (to) == ADDR_EXPR
2610 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
2611 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
2612 && coff == toff))
2614 VN_INFO (from)->valnum = to;
2615 if (dump_file && (dump_flags & TDF_DETAILS))
2616 fprintf (dump_file, " (changed)\n");
2617 return true;
2619 if (dump_file && (dump_flags & TDF_DETAILS))
2620 fprintf (dump_file, "\n");
2621 return false;
2624 /* Mark as processed all the definitions in the defining stmt of USE, or
2625 the USE itself. */
2627 static void
2628 mark_use_processed (tree use)
2630 ssa_op_iter iter;
2631 def_operand_p defp;
2632 gimple stmt = SSA_NAME_DEF_STMT (use);
2634 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
2636 VN_INFO (use)->use_processed = true;
2637 return;
2640 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2642 tree def = DEF_FROM_PTR (defp);
2644 VN_INFO (def)->use_processed = true;
2648 /* Set all definitions in STMT to value number to themselves.
2649 Return true if a value number changed. */
2651 static bool
2652 defs_to_varying (gimple stmt)
2654 bool changed = false;
2655 ssa_op_iter iter;
2656 def_operand_p defp;
2658 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2660 tree def = DEF_FROM_PTR (defp);
2661 changed |= set_ssa_val_to (def, def);
2663 return changed;
2666 static bool expr_has_constants (tree expr);
2667 static tree valueize_expr (tree expr);
2669 /* Visit a copy between LHS and RHS, return true if the value number
2670 changed. */
2672 static bool
2673 visit_copy (tree lhs, tree rhs)
2675 /* Follow chains of copies to their destination. */
2676 while (TREE_CODE (rhs) == SSA_NAME
2677 && SSA_VAL (rhs) != rhs)
2678 rhs = SSA_VAL (rhs);
2680 /* The copy may have a more interesting constant-filled expression
2681 (we don't, since we know our RHS is just an SSA name). */
2682 if (TREE_CODE (rhs) == SSA_NAME)
2684 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2685 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2688 return set_ssa_val_to (lhs, rhs);
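/* For example (illustrative), after

     b_2 = a_1;
     c_3 = b_2;

   visiting the second copy follows SSA_VAL (b_2) to a_1, so c_3 is
   value numbered directly to a_1 rather than to b_2. */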
2691 /* Visit a nary operator RHS, value number it, and return true if the
2692 value number of LHS has changed as a result. */
2694 static bool
2695 visit_nary_op (tree lhs, gimple stmt)
2697 bool changed = false;
2698 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2700 if (result)
2701 changed = set_ssa_val_to (lhs, result);
2702 else
2704 changed = set_ssa_val_to (lhs, lhs);
2705 vn_nary_op_insert_stmt (stmt, lhs);
2708 return changed;
2711 /* Visit a call STMT storing into LHS. Return true if the value number
2712 of the LHS has changed as a result. */
2714 static bool
2715 visit_reference_op_call (tree lhs, gimple stmt)
2717 bool changed = false;
2718 struct vn_reference_s vr1;
2719 vn_reference_t vnresult = NULL;
2720 tree vuse = gimple_vuse (stmt);
2721 tree vdef = gimple_vdef (stmt);
2723 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
2724 if (lhs && TREE_CODE (lhs) != SSA_NAME)
2725 lhs = NULL_TREE;
2727 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2728 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2729 vr1.type = gimple_expr_type (stmt);
2730 vr1.set = 0;
2731 vr1.hashcode = vn_reference_compute_hash (&vr1);
2732 vn_reference_lookup_1 (&vr1, &vnresult);
2734 if (vnresult)
2736 if (vnresult->result_vdef)
2737 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
2739 if (!vnresult->result && lhs)
2740 vnresult->result = lhs;
2742 if (vnresult->result && lhs)
2744 changed |= set_ssa_val_to (lhs, vnresult->result);
2746 if (VN_INFO (vnresult->result)->has_constants)
2747 VN_INFO (lhs)->has_constants = true;
2750 else
2752 void **slot;
2753 vn_reference_t vr2;
2754 if (vdef)
2755 changed |= set_ssa_val_to (vdef, vdef);
2756 if (lhs)
2757 changed |= set_ssa_val_to (lhs, lhs);
2758 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2759 vr2->vuse = vr1.vuse;
2760 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2761 vr2->type = vr1.type;
2762 vr2->set = vr1.set;
2763 vr2->hashcode = vr1.hashcode;
2764 vr2->result = lhs;
2765 vr2->result_vdef = vdef;
2766 slot = htab_find_slot_with_hash (current_info->references,
2767 vr2, vr2->hashcode, INSERT);
2768 if (*slot)
2769 free_reference (*slot);
2770 *slot = vr2;
2773 return changed;
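/* For example (illustrative), two calls

     x_1 = f (a_2);
     ...
     x_3 = f (a_2);

   to a pure function f that see the same incoming vuse receive the
   same value number for their results, so the second call can later
   be treated as redundant. */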
2776 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2777 and return true if the value number of the LHS has changed as a result. */
2779 static bool
2780 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2782 bool changed = false;
2783 tree last_vuse;
2784 tree result;
2786 last_vuse = gimple_vuse (stmt);
2787 last_vuse_ptr = &last_vuse;
2788 result = vn_reference_lookup (op, gimple_vuse (stmt),
2789 default_vn_walk_kind, NULL);
2790 last_vuse_ptr = NULL;
2792 /* If we have a VCE, try looking up its operand as it might be stored in
2793 a different type. */
2794 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2795 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2796 default_vn_walk_kind, NULL);
2798 /* We handle type-punning through unions by value-numbering based
2799 on offset and size of the access. Be prepared to handle a
2800 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
2801 if (result
2802 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2804 /* We will be setting the value number of lhs to the value number
2805 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2806 So first simplify and lookup this expression to see if it
2807 is already available. */
2808 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2809 if ((CONVERT_EXPR_P (val)
2810 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2811 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2813 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2814 if ((CONVERT_EXPR_P (tem)
2815 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2816 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2817 TREE_TYPE (val), tem)))
2818 val = tem;
2820 result = val;
2821 if (!is_gimple_min_invariant (val)
2822 && TREE_CODE (val) != SSA_NAME)
2823 result = vn_nary_op_lookup (val, NULL);
2824 /* If the expression is not yet available, value-number lhs to
2825 a new SSA_NAME we create. */
2826 if (!result)
2828 result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (),
2829 "vntemp");
2830 /* Initialize value-number information properly. */
2831 VN_INFO_GET (result)->valnum = result;
2832 VN_INFO (result)->value_id = get_next_value_id ();
2833 VN_INFO (result)->expr = val;
2834 VN_INFO (result)->has_constants = expr_has_constants (val);
2835 VN_INFO (result)->needs_insertion = true;
2836 /* As all "inserted" statements are singleton SCCs, insert
2837 to the valid table. This is strictly needed to
2838 avoid re-generating new value SSA_NAMEs for the same
2839 expression during SCC iteration over and over (the
2840 optimistic table gets cleared after each iteration).
2841 We do not need to insert into the optimistic table, as
2842 lookups there will fall back to the valid table. */
2843 if (current_info == optimistic_info)
2845 current_info = valid_info;
2846 vn_nary_op_insert (val, result);
2847 current_info = optimistic_info;
2849 else
2850 vn_nary_op_insert (val, result);
2851 if (dump_file && (dump_flags & TDF_DETAILS))
2853 fprintf (dump_file, "Inserting name ");
2854 print_generic_expr (dump_file, result, 0);
2855 fprintf (dump_file, " for expression ");
2856 print_generic_expr (dump_file, val, 0);
2857 fprintf (dump_file, "\n");
2862 if (result)
2864 changed = set_ssa_val_to (lhs, result);
2865 if (TREE_CODE (result) == SSA_NAME
2866 && VN_INFO (result)->has_constants)
2868 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2869 VN_INFO (lhs)->has_constants = true;
2872 else
2874 changed = set_ssa_val_to (lhs, lhs);
2875 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
2878 return changed;
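/* For example (illustrative), for

     union { int i; float f; } u;
     u.i = i_1;
     f_2 = u.f;

   the load of u.f is matched against the store to u.i by offset and
   size, and the type mismatch is repaired by value numbering f_2 via
   VIEW_CONVERT_EXPR <float> as described above. */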
2882 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2883 and return true if the value number of the LHS has changed as a result. */
2885 static bool
2886 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2888 bool changed = false;
2889 vn_reference_t vnresult = NULL;
2890 tree result, assign;
2891 bool resultsame = false;
2892 tree vuse = gimple_vuse (stmt);
2893 tree vdef = gimple_vdef (stmt);
2895 /* First we want to lookup using the *vuses* from the store and see
2896 if the last store to this location with the same address stored
2897 the same value.
2899 The vuses represent the memory state before the store. If the
2900 memory state, address, and value of the store are the same as
2901 those of the last store to this location, then this store will
2902 produce the same memory state as that store.
2904 In this case the vdef versions for this store are value numbered
2905 to those vuse versions, since they represent the same memory
2906 state after this store.
2908 Otherwise, the vdefs for the store are used when inserting into
2909 the table, since the store generates a new memory state. */
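/* For example (illustrative), in

     # .MEM_3 = VDEF <.MEM_2>
     *p_1 = x_4;
     ...
     # .MEM_5 = VDEF <.MEM_3>
     *p_1 = x_4;

   the second store finds that the location already holds x_4 in
   memory state .MEM_3, so .MEM_5 is value numbered to .MEM_3 and a
   later pass can delete the store as redundant. */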
2911 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);
2913 if (result)
2915 if (TREE_CODE (result) == SSA_NAME)
2916 result = SSA_VAL (result);
2917 if (TREE_CODE (op) == SSA_NAME)
2918 op = SSA_VAL (op);
2919 resultsame = expressions_equal_p (result, op);
2922 if (!result || !resultsame)
2924 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
2925 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
2926 if (vnresult)
2928 VN_INFO (vdef)->use_processed = true;
2929 return set_ssa_val_to (vdef, vnresult->result_vdef);
2933 if (!result || !resultsame)
2935 if (dump_file && (dump_flags & TDF_DETAILS))
2937 fprintf (dump_file, "No store match\n");
2938 fprintf (dump_file, "Value numbering store ");
2939 print_generic_expr (dump_file, lhs, 0);
2940 fprintf (dump_file, " to ");
2941 print_generic_expr (dump_file, op, 0);
2942 fprintf (dump_file, "\n");
2944 /* Have to set value numbers before insert, since insert is
2945 going to valueize the references in-place. */
2946 if (vdef)
2948 changed |= set_ssa_val_to (vdef, vdef);
2951 /* Do not insert structure copies into the tables. */
2952 if (is_gimple_min_invariant (op)
2953 || is_gimple_reg (op))
2954 vn_reference_insert (lhs, op, vdef, NULL);
2956 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
2957 vn_reference_insert (assign, lhs, vuse, vdef);
2959 else
2961 /* We had a match, so value number the vdef to have the value
2962 number of the vuse it came from. */
2964 if (dump_file && (dump_flags & TDF_DETAILS))
2965 fprintf (dump_file, "Store matched earlier value, "
2966 "value numbering store vdefs to matching vuses.\n");
2968 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
2971 return changed;
2974 /* Visit and value number PHI, return true if the value number
2975 changed. */
2977 static bool
2978 visit_phi (gimple phi)
2980 bool changed = false;
2981 tree result;
2982 tree sameval = VN_TOP;
2983 bool allsame = true;
2984 unsigned i;
2986 /* TODO: We could check for this in init_scc_vn, and replace this
2987 with a gcc_assert. */
2988 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2989 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2991 /* See if all non-TOP arguments have the same value. TOP is
2992 equivalent to everything, so we can ignore it. */
2993 for (i = 0; i < gimple_phi_num_args (phi); i++)
2995 tree def = PHI_ARG_DEF (phi, i);
2997 if (TREE_CODE (def) == SSA_NAME)
2998 def = SSA_VAL (def);
2999 if (def == VN_TOP)
3000 continue;
3001 if (sameval == VN_TOP)
3003 sameval = def;
3005 else
3007 if (!expressions_equal_p (def, sameval))
3009 allsame = false;
3010 break;
3015 /* If all value numbered to the same value, the phi node has that
3016 value. */
3017 if (allsame)
3019 if (is_gimple_min_invariant (sameval))
3021 VN_INFO (PHI_RESULT (phi))->has_constants = true;
3022 VN_INFO (PHI_RESULT (phi))->expr = sameval;
3024 else
3026 VN_INFO (PHI_RESULT (phi))->has_constants = false;
3027 VN_INFO (PHI_RESULT (phi))->expr = sameval;
3030 if (TREE_CODE (sameval) == SSA_NAME)
3031 return visit_copy (PHI_RESULT (phi), sameval);
3033 return set_ssa_val_to (PHI_RESULT (phi), sameval);
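/* For example (illustrative),

     x_2 = PHI <a_1(2), a_1(3), VN_TOP(4)>

   has all non-TOP arguments equal to a_1, so the code above value
   numbers x_2 to a_1. */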
3036 /* Otherwise, see if it is equivalent to a phi node in this block. */
3037 result = vn_phi_lookup (phi);
3038 if (result)
3040 if (TREE_CODE (result) == SSA_NAME)
3041 changed = visit_copy (PHI_RESULT (phi), result);
3042 else
3043 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3045 else
3047 vn_phi_insert (phi, PHI_RESULT (phi));
3048 VN_INFO (PHI_RESULT (phi))->has_constants = false;
3049 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
3050 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3053 return changed;
3056 /* Return true if EXPR contains constants. */
3058 static bool
3059 expr_has_constants (tree expr)
3061 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3063 case tcc_unary:
3064 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
3066 case tcc_binary:
3067 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
3068 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
3069 /* Constants inside reference ops are rarely interesting, but
3070 it can take a lot of looking to find them. */
3071 case tcc_reference:
3072 case tcc_declaration:
3073 return false;
3074 default:
3075 return is_gimple_min_invariant (expr);
3077 return false;
3080 /* Return true if STMT contains constants. */
3082 static bool
3083 stmt_has_constants (gimple stmt)
3085 if (gimple_code (stmt) != GIMPLE_ASSIGN)
3086 return false;
3088 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
3090 case GIMPLE_UNARY_RHS:
3091 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
3093 case GIMPLE_BINARY_RHS:
3094 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
3095 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
3096 case GIMPLE_TERNARY_RHS:
3097 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
3098 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
3099 || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
3100 case GIMPLE_SINGLE_RHS:
3101 /* Constants inside reference ops are rarely interesting, but
3102 it can take a lot of looking to find them. */
3103 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
3104 default:
3105 gcc_unreachable ();
3107 return false;
3110 /* Replace SSA_NAMES in expr with their value numbers, and return the
3111 result.
3112 This is performed in place. */
3114 static tree
3115 valueize_expr (tree expr)
3117 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3119 case tcc_binary:
3120 TREE_OPERAND (expr, 1) = vn_valueize (TREE_OPERAND (expr, 1));
3121 /* Fallthru. */
3122 case tcc_unary:
3123 TREE_OPERAND (expr, 0) = vn_valueize (TREE_OPERAND (expr, 0));
3124 break;
3125 default:;
3127 return expr;
3130 /* Simplify the binary expression RHS, and return the result if
3131 simplified. */
3133 static tree
3134 simplify_binary_expression (gimple stmt)
3136 tree result = NULL_TREE;
3137 tree op0 = gimple_assign_rhs1 (stmt);
3138 tree op1 = gimple_assign_rhs2 (stmt);
3139 enum tree_code code = gimple_assign_rhs_code (stmt);
3141 /* This will not catch every single case we could combine, but will
3142 catch those with constants. The goal here is to simultaneously
3143 combine constants between expressions, but avoid infinite
3144 expansion of expressions during simplification. */
3145 if (TREE_CODE (op0) == SSA_NAME)
3147 if (VN_INFO (op0)->has_constants
3148 || TREE_CODE_CLASS (code) == tcc_comparison
3149 || code == COMPLEX_EXPR)
3150 op0 = valueize_expr (vn_get_expr_for (op0));
3151 else
3152 op0 = vn_valueize (op0);
3155 if (TREE_CODE (op1) == SSA_NAME)
3157 if (VN_INFO (op1)->has_constants
3158 || code == COMPLEX_EXPR)
3159 op1 = valueize_expr (vn_get_expr_for (op1));
3160 else
3161 op1 = vn_valueize (op1);
3164 /* Pointer plus constant can be represented as invariant address.
3165 Do so to allow further propagation; see also tree forwprop. */
3166 if (code == POINTER_PLUS_EXPR
3167 && host_integerp (op1, 1)
3168 && TREE_CODE (op0) == ADDR_EXPR
3169 && is_gimple_min_invariant (op0))
3170 return build_invariant_address (TREE_TYPE (op0),
3171 TREE_OPERAND (op0, 0),
3172 TREE_INT_CST_LOW (op1));
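/* For example (illustrative), with op0 == &a and op1 == 4 the code
   above returns the invariant address &MEM[(void *)&a + 4B], which
   later propagation can use directly. */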
3174 /* Avoid folding if nothing changed. */
3175 if (op0 == gimple_assign_rhs1 (stmt)
3176 && op1 == gimple_assign_rhs2 (stmt))
3177 return NULL_TREE;
3179 fold_defer_overflow_warnings ();
3181 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3182 if (result)
3183 STRIP_USELESS_TYPE_CONVERSION (result);
3185 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3186 stmt, 0);
3188 /* Make sure result is not a complex expression consisting
3189 of operations on operations (i.e. (a + b) + (a + c)).
3190 Otherwise, we will end up with unbounded expressions if
3191 fold does anything at all. */
3192 if (result && valid_gimple_rhs_p (result))
3193 return result;
3195 return NULL_TREE;
3198 /* Simplify the unary expression RHS, and return the result if
3199 simplified. */
3201 static tree
3202 simplify_unary_expression (gimple stmt)
3204 tree result = NULL_TREE;
3205 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3206 enum tree_code code = gimple_assign_rhs_code (stmt);
3208 /* We handle some tcc_reference codes here that are all
3209 GIMPLE_ASSIGN_SINGLE codes. */
3210 if (code == REALPART_EXPR
3211 || code == IMAGPART_EXPR
3212 || code == VIEW_CONVERT_EXPR
3213 || code == BIT_FIELD_REF)
3214 op0 = TREE_OPERAND (op0, 0);
3216 if (TREE_CODE (op0) != SSA_NAME)
3217 return NULL_TREE;
3219 orig_op0 = op0;
3220 if (VN_INFO (op0)->has_constants)
3221 op0 = valueize_expr (vn_get_expr_for (op0));
3222 else if (CONVERT_EXPR_CODE_P (code)
3223 || code == REALPART_EXPR
3224 || code == IMAGPART_EXPR
3225 || code == VIEW_CONVERT_EXPR
3226 || code == BIT_FIELD_REF)
3228 /* We want to do tree-combining on conversion-like expressions.
3229 Make sure we feed only SSA_NAMEs or constants to fold though. */
3230 tree tem = valueize_expr (vn_get_expr_for (op0));
3231 if (UNARY_CLASS_P (tem)
3232 || BINARY_CLASS_P (tem)
3233 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3234 || TREE_CODE (tem) == SSA_NAME
3235 || TREE_CODE (tem) == CONSTRUCTOR
3236 || is_gimple_min_invariant (tem))
3237 op0 = tem;
3240 /* Avoid folding if nothing changed, but remember the expression. */
3241 if (op0 == orig_op0)
3242 return NULL_TREE;
3244 if (code == BIT_FIELD_REF)
3246 tree rhs = gimple_assign_rhs1 (stmt);
3247 result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3248 op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3250 else
3251 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3252 if (result)
3254 STRIP_USELESS_TYPE_CONVERSION (result);
3255 if (valid_gimple_rhs_p (result))
3256 return result;
3259 return NULL_TREE;
3262 /* Try to simplify RHS using equivalences and constant folding. */
3264 static tree
3265 try_to_simplify (gimple stmt)
3267 enum tree_code code = gimple_assign_rhs_code (stmt);
3268 tree tem;
3270 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
3271 in this case, there is no point in doing extra work. */
3272 if (code == SSA_NAME)
3273 return NULL_TREE;
3275 /* First try constant folding based on our current lattice. */
3276 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize);
3277 if (tem
3278 && (TREE_CODE (tem) == SSA_NAME
3279 || is_gimple_min_invariant (tem)))
3280 return tem;
3282 /* If that didn't work try combining multiple statements. */
3283 switch (TREE_CODE_CLASS (code))
3285 case tcc_reference:
3286 /* Fallthrough for some unary codes that can operate on registers. */
3287 if (!(code == REALPART_EXPR
3288 || code == IMAGPART_EXPR
3289 || code == VIEW_CONVERT_EXPR
3290 || code == BIT_FIELD_REF))
3291 break;
3292 /* We could do a little more with unary ops, if they expand
3293 into binary ops, but it's debatable whether it is worth it. */
3294 case tcc_unary:
3295 return simplify_unary_expression (stmt);
3297 case tcc_comparison:
3298 case tcc_binary:
3299 return simplify_binary_expression (stmt);
3301 default:
3302 break;
3305 return NULL_TREE;
3308 /* Visit and value number USE, return true if the value number
3309 changed. */
3311 static bool
3312 visit_use (tree use)
3314 bool changed = false;
3315 gimple stmt = SSA_NAME_DEF_STMT (use);
3317 mark_use_processed (use);
3319 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3320 if (dump_file && (dump_flags & TDF_DETAILS)
3321 && !SSA_NAME_IS_DEFAULT_DEF (use))
3323 fprintf (dump_file, "Value numbering ");
3324 print_generic_expr (dump_file, use, 0);
3325 fprintf (dump_file, " stmt = ");
3326 print_gimple_stmt (dump_file, stmt, 0, 0);
3329 /* Handle uninitialized uses. */
3330 if (SSA_NAME_IS_DEFAULT_DEF (use))
3331 changed = set_ssa_val_to (use, use);
3332 else
3334 if (gimple_code (stmt) == GIMPLE_PHI)
3335 changed = visit_phi (stmt);
3336 else if (gimple_has_volatile_ops (stmt))
3337 changed = defs_to_varying (stmt);
3338 else if (is_gimple_assign (stmt))
3340 enum tree_code code = gimple_assign_rhs_code (stmt);
3341 tree lhs = gimple_assign_lhs (stmt);
3342 tree rhs1 = gimple_assign_rhs1 (stmt);
3343 tree simplified;
3345 /* Shortcut for copies. Simplifying copies is pointless,
3346 since we copy the expression and value they represent. */
3347 if (code == SSA_NAME
3348 && TREE_CODE (lhs) == SSA_NAME)
3350 changed = visit_copy (lhs, rhs1);
3351 goto done;
3353 simplified = try_to_simplify (stmt);
3354 if (simplified)
3356 if (dump_file && (dump_flags & TDF_DETAILS))
3358 fprintf (dump_file, "RHS ");
3359 print_gimple_expr (dump_file, stmt, 0, 0);
3360 fprintf (dump_file, " simplified to ");
3361 print_generic_expr (dump_file, simplified, 0);
3362 if (TREE_CODE (lhs) == SSA_NAME)
3363 fprintf (dump_file, " has constants %d\n",
3364 expr_has_constants (simplified));
3365 else
3366 fprintf (dump_file, "\n");
3369 /* Setting value numbers to constants will occasionally
3370 screw up phi congruence because constants are not
3371 uniquely associated with a single ssa name that can be
3372 looked up. */
3373 if (simplified
3374 && is_gimple_min_invariant (simplified)
3375 && TREE_CODE (lhs) == SSA_NAME)
3377 VN_INFO (lhs)->expr = simplified;
3378 VN_INFO (lhs)->has_constants = true;
3379 changed = set_ssa_val_to (lhs, simplified);
3380 goto done;
3382 else if (simplified
3383 && TREE_CODE (simplified) == SSA_NAME
3384 && TREE_CODE (lhs) == SSA_NAME)
3386 changed = visit_copy (lhs, simplified);
3387 goto done;
3389 else if (simplified)
3391 if (TREE_CODE (lhs) == SSA_NAME)
3393 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3394 /* We have to unshare the expression or else
3395 valueizing may change the IL stream. */
3396 VN_INFO (lhs)->expr = unshare_expr (simplified);
3399 else if (stmt_has_constants (stmt)
3400 && TREE_CODE (lhs) == SSA_NAME)
3401 VN_INFO (lhs)->has_constants = true;
3402 else if (TREE_CODE (lhs) == SSA_NAME)
3404 /* We reset expr and constantness here because we may
3405 have been value numbering optimistically, and
3406 iterating. They may become non-constant in this case,
3407 even if they were optimistically constant. */
3409 VN_INFO (lhs)->has_constants = false;
3410 VN_INFO (lhs)->expr = NULL_TREE;
3413 if ((TREE_CODE (lhs) == SSA_NAME
3414 /* We can substitute SSA_NAMEs that are live over
3415 abnormal edges with their constant value. */
3416 && !(gimple_assign_copy_p (stmt)
3417 && is_gimple_min_invariant (rhs1))
3418 && !(simplified
3419 && is_gimple_min_invariant (simplified))
3420 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3421 /* Stores or copies from SSA_NAMEs that are live over
3422 abnormal edges are a problem. */
3423 || (code == SSA_NAME
3424 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3425 changed = defs_to_varying (stmt);
3426 else if (REFERENCE_CLASS_P (lhs)
3427 || DECL_P (lhs))
3428 changed = visit_reference_op_store (lhs, rhs1, stmt);
3429 else if (TREE_CODE (lhs) == SSA_NAME)
3431 if ((gimple_assign_copy_p (stmt)
3432 && is_gimple_min_invariant (rhs1))
3433 || (simplified
3434 && is_gimple_min_invariant (simplified)))
3436 VN_INFO (lhs)->has_constants = true;
3437 if (simplified)
3438 changed = set_ssa_val_to (lhs, simplified);
3439 else
3440 changed = set_ssa_val_to (lhs, rhs1);
3442 else
3444 /* First try to lookup the simplified expression. */
3445 if (simplified)
3447 enum gimple_rhs_class rhs_class;
3450 rhs_class = get_gimple_rhs_class (TREE_CODE (simplified));
3451 if ((rhs_class == GIMPLE_UNARY_RHS
3452 || rhs_class == GIMPLE_BINARY_RHS
3453 || rhs_class == GIMPLE_TERNARY_RHS)
3454 && valid_gimple_rhs_p (simplified))
3456 tree result = vn_nary_op_lookup (simplified, NULL);
3457 if (result)
3459 changed = set_ssa_val_to (lhs, result);
3460 goto done;
3465 /* Otherwise visit the original statement. */
3466 switch (vn_get_stmt_kind (stmt))
3468 case VN_NARY:
3469 changed = visit_nary_op (lhs, stmt);
3470 break;
3471 case VN_REFERENCE:
3472 changed = visit_reference_op_load (lhs, rhs1, stmt);
3473 break;
3474 default:
3475 changed = defs_to_varying (stmt);
3476 break;
3480 else
3481 changed = defs_to_varying (stmt);
3483 else if (is_gimple_call (stmt))
3485 tree lhs = gimple_call_lhs (stmt);
3487 /* ??? We could try to simplify calls. */
3489 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3491 if (stmt_has_constants (stmt))
3492 VN_INFO (lhs)->has_constants = true;
3493 else
3495 /* We reset expr and constantness here because we may
3496 have been value numbering optimistically, and
3497 iterating. They may become non-constant in this case,
3498 even if they were optimistically constant. */
3499 VN_INFO (lhs)->has_constants = false;
3500 VN_INFO (lhs)->expr = NULL_TREE;
3503 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3505 changed = defs_to_varying (stmt);
3506 goto done;
3510 if (!gimple_call_internal_p (stmt)
3511 && (/* Calls to the same function with the same vuse
3512 and the same operands do not necessarily return the same
3513 value, unless they're pure or const. */
3514 gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
3515 /* If calls have a vdef, subsequent calls won't have
3516 the same incoming vuse. So, if 2 calls with vdef have the
3517 same vuse, we know they're not subsequent.
3518 We can give two non-subsequent calls to the same function
3519 with the same vuse and the same operands the same value
3520 number, because there is no code in the program that can
3521 compare the two values... */
3522 || (gimple_vdef (stmt)
3523 /* ... unless the call returns a pointer which does
3524 not alias with anything else, in which case the
3525 information that the values are distinct is encoded
3526 in the IL. */
3527 && !(gimple_call_return_flags (stmt) & ERF_NOALIAS))))
3528 changed = visit_reference_op_call (lhs, stmt);
3529 else
3530 changed = defs_to_varying (stmt);
3532 else
3533 changed = defs_to_varying (stmt);
3535 done:
3536 return changed;
3539 /* Compare two operands by reverse postorder index. */
3541 static int
3542 compare_ops (const void *pa, const void *pb)
3544 const tree opa = *((const tree *)pa);
3545 const tree opb = *((const tree *)pb);
3546 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3547 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3548 basic_block bba;
3549 basic_block bbb;
3551 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3552 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3553 else if (gimple_nop_p (opstmta))
3554 return -1;
3555 else if (gimple_nop_p (opstmtb))
3556 return 1;
3558 bba = gimple_bb (opstmta);
3559 bbb = gimple_bb (opstmtb);
3561 if (!bba && !bbb)
3562 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3563 else if (!bba)
3564 return -1;
3565 else if (!bbb)
3566 return 1;
3568 if (bba == bbb)
3570 if (gimple_code (opstmta) == GIMPLE_PHI
3571 && gimple_code (opstmtb) == GIMPLE_PHI)
3572 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3573 else if (gimple_code (opstmta) == GIMPLE_PHI)
3574 return -1;
3575 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3576 return 1;
3577 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3578 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3579 else
3580 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3582 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3585 /* Sort an array containing members of a strongly connected component
3586 SCC so that the members are ordered by RPO number.
3587 This means that when the sort is complete, iterating through the
3588 array will give you the members in RPO order. */
3590 static void
3591 sort_scc (vec<tree> scc)
3593 scc.qsort (compare_ops);
3596 /* Insert the no longer used nary ONARY to the hash INFO. */
3598 static void
3599 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3601 size_t size = sizeof_vn_nary_op (onary->length);
3602 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3603 &info->nary_obstack);
3604 memcpy (nary, onary, size);
3605 vn_nary_op_insert_into (nary, info->nary, false);
3608 /* Insert the no longer used phi OPHI to the hash INFO. */
3610 static void
3611 copy_phi (vn_phi_t ophi, vn_tables_t info)
3613 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3614 void **slot;
3615 memcpy (phi, ophi, sizeof (*phi));
3616 ophi->phiargs.create (0);
3617 slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
3618 gcc_assert (!*slot);
3619 *slot = phi;
3622 /* Insert the no longer used reference OREF to the hash INFO. */
3624 static void
3625 copy_reference (vn_reference_t oref, vn_tables_t info)
3627 vn_reference_t ref;
3628 void **slot;
3629 ref = (vn_reference_t) pool_alloc (info->references_pool);
3630 memcpy (ref, oref, sizeof (*ref));
3631 oref->operands.create (0);
3632 slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
3633 INSERT);
3634 if (*slot)
3635 free_reference (*slot);
3636 *slot = ref;
3639 /* Process a strongly connected component in the SSA graph. */
3641 static void
3642 process_scc (vec<tree> scc)
3644 tree var;
3645 unsigned int i;
3646 unsigned int iterations = 0;
3647 bool changed = true;
3648 htab_iterator hi;
3649 vn_nary_op_t nary;
3650 vn_phi_t phi;
3651 vn_reference_t ref;
3653 /* If the SCC has a single member, just visit it. */
3654 if (scc.length () == 1)
3656 tree use = scc[0];
3657 if (VN_INFO (use)->use_processed)
3658 return;
3659 /* We need to make sure it doesn't form a cycle itself, which can
3660 happen for self-referential PHI nodes. In that case we would
3661 end up inserting an expression with VN_TOP operands into the
3662 valid table which makes us derive bogus equivalences later.
3663 The cheapest way to check this is to assume it for all PHI nodes. */
3664 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3665 /* Fallthru to iteration. */ ;
3666 else
3668 visit_use (use);
3669 return;
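/* For example (illustrative), the self-referential

     x_1 = PHI <0(2), x_1(3)>

   forms such a single-member cycle: visiting it just once could
   record an expression with a VN_TOP operand in the valid table, so
   it is iterated below like any larger SCC. */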
3673 /* Iterate over the SCC with the optimistic table until it stops
3674 changing. */
3675 current_info = optimistic_info;
3676 while (changed)
3678 changed = false;
3679 iterations++;
3680 if (dump_file && (dump_flags & TDF_DETAILS))
3681 fprintf (dump_file, "Starting iteration %d\n", iterations);
3682 /* As we are value-numbering optimistically we have to
3683 clear the expression tables and the simplified expressions
3684 in each iteration until we converge. */
3685 htab_empty (optimistic_info->nary);
3686 htab_empty (optimistic_info->phis);
3687 htab_empty (optimistic_info->references);
3688 obstack_free (&optimistic_info->nary_obstack, NULL);
3689 gcc_obstack_init (&optimistic_info->nary_obstack);
3690 empty_alloc_pool (optimistic_info->phis_pool);
3691 empty_alloc_pool (optimistic_info->references_pool);
3692 FOR_EACH_VEC_ELT (scc, i, var)
3693 VN_INFO (var)->expr = NULL_TREE;
3694 FOR_EACH_VEC_ELT (scc, i, var)
3695 changed |= visit_use (var);
3698 statistics_histogram_event (cfun, "SCC iterations", iterations);
3700 /* Finally, copy the contents of the no longer used optimistic
3701 table to the valid table. */
3702 FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
3703 copy_nary (nary, valid_info);
3704 FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
3705 copy_phi (phi, valid_info);
3706 FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
3707 copy_reference (ref, valid_info);
3709 current_info = valid_info;
3713 /* Pop the components of the found SCC for NAME off the SCC stack
3714 and process them. Returns true if all went well, false if
3715 we run into resource limits. */
3717 static bool
3718 extract_and_process_scc_for_name (tree name)
3720 vec<tree> scc = vNULL;
3721 tree x;
3723 /* Found an SCC, pop the components off the SCC stack and
3724 process them. */
3727 x = sccstack.pop ();
3729 VN_INFO (x)->on_sccstack = false;
3730 scc.safe_push (x);
3731 } while (x != name);
3733 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
3734 if (scc.length ()
3735 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3737 if (dump_file)
3738 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3739 "SCC size %u exceeding %u\n", scc.length (),
3740 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3742 scc.release ();
3743 return false;
3746 if (scc.length () > 1)
3747 sort_scc (scc);
3749 if (dump_file && (dump_flags & TDF_DETAILS))
3750 print_scc (dump_file, scc);
3752 process_scc (scc);
3754 scc.release ();
3756 return true;
3759 /* Depth-first search on NAME to discover and process SCCs in the SSA
3760 graph.
3761 Execution of this algorithm relies on the fact that the SCCs are
3762 popped off the stack in topological order.
3763 Returns true if successful, false if we stopped processing SCCs due
3764 to resource constraints. */
3766 static bool
3767 DFS (tree name)
3769 vec<ssa_op_iter> itervec = vNULL;
3770 vec<tree> namevec = vNULL;
3771 use_operand_p usep = NULL;
3772 gimple defstmt;
3773 tree use;
3774 ssa_op_iter iter;
3776 start_over:
3777 /* SCC info */
3778 VN_INFO (name)->dfsnum = next_dfs_num++;
3779 VN_INFO (name)->visited = true;
3780 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3782 sccstack.safe_push (name);
3783 VN_INFO (name)->on_sccstack = true;
3784 defstmt = SSA_NAME_DEF_STMT (name);
3786 /* Recursively DFS on our operands, looking for SCCs. */
3787 if (!gimple_nop_p (defstmt))
3789 /* Push a new iterator. */
3790 if (gimple_code (defstmt) == GIMPLE_PHI)
3791 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3792 else
3793 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3795 else
3796 clear_and_done_ssa_iter (&iter);
3798 while (1)
3800 /* If we are done processing uses of a name, go up the stack
3801 of iterators and process SCCs as we found them. */
3802 if (op_iter_done (&iter))
3804 /* See if we found an SCC. */
3805 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3806 if (!extract_and_process_scc_for_name (name))
3808 namevec.release ();
3809 itervec.release ();
3810 return false;
3813 /* Check if we are done. */
3814 if (namevec.is_empty ())
3816 namevec.release ();
3817 itervec.release ();
3818 return true;
3821 /* Restore the last use walker and continue walking there. */
3822 use = name;
3823 name = namevec.pop ();
3824 memcpy (&iter, &itervec.last (),
3825 sizeof (ssa_op_iter));
3826 itervec.pop ();
3827 goto continue_walking;
3830 use = USE_FROM_PTR (usep);
3832 /* Since we handle phi nodes, we will sometimes get
3833 invariants in the use expression. */
3834 if (TREE_CODE (use) == SSA_NAME)
3836 if (! (VN_INFO (use)->visited))
3838 /* Recurse by pushing the current use walking state on
3839 the stack and starting over. */
3840 itervec.safe_push (iter);
3841 namevec.safe_push (name);
3842 name = use;
3843 goto start_over;
3845 continue_walking:
3846 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3847 VN_INFO (use)->low);
3849 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3850 && VN_INFO (use)->on_sccstack)
3852 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3853 VN_INFO (name)->low);
3857 usep = op_iter_next_use (&iter);
3861 /* Allocate a value number table. */
3863 static void
3864 allocate_vn_table (vn_tables_t table)
3866 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3867 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3868 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3869 free_reference);
3871 gcc_obstack_init (&table->nary_obstack);
3872 table->phis_pool = create_alloc_pool ("VN phis",
3873 sizeof (struct vn_phi_s),
3874 30);
3875 table->references_pool = create_alloc_pool ("VN references",
3876 sizeof (struct vn_reference_s),
3877 30);
3880 /* Free a value number table. */
3882 static void
3883 free_vn_table (vn_tables_t table)
3885 htab_delete (table->phis);
3886 htab_delete (table->nary);
3887 htab_delete (table->references);
3888 obstack_free (&table->nary_obstack, NULL);
3889 free_alloc_pool (table->phis_pool);
3890 free_alloc_pool (table->references_pool);
3893 static void
3894 init_scc_vn (void)
3896 size_t i;
3897 int j;
3898 int *rpo_numbers_temp;
3900 calculate_dominance_info (CDI_DOMINATORS);
3901 sccstack.create (0);
3902 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3903 free);
3905 constant_value_ids = BITMAP_ALLOC (NULL);
3907 next_dfs_num = 1;
3908 next_value_id = 1;
3910 vn_ssa_aux_table.create (num_ssa_names + 1);
3911 /* The create above doesn't actually grow the vector to the right
3912 size, it just preallocates the space to do so. */
3913 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
3914 gcc_obstack_init (&vn_ssa_aux_obstack);
3916 shared_lookup_phiargs.create (0);
3917 shared_lookup_references.create (0);
3918 rpo_numbers = XNEWVEC (int, last_basic_block);
3919 rpo_numbers_temp = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
3920 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3922 /* rpo_numbers_temp holds the RPO ordering: rpo_numbers_temp[i] = b
3923 means that the block with index b is the i'th block in RPO order.
3924 We want to map block indices to RPO numbers, so invert this array. */
3925 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3926 rpo_numbers[rpo_numbers_temp[j]] = j;
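/* For example (illustrative), if rpo_numbers_temp is {2, 5, 3}, then
   after this loop rpo_numbers[2] == 0, rpo_numbers[5] == 1 and
   rpo_numbers[3] == 2. */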
3928 XDELETE (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
        {
          VN_INFO_GET (name)->valnum = VN_TOP;
          VN_INFO (name)->expr = NULL_TREE;
          VN_INFO (name)->value_id = 0;
        }
    }

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}
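
/* Free all memory used by SCC value numbering, releasing any SSA names
   that were created purely for value numbering purposes.  */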
void
free_scc_vn (void)
{
  size_t i;

  htab_delete (constant_to_value_id);
  BITMAP_FREE (constant_value_ids);
  shared_lookup_phiargs.release ();
  shared_lookup_references.release ();
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->needs_insertion)
        release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  vn_ssa_aux_table.release ();

  sccstack.release ();
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}

/* Set *ID according to RESULT: an SSA name uses its recorded value id,
   an invariant gets (or allocates) a constant value id, and anything
   else receives a fresh value id.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result && TREE_CODE (result) == SSA_NAME)
    *id = VN_INFO (result)->value_id;
  else if (result && is_gimple_min_invariant (result))
    *id = get_or_alloc_constant_value_id (result);
  else
    *id = get_next_value_id ();
}

/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  htab_iterator hi;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HTAB_ELEMENT (valid_info->nary,
                         vno, vn_nary_op_t, hi)
    set_value_id_for_result (vno->result, &vno->value_id);

  FOR_EACH_HTAB_ELEMENT (valid_info->phis,
                         vp, vn_phi_t, hi)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HTAB_ELEMENT (valid_info->references,
                         vr, vn_reference_t, hi)
    set_value_id_for_result (vr->result, &vr->value_id);
}

/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
   how we use the alias oracle walking during the VN process.  */

bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  size_t i;
  tree param;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();
  current_info = valid_info;
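
  /* Default definitions of incoming parameters have no defining
     statement we could walk, so value number them to themselves.  */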
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    {
      tree def = ssa_default_def (cfun, param);
      if (def)
        VN_INFO (def)->valnum = def;
    }
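
  /* DFS from every not-yet-visited SSA name that has uses, value
     numbering the SCCs of the SSA graph as they are found.  DFS
     returns false if it hits a resource limit, e.g. an SCC larger
     than the --param sccvn-max-scc-size bound.  */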
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->visited == false
          && !has_zero_uses (name))
        if (!DFS (name))
          {
            free_scc_vn ();
            return false;
          }
    }

  /* Initialize the value ids: a name that is its own value, or is
     still valued to VN_TOP, gets a fresh value id; a name valued to
     an invariant shares that constant's value id.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
        continue;
      info = VN_INFO (name);
      if (info->valnum == name
          || info->valnum == VN_TOP)
        info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
        info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate the value ids from the class leaders to all other
     members; a single pass suffices because valnum refers directly to
     the leader of a value equivalence class, never to an intermediate
     copy.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
        continue;
      info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
          && info->valnum != name
          && info->value_id != VN_INFO (info->valnum)->value_id)
        info->value_id = VN_INFO (info->valnum)->value_id;
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          if (name
              && VN_INFO (name)->visited
              && SSA_VAL (name) != name)
            {
              print_generic_expr (dump_file, name, 0);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, SSA_VAL (name), 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  return true;
}
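
/* run_scc_vn is the entry point used by the FRE and PRE passes, which
   pair it with a later call to free_scc_vn once the hash tables and
   value ids are no longer needed.  */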

/* Return one more than the largest value id we have ever handed out,
   i.e. an exclusive upper bound on all value ids in use.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}

/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}

/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (type)
               && TYPE_OVERFLOW_TRAPS (type))
        honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
                                       honor_trapv,
                                       honor_nans, honor_snans, rhs2,
                                       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}