/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "gimple-fold.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.
   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.
*/

/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

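/* Table mapping constant trees to their value ids, and the set of
   value ids that have been assigned to constants.  */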
static htab_t constant_to_value_id;
static bitmap constant_value_ids;

/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;

/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

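/* Return the current value number of SSA name X.  */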
#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;

DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
                                SSA_NAME_VERSION (name));
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), value);
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}

/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;
  enum tree_code code;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is not an assignment use it directly.  */
  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  code = gimple_assign_rhs_code (def_stmt);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      if ((code == REALPART_EXPR
           || code == IMAGPART_EXPR
           || code == VIEW_CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
                                      0)) == SSA_NAME)
        expr = fold_build1 (code,
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (code,
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (code,
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    case tcc_exceptional:
      if (code == CONSTRUCTOR
          && TREE_CODE
               (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
        expr = gimple_assign_rhs1 (def_stmt);
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}

/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, INSERT);
  if (*slot)
    return ((vn_constant_t)*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = (void *) vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
          /* We do not care for differences in type qualification.  */
          && (vro1->type == vro2->type
              || (vro1->type && vro2->type
                  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
                                         TYPE_MAIN_VARIANT (vro2->type))))
          && expressions_equal_p (vro1->op0, vro2->op0)
          && expressions_equal_p (vro1->op1, vro2->op1)
          && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

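  /* Accumulate runs of constant offsets and hash the sum only when a
     variable-offset operand is reached; hash the base object of an
     ADDR_EXPR operand of a MEM_REF directly.  This way equivalent
     references with different operand spellings hash the same.  */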
  FOR_EACH_VEC_ELT (vn_reference_op_s, vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
        deref = true;
      else if (vro->opcode != ADDR_EXPR)
        deref = false;
      if (vro->off != -1)
        {
          if (off == -1)
            off = 0;
          off += vro->off;
        }
      else
        {
          if (off != -1
              && off != 0)
            result = iterative_hash_hashval_t (off, result);
          off = -1;
          if (deref
              && vro->opcode == ADDR_EXPR)
            {
              if (vro->op0)
                {
                  tree op = TREE_OPERAND (vro->op0, 0);
                  result = iterative_hash_hashval_t (TREE_CODE (op), result);
                  result = iterative_hash_expr (op, result);
                }
            }
          else
            result = vn_reference_op_compute_hash (vro, result);
        }
    }
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}

/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  unsigned i, j;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
        return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
           && (TYPE_PRECISION (vr1->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
           && (TYPE_PRECISION (vr2->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
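  /* Compare the two operand vectors in lock-step, matching runs of
     constant-offset operands by their accumulated offsets and looking
     through ADDR_EXPR operands of a MEM_REF, mirroring what
     vn_reference_compute_hash does.  */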
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
        {
          if (vro1->opcode == MEM_REF)
            deref1 = true;
          if (vro1->off == -1)
            break;
          off1 += vro1->off;
        }
      for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
        {
          if (vro2->opcode == MEM_REF)
            deref2 = true;
          if (vro2->off == -1)
            break;
          off2 += vro2->off;
        }
      if (off1 != off2)
        return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
        {
          memset (&tem1, 0, sizeof (tem1));
          tem1.op0 = TREE_OPERAND (vro1->op0, 0);
          tem1.type = TREE_TYPE (tem1.op0);
          tem1.opcode = TREE_CODE (tem1.op0);
          vro1 = &tem1;
          deref1 = false;
        }
      if (deref2 && vro2->opcode == ADDR_EXPR)
        {
          memset (&tem2, 0, sizeof (tem2));
          tem2.op0 = TREE_OPERAND (vro2->op0, 0);
          tem2.type = TREE_TYPE (tem2.op0);
          tem2.opcode = TREE_CODE (tem2.op0);
          vro2 = &tem2;
          deref2 = false;
        }
      if (deref1 != deref2)
        return false;
      if (!vn_reference_op_eq (vro1, vro2))
        return false;
      ++j;
      ++i;
    }
  while (VEC_length (vn_reference_op_s, vr1->operands) != i
         || VEC_length (vn_reference_op_s, vr2->operands) != j);

  return true;
}

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
        {
        case WITH_SIZE_EXPR:
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.off = 0;
          break;
        case MEM_REF:
          /* The base address gets its own vn_reference_op_s structure.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          if (host_integerp (TREE_OPERAND (ref, 1), 0))
            temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          {
            tree this_offset = component_ref_field_offset (ref);
            if (this_offset
                && TREE_CODE (this_offset) == INTEGER_CST)
              {
                tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
                if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
                  {
                    double_int off
                      = double_int_add (tree_to_double_int (this_offset),
                                        double_int_rshift
                                          (tree_to_double_int (bit_offset),
                                           BITS_PER_UNIT == 8
                                           ? 3 : exact_log2 (BITS_PER_UNIT),
                                           HOST_BITS_PER_DOUBLE_INT, true));
                    if (double_int_fits_in_shwi_p (off))
                      temp.off = off.low;
                  }
              }
          }
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
          if (TREE_CODE (temp.op0) == INTEGER_CST
              && TREE_CODE (temp.op1) == INTEGER_CST
              && TREE_CODE (temp.op2) == INTEGER_CST)
            {
              double_int off = tree_to_double_int (temp.op0);
              off = double_int_add (off,
                                    double_int_neg
                                      (tree_to_double_int (temp.op1)));
              off = double_int_mul (off, tree_to_double_int (temp.op2));
              if (double_int_fits_in_shwi_p (off))
                temp.off = off.low;
            }
          break;
        case VAR_DECL:
          if (DECL_HARD_REGISTER (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthru.  */
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
          /* Canonicalize decls to MEM[&decl] which is what we end up with
             when valueizing MEM[ptr] with ptr = &decl.  */
          temp.opcode = MEM_REF;
          temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
          temp.off = 0;
          VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
          temp.opcode = ADDR_EXPR;
          temp.op0 = build_fold_addr_expr (ref);
          temp.type = TREE_TYPE (temp.op0);
          temp.off = -1;
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case FIXED_CST:
        case CONSTRUCTOR:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
        /* These are only interesting for their operands, their
           existence, and their type.  They will never be the last
           ref in the chain of references (IE they require an
           operand), so we don't have to put anything
           for op* as it will be handled by the iteration.  */
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          temp.off = 0;
          break;
        case IMAGPART_EXPR:
          /* This is only interesting for its constant offset.  */
          temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
          break;
        default:
          gcc_unreachable ();
        }
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
          || TREE_CODE (ref) == WITH_SIZE_EXPR
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}

/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
        size = -1;
      else
        size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case ADDR_EXPR:
          /* Apart from ADDR_EXPR arguments to MEM_REF.  */
          if (base != NULL_TREE
              && TREE_CODE (base) == MEM_REF
              && op->op0
              && DECL_P (TREE_OPERAND (op->op0, 0)))
            {
              vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
              base = TREE_OPERAND (op->op0, 0);
              if (pop->off == -1)
                {
                  max_size = -1;
                  offset = 0;
                }
              else
                offset += pop->off * BITS_PER_UNIT;
              op0_p = NULL;
              break;
            }
          /* Fallthru.  */
        case CALL_EXPR:
          return false;

        /* Record the base objects.  */
        case MEM_REF:
          base_alias_set = get_deref_alias_set (op->op0);
          *op0_p = build2 (MEM_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          op0_p = NULL;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += tree_low_cst (op->op1, 0);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */

            if (op->op1
                || !host_integerp (DECL_FIELD_OFFSET (field), 1))
              max_size = -1;
            else
              {
                offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
                           * BITS_PER_UNIT);
                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (!host_integerp (op->op0, 0)
              || !host_integerp (op->op1, 0)
              || !host_integerp (op->op2, 0))
            max_size = -1;
          else
            {
              HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
              hindex -= TREE_INT_CST_LOW (op->op1);
              hindex *= TREE_INT_CST_LOW (op->op2);
              hindex *= BITS_PER_UNIT;
              offset += hindex;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  return true;
}

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  temp.off = -1;
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
                            unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  tree addr_base;
  HOST_WIDE_INT addr_offset;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
                                             &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != op->op0)
    {
      double_int off = tree_to_double_int (mem_op->op0);
      off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
      off = double_int_add (off, shwi_to_double_int (addr_offset));
      mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (host_integerp (mem_op->op0, 0))
        mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
      else
        mem_op->off = -1;
    }
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
                                     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  gimple def_stmt;
  enum tree_code code;
  double_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = tree_to_double_int (mem_op->op0);
  off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
                                                 &addr_offset);
      if (!addr_base
          || TREE_CODE (addr_base) != MEM_REF)
        return;

      off = double_int_add (off, shwi_to_double_int (addr_offset));
      off = double_int_add (off, mem_ref_offset (addr_base));
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
          || TREE_CODE (ptroff) != INTEGER_CST)
        return;

      off = double_int_add (off, tree_to_double_int (ptroff));
      op->op0 = ptr;
    }

  mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (host_integerp (mem_op->op0, 0))
    mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}

/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  VEC (vn_reference_op_s, heap) *operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = VEC_index (vn_reference_op_s, operands, 0);
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && VEC_length (vn_reference_op_s, operands) >= 2
      && VEC_length (vn_reference_op_s, operands) <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (VEC_length (vn_reference_op_s, operands) > 2)
        arg1 = VEC_index (vn_reference_op_s, operands, 2);
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
          || (arg0->opcode == ADDR_EXPR
              && is_gimple_min_invariant (arg0->op0)))
        anyconst = true;
      if (arg1
          && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
              || (arg1->opcode == ADDR_EXPR
                  && is_gimple_min_invariant (arg1->op0))))
        anyconst = true;
      if (anyconst)
        {
          tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
                                         arg1 ? 2 : 1,
                                         arg0->op0,
                                         arg1 ? arg1->op0 : NULL);
          if (folded
              && TREE_CODE (folded) == NOP_EXPR)
            folded = TREE_OPERAND (folded, 0);
          if (folded
              && is_gimple_min_invariant (folded))
            return folded;
        }
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
           && TREE_CODE (op->op0) == INTEGER_CST
           && integer_zerop (op->op1)
           && VEC_length (vn_reference_op_s, operands) == 2)
    {
      vn_reference_op_t arg0;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (arg0->opcode == STRING_CST
          && (TYPE_MODE (op->type)
              == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
          && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
          && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
          && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
        return build_int_cst_type (op->type,
                                   (TREE_STRING_POINTER (arg0->op0)
                                    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}

/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs_1 (VEC (vn_reference_op_s, heap) *orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (vn_reference_op_s, orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          tree tem = SSA_VAL (vro->op0);
          if (tem != vro->op0)
            {
              *valueized_anything = true;
              vro->op0 = tem;
            }
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        {
          tree tem = SSA_VAL (vro->op1);
          if (tem != vro->op1)
            {
              *valueized_anything = true;
              vro->op1 = tem;
            }
        }
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        {
          tree tem = SSA_VAL (vro->op2);
          if (tem != vro->op2)
            {
              *valueized_anything = true;
              vro->op2 = tem;
            }
        }
      /* If it transforms from an SSA_NAME to an address, fold with
         a preceding indirect reference.  */
      if (i > 0
          && vro->op0
          && TREE_CODE (vro->op0) == ADDR_EXPR
          && VEC_index (vn_reference_op_s,
                        orig, i - 1)->opcode == MEM_REF)
        vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
               && vro->opcode == SSA_NAME
               && VEC_index (vn_reference_op_s,
                             orig, i - 1)->opcode == MEM_REF)
        vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
         one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
               && vro->off == -1
               && TREE_CODE (vro->op0) == INTEGER_CST
               && TREE_CODE (vro->op1) == INTEGER_CST
               && TREE_CODE (vro->op2) == INTEGER_CST)
        {
          double_int off = tree_to_double_int (vro->op0);
          off = double_int_add (off,
                                double_int_neg
                                  (tree_to_double_int (vro->op1)));
          off = double_int_mul (off, tree_to_double_int (vro->op2));
          if (double_int_fits_in_shwi_p (off))
            vro->off = off.low;
        }
    }

  return orig;
}

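/* Valueize the reference operands in ORIG, discarding whether anything
   was changed.  */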
static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}

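/* Shared scratch vector of reference operands, reused by the lookup
   routines to avoid allocating a fresh vector for every lookup.  */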
static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
                                              valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
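  /* If the lookup in the optimistic table fails, fall back to the
     facts already proven valid.  */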
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

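/* When non-NULL, *LAST_VUSE_PTR is updated with each VUSE visited
   during reference lookup.  VN_WALK_KIND controls whether and how
   lookups may walk and translate through the VUSE chain.  */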
static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}

/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the constant value CST.  */

static vn_reference_t
vn_reference_lookup_or_insert_constant_for_pieces (tree vuse,
                                                   alias_set_type set,
                                                   tree type,
                                                   VEC (vn_reference_op_s,
                                                        heap) *operands,
                                                   tree cst)
{
  struct vn_reference_s vr1;
  vn_reference_t result;
  vr1.vuse = vuse;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  return vn_reference_insert_pieces (vuse, set, type,
                                     VEC_copy (vn_reference_op_s, heap,
                                               operands), cst,
                                     get_or_alloc_constant_value_id (cst));
}

/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static VEC (vn_reference_op_s, heap) *lhs_ops = NULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definition's LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      VEC (vn_reference_op_s, heap) *tem;
      tree lhs = gimple_assign_lhs (def_stmt);
      bool valueized_anything = false;
      /* Avoid re-allocation overhead.  */
      VEC_truncate (vn_reference_op_s, lhs_ops, 0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      tem = lhs_ops;
      lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
      gcc_assert (lhs_ops == tem);
      if (valueized_anything)
        {
          lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
                                                      get_alias_set (lhs),
                                                      TREE_TYPE (lhs), lhs_ops);
          if (lhs_ref_ok
              && !refs_may_alias_p_1 (ref, &lhs_ref, true))
            return NULL;
        }
      else
        {
          ao_ref_init (&lhs_ref, lhs);
          lhs_ref_ok = true;
        }
    }

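  /* Compute the base, offset and maximum size of the reference being
     looked up.  */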
  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* We can't deduce anything useful from clobbers.  */
  if (gimple_clobber_p (def_stmt))
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
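      /* Check that the multiplication by 8 did not overflow the size
         computation.  */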
      if ((unsigned HOST_WIDE_INT)size2 / 8
          == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
          && maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          return vn_reference_lookup_or_insert_constant_for_pieces
                   (vuse, vr->set, vr->type, vr->operands, val);
        }
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          return vn_reference_lookup_or_insert_constant_for_pieces
                   (vuse, vr->set, vr->type, vr->operands, val);
        }
    }

  /* 3) Assignment from a constant.  We can use fold's native encode/interpret
     routines to extract the assigned bits.  */
  else if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
           && ref->size == maxsize
           && maxsize % BITS_PER_UNIT == 0
           && offset % BITS_PER_UNIT == 0
           && is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
          && maxsize2 == size2
          && size2 % BITS_PER_UNIT == 0
          && offset2 % BITS_PER_UNIT == 0
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          /* We support up to 512-bit values (for V8DFmode).  */
          unsigned char buffer[64];
          int len;

          len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
                                    buffer, sizeof (buffer));
          if (len > 0)
            {
              tree val = native_interpret_expr (vr->type,
                                                buffer
                                                + ((offset - offset2)
                                                   / BITS_PER_UNIT),
                                                ref->size / BITS_PER_UNIT);
              if (val)
                return vn_reference_lookup_or_insert_constant_for_pieces
                         (vuse, vr->set, vr->type, vr->operands, val);
            }
        }
    }

  /* 4) Assignment from an SSA name whose definition we may be able
     to access pieces from.  */
  else if (ref->size == maxsize
           && is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
    {
      tree rhs1 = gimple_assign_rhs1 (def_stmt);
      gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt2)
          && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
              || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
          && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
        {
          tree base2;
          HOST_WIDE_INT offset2, size2, maxsize2, off;
          base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                           &offset2, &size2, &maxsize2);
          off = offset - offset2;
          if (maxsize2 != -1
              && maxsize2 == size2
              && operand_equal_p (base, base2, 0)
              && offset2 <= offset
              && offset2 + size2 >= offset + maxsize)
            {
              tree val = NULL_TREE;
              HOST_WIDE_INT elsz
                = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
              if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
                {
                  if (off == 0)
                    val = gimple_assign_rhs1 (def_stmt2);
                  else if (off == elsz)
                    val = gimple_assign_rhs2 (def_stmt2);
                }
              else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
                       && off % elsz == 0)
                {
                  tree ctor = gimple_assign_rhs1 (def_stmt2);
                  unsigned i = off / elsz;
                  if (i < CONSTRUCTOR_NELTS (ctor))
                    {
                      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
                      if (compare_tree_int (elt->index, i) == 0)
                        val = elt->value;
                    }
                }
              if (val)
                return vn_reference_lookup_or_insert_constant_for_pieces
                         (vuse, vr->set, vr->type, vr->operands, val);
            }
        }
    }

  /* 5) For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      VEC (vn_reference_op_s, heap) *rhs = NULL;
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
        return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      if (maxsize2 == -1
          || (base != base2 && !operand_equal_p (base, base2, 0))
          || offset2 > offset
          || offset2 + size2 < offset + maxsize)
        return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
         contains valueized operands for the lhs.  */
      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
      j = VEC_length (vn_reference_op_s, lhs_ops) - 1;
      while (j >= 0 && i >= 0
             && vn_reference_op_eq (VEC_index (vn_reference_op_s,
                                               vr->operands, i),
                                    VEC_index (vn_reference_op_s, lhs_ops, j)))
        {
          i--;
          j--;
        }

      /* ??? The innermost op should always be a MEM_REF and we already
         checked that the assignment to the lhs kills vr.  Thus for
         aggregate copies using char[] types the vn_reference_op_eq
         may fail when comparing types for compatibility.  But we really
         don't care here - further lookups with the rewritten operands
         will simply fail if we messed up types too badly.  */
      if (j == 0 && i >= 0
          && VEC_index (vn_reference_op_s, lhs_ops, 0)->opcode == MEM_REF
          && VEC_index (vn_reference_op_s, lhs_ops, 0)->off != -1
          && (VEC_index (vn_reference_op_s, lhs_ops, 0)->off
              == VEC_index (vn_reference_op_s, vr->operands, i)->off))
        i--, j--;

      /* i now points to the first additional op.
         ??? LHS may not be completely contained in VR, one or more
         VIEW_CONVERT_EXPRs could be in its way.  We could at least
         try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
        return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
          > VEC_length (vn_reference_op_s, vr->operands))
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
                         i + 1 + VEC_length (vn_reference_op_s, rhs));
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands,
                      i + 1 + VEC_length (vn_reference_op_s, rhs));
      FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
        VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
      VEC_free (vn_reference_op_s, heap, rhs);
      vr->operands = valueize_refs (vr->operands);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* 6) For memcpy copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && is_gimple_reg_type (vr->type)
           /* ??? Handle BCOPY as well.  */
           && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
               || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
               || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
           && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
               || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
           && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
               || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
           && host_integerp (gimple_call_arg (def_stmt, 2), 1))
    {
      tree lhs, rhs;
      ao_ref r;
      HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
      vn_reference_op_s op;
      HOST_WIDE_INT at;

      /* Only handle non-variable, addressable refs.  */
      if (ref->size != maxsize
          || offset % BITS_PER_UNIT != 0
          || ref->size % BITS_PER_UNIT != 0)
        return (void *)-1;

      /* Extract a pointer base and an offset for the destination.  */
      lhs = gimple_call_arg (def_stmt, 0);
      lhs_offset = 0;
      if (TREE_CODE (lhs) == SSA_NAME)
        lhs = SSA_VAL (lhs);
      if (TREE_CODE (lhs) == ADDR_EXPR)
        {
          tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
                                                    &lhs_offset);
          if (!tem)
            return (void *)-1;
          if (TREE_CODE (tem) == MEM_REF
              && host_integerp (TREE_OPERAND (tem, 1), 1))
            {
              lhs = TREE_OPERAND (tem, 0);
              lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
            }
          else if (DECL_P (tem))
            lhs = build_fold_addr_expr (tem);
          else
            return (void *)-1;
        }
      if (TREE_CODE (lhs) != SSA_NAME
          && TREE_CODE (lhs) != ADDR_EXPR)
        return (void *)-1;

      /* Extract a pointer base and an offset for the source.  */
      rhs = gimple_call_arg (def_stmt, 1);
      rhs_offset = 0;
      if (TREE_CODE (rhs) == SSA_NAME)
        rhs = SSA_VAL (rhs);
      if (TREE_CODE (rhs) == ADDR_EXPR)
        {
          tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
                                                    &rhs_offset);
          if (!tem)
            return (void *)-1;
          if (TREE_CODE (tem) == MEM_REF
              && host_integerp (TREE_OPERAND (tem, 1), 1))
            {
              rhs = TREE_OPERAND (tem, 0);
              rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
            }
          else if (DECL_P (tem))
            rhs = build_fold_addr_expr (tem);
          else
            return (void *)-1;
        }
      if (TREE_CODE (rhs) != SSA_NAME
          && TREE_CODE (rhs) != ADDR_EXPR)
        return (void *)-1;

      copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));

      /* The bases of the destination and the references have to agree.  */
      if ((TREE_CODE (base) != MEM_REF
           && !DECL_P (base))
          || (TREE_CODE (base) == MEM_REF
              && (TREE_OPERAND (base, 0) != lhs
                  || !host_integerp (TREE_OPERAND (base, 1), 1)))
          || (DECL_P (base)
              && (TREE_CODE (lhs) != ADDR_EXPR
                  || TREE_OPERAND (lhs, 0) != base)))
        return (void *)-1;

      /* And the access has to be contained within the memcpy destination.  */
      at = offset / BITS_PER_UNIT;
      if (TREE_CODE (base) == MEM_REF)
        at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
      if (lhs_offset > at
          || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
        return (void *)-1;

      /* Make room for 2 operands in the new reference.  */
      if (VEC_length (vn_reference_op_s, vr->operands) < 2)
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands, 2);
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands, 2);

      /* The looked-through reference is a simple MEM_REF.  */
      memset (&op, 0, sizeof (op));
      op.type = vr->type;
      op.opcode = MEM_REF;
      op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
      op.off = at - lhs_offset + rhs_offset;
      VEC_replace (vn_reference_op_s, vr->operands, 0, &op);
      op.type = TREE_TYPE (rhs);
      op.opcode = TREE_CODE (rhs);
      op.op0 = rhs;
      op.off = -1;
      VEC_replace (vn_reference_op_s, vr->operands, 1, &op);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}

/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
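  /* Valueize a copy of the operands in the shared scratch vector,
     leaving the caller's operand vector untouched.  */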
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
                 VEC_length (vn_reference_op_s, operands));
  memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
          VEC_address (vn_reference_op_s, operands),
          sizeof (vn_reference_op_s)
          * VEC_length (vn_reference_op_s, operands));
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      vn_walk_kind = kind;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
        *vnresult =
          (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

1883 tree
1884 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
1885 vn_reference_t *vnresult)
1887 VEC (vn_reference_op_s, heap) *operands;
1888 struct vn_reference_s vr1;
1889 tree cst;
1890 bool valuezied_anything;
1892 if (vnresult)
1893 *vnresult = NULL;
1895 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1896 vr1.operands = operands
1897 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
1898 vr1.type = TREE_TYPE (op);
1899 vr1.set = get_alias_set (op);
1900 vr1.hashcode = vn_reference_compute_hash (&vr1);
1901 if ((cst = fully_constant_vn_reference_p (&vr1)))
1902 return cst;
1904 if (kind != VN_NOWALK
1905 && vr1.vuse)
1907 vn_reference_t wvnresult;
1908 ao_ref r;
1909 /* Make sure to use a valueized reference if we valueized anything.
1910 Otherwise preserve the full reference for advanced TBAA. */
1911 if (!valueized_anything
1912 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
1913 vr1.operands))
1914 ao_ref_init (&r, op);
1915 vn_walk_kind = kind;
1916 wvnresult =
1917 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1918 vn_reference_lookup_2,
1919 vn_reference_lookup_3, &vr1);
1920 if (vr1.operands != operands)
1921 VEC_free (vn_reference_op_s, heap, vr1.operands);
1922 if (wvnresult)
1924 if (vnresult)
1925 *vnresult = wvnresult;
1926 return wvnresult->result;
1929 return NULL_TREE;
1932 return vn_reference_lookup_1 (&vr1, vnresult);
1936 /* Insert OP into the current hash table with a value number of
1937 RESULT, and return the resulting reference structure we created. */
1939 vn_reference_t
1940 vn_reference_insert (tree op, tree result, tree vuse)
1942 void **slot;
1943 vn_reference_t vr1;
1945 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1946 if (TREE_CODE (result) == SSA_NAME)
1947 vr1->value_id = VN_INFO (result)->value_id;
1948 else
1949 vr1->value_id = get_or_alloc_constant_value_id (result);
1950 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1951 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
1952 vr1->type = TREE_TYPE (op);
1953 vr1->set = get_alias_set (op);
1954 vr1->hashcode = vn_reference_compute_hash (vr1);
1955 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
1957 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1958 INSERT);
1960 /* Because we lookup stores using vuses, and value number failures
1961 using the vdefs (see visit_reference_op_store for how and why),
1962 it's possible that on failure we may try to insert an already
1963 inserted store. This is not wrong; there is no SSA name for a
1964 store that we could use as a differentiator anyway. Thus, unlike
1965 the other lookup functions, you cannot gcc_assert (!*slot)
1966 here. */
1968 /* But free the old slot in case of a collision. */
1969 if (*slot)
1970 free_reference (*slot);
1972 *slot = vr1;
1973 return vr1;
1976 /* Insert a reference by its pieces into the current hash table with
1977 a value number of RESULT. Return the resulting reference
1978 structure we created. */
1980 vn_reference_t
1981 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
1982 VEC (vn_reference_op_s, heap) *operands,
1983 tree result, unsigned int value_id)
1986 void **slot;
1987 vn_reference_t vr1;
1989 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1990 vr1->value_id = value_id;
1991 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1992 vr1->operands = valueize_refs (operands);
1993 vr1->type = type;
1994 vr1->set = set;
1995 vr1->hashcode = vn_reference_compute_hash (vr1);
1996 if (result && TREE_CODE (result) == SSA_NAME)
1997 result = SSA_VAL (result);
1998 vr1->result = result;
2000 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
2001 INSERT);
2003 /* At this point we should have all the things inserted that we have
2004 seen before, and we should never try inserting something that
2005 already exists. */
2006 gcc_assert (!*slot);
2007 if (*slot)
2008 free_reference (*slot);
2010 *slot = vr1;
2011 return vr1;
2014 /* Compute and return the hash value for nary operation VNO1. */
2016 hashval_t
2017 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2019 hashval_t hash;
2020 unsigned i;
2022 for (i = 0; i < vno1->length; ++i)
2023 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2024 vno1->op[i] = SSA_VAL (vno1->op[i]);
2026 if (vno1->length == 2
2027 && commutative_tree_code (vno1->opcode)
2028 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2030 tree temp = vno1->op[0];
2031 vno1->op[0] = vno1->op[1];
2032 vno1->op[1] = temp;
2035 hash = iterative_hash_hashval_t (vno1->opcode, 0);
2036 for (i = 0; i < vno1->length; ++i)
2037 hash = iterative_hash_expr (vno1->op[i], hash);
2039 return hash;
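#if 0
/* Hypothetical illustration (A and B are assumed SSA names of integer
   type): because of the canonical operand swap above, commutative
   expressions hash identically regardless of operand order, so
   "a + b" and "b + a" end up in the same hash bucket.
   init_vn_nary_op_from_pieces is defined further down in this file.  */
static void
example_commutative_hash (tree a, tree b)
{
  tree ops1[2] = { a, b };
  tree ops2[2] = { b, a };
  vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s, sizeof_vn_nary_op (2));
  vn_nary_op_t vno2 = XALLOCAVAR (struct vn_nary_op_s, sizeof_vn_nary_op (2));
  init_vn_nary_op_from_pieces (vno1, 2, PLUS_EXPR, integer_type_node, ops1);
  init_vn_nary_op_from_pieces (vno2, 2, PLUS_EXPR, integer_type_node, ops2);
  gcc_assert (vn_nary_op_compute_hash (vno1)
              == vn_nary_op_compute_hash (vno2));
}
#endif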
2042 /* Return the computed hashcode for nary operation P1. */
2044 static hashval_t
2045 vn_nary_op_hash (const void *p1)
2047 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2048 return vno1->hashcode;
2051 /* Compare nary operations P1 and P2 and return true if they are
2052 equivalent. */
2054 int
2055 vn_nary_op_eq (const void *p1, const void *p2)
2057 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2058 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
2059 unsigned i;
2061 if (vno1->hashcode != vno2->hashcode)
2062 return false;
2064 if (vno1->length != vno2->length)
2065 return false;
2067 if (vno1->opcode != vno2->opcode
2068 || !types_compatible_p (vno1->type, vno2->type))
2069 return false;
2071 for (i = 0; i < vno1->length; ++i)
2072 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2073 return false;
2075 return true;
2078 /* Initialize VNO from the pieces provided. */
2080 static void
2081 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2082 enum tree_code code, tree type, tree *ops)
2084 vno->opcode = code;
2085 vno->length = length;
2086 vno->type = type;
2087 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2090 /* Initialize VNO from OP. */
2092 static void
2093 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2095 unsigned i;
2097 vno->opcode = TREE_CODE (op);
2098 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2099 vno->type = TREE_TYPE (op);
2100 for (i = 0; i < vno->length; ++i)
2101 vno->op[i] = TREE_OPERAND (op, i);
2104 /* Return the number of operands for a vn_nary ops structure from STMT. */
2106 static unsigned int
2107 vn_nary_length_from_stmt (gimple stmt)
2109 switch (gimple_assign_rhs_code (stmt))
2111 case REALPART_EXPR:
2112 case IMAGPART_EXPR:
2113 case VIEW_CONVERT_EXPR:
2114 return 1;
2116 case CONSTRUCTOR:
2117 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2119 default:
2120 return gimple_num_ops (stmt) - 1;
2124 /* Initialize VNO from STMT. */
2126 static void
2127 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2129 unsigned i;
2131 vno->opcode = gimple_assign_rhs_code (stmt);
2132 vno->type = gimple_expr_type (stmt);
2133 switch (vno->opcode)
2135 case REALPART_EXPR:
2136 case IMAGPART_EXPR:
2137 case VIEW_CONVERT_EXPR:
2138 vno->length = 1;
2139 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2140 break;
2142 case CONSTRUCTOR:
2143 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2144 for (i = 0; i < vno->length; ++i)
2145 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2146 break;
2148 default:
2149 vno->length = gimple_num_ops (stmt) - 1;
2150 for (i = 0; i < vno->length; ++i)
2151 vno->op[i] = gimple_op (stmt, i + 1);
2155 /* Compute the hashcode for VNO and look for it in the hash table;
2156 return the resulting value number if it exists in the hash table.
2157 Return NULL_TREE if it does not exist in the hash table or if the
2158 result field of the operation is NULL. VNRESULT will contain the
2159 vn_nary_op_t from the hashtable if it exists. */
2161 static tree
2162 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2164 void **slot;
2166 if (vnresult)
2167 *vnresult = NULL;
2169 vno->hashcode = vn_nary_op_compute_hash (vno);
2170 slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
2171 NO_INSERT);
2172 if (!slot && current_info == optimistic_info)
2173 slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
2174 NO_INSERT);
2175 if (!slot)
2176 return NULL_TREE;
2177 if (vnresult)
2178 *vnresult = (vn_nary_op_t)*slot;
2179 return ((vn_nary_op_t)*slot)->result;
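/* Note on the double probe above: while an SCC is being iterated,
   CURRENT_INFO is OPTIMISTIC_INFO, and the second NO_INSERT lookup
   lets us fall back to facts already proven into VALID_INFO (for
   example expressions inserted when processing earlier SCCs), so
   optimistic iteration never loses validated information.  */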
2182 /* Lookup an n-ary operation by its pieces and return the resulting value
2183 number if it exists in the hash table. Return NULL_TREE if it does
2184 not exist in the hash table or if the result field of the operation
2185 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2186 if it exists. */
2188 tree
2189 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2190 tree type, tree *ops, vn_nary_op_t *vnresult)
2192 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2193 sizeof_vn_nary_op (length));
2194 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2195 return vn_nary_op_lookup_1 (vno1, vnresult);
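#if 0
/* Hypothetical usage sketch (X and Y are assumed SSA names, TYPE
   their common type): ask whether "x + y" already has a value number
   without first building a GENERIC tree for the expression.  */
{
  tree ops[2] = { x, y };
  vn_nary_op_t vnresult;
  tree val = vn_nary_op_lookup_pieces (2, PLUS_EXPR, type, ops, &vnresult);
}
#endif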
2198 /* Lookup OP in the current hash table, and return the resulting value
2199 number if it exists in the hash table. Return NULL_TREE if it does
2200 not exist in the hash table or if the result field of the operation
2201 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2202 if it exists. */
2204 tree
2205 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2207 vn_nary_op_t vno1
2208 = XALLOCAVAR (struct vn_nary_op_s,
2209 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2210 init_vn_nary_op_from_op (vno1, op);
2211 return vn_nary_op_lookup_1 (vno1, vnresult);
2214 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2215 value number if it exists in the hash table. Return NULL_TREE if
2216 it does not exist in the hash table. VNRESULT will contain the
2217 vn_nary_op_t from the hashtable if it exists. */
2219 tree
2220 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2222 vn_nary_op_t vno1
2223 = XALLOCAVAR (struct vn_nary_op_s,
2224 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2225 init_vn_nary_op_from_stmt (vno1, stmt);
2226 return vn_nary_op_lookup_1 (vno1, vnresult);
2229 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2231 static vn_nary_op_t
2232 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2234 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2237 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2238 obstack. */
2240 static vn_nary_op_t
2241 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2243 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2244 &current_info->nary_obstack);
2246 vno1->value_id = value_id;
2247 vno1->length = length;
2248 vno1->result = result;
2250 return vno1;
2253 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2254 VNO->HASHCODE first. */
2256 static vn_nary_op_t
2257 vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash)
2259 void **slot;
2261 if (compute_hash)
2262 vno->hashcode = vn_nary_op_compute_hash (vno);
2264 slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT);
2265 gcc_assert (!*slot);
2267 *slot = vno;
2268 return vno;
2271 /* Insert an n-ary operation into the current hash table using its
2272 pieces. Return the vn_nary_op_t structure we created and put in
2273 the hashtable. */
2275 vn_nary_op_t
2276 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2277 tree type, tree *ops,
2278 tree result, unsigned int value_id)
2280 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2281 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2282 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2285 /* Insert OP into the current hash table with a value number of
2286 RESULT. Return the vn_nary_op_t structure we created and put in
2287 the hashtable. */
2289 vn_nary_op_t
2290 vn_nary_op_insert (tree op, tree result)
2292 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2293 vn_nary_op_t vno1;
2295 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2296 init_vn_nary_op_from_op (vno1, op);
2297 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2300 /* Insert the rhs of STMT into the current hash table with a value number of
2301 RESULT. */
2303 vn_nary_op_t
2304 vn_nary_op_insert_stmt (gimple stmt, tree result)
2306 vn_nary_op_t vno1
2307 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2308 result, VN_INFO (result)->value_id);
2309 init_vn_nary_op_from_stmt (vno1, stmt);
2310 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2313 /* Compute a hashcode for PHI operation VP1 and return it. */
2315 static inline hashval_t
2316 vn_phi_compute_hash (vn_phi_t vp1)
2318 hashval_t result;
2319 int i;
2320 tree phi1op;
2321 tree type;
2323 result = vp1->block->index;
2325 /* If all PHI arguments are constants we need to distinguish
2326 the PHI node via its type. */
2327 type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
2328 result += (INTEGRAL_TYPE_P (type)
2329 + (INTEGRAL_TYPE_P (type)
2330 ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
2332 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2334 if (phi1op == VN_TOP)
2335 continue;
2336 result = iterative_hash_expr (phi1op, result);
2339 return result;
2342 /* Return the computed hashcode for phi operation P1. */
2344 static hashval_t
2345 vn_phi_hash (const void *p1)
2347 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2348 return vp1->hashcode;
2351 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2353 static int
2354 vn_phi_eq (const void *p1, const void *p2)
2356 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2357 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
2359 if (vp1->hashcode != vp2->hashcode)
2360 return false;
2362 if (vp1->block == vp2->block)
2364 int i;
2365 tree phi1op;
2367 /* If the PHI nodes do not have compatible types
2368 they are not the same. */
2369 if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
2370 TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
2371 return false;
2373 /* Any phi in the same block will have its arguments in the
2374 same edge order, because of how we store phi nodes. */
2375 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2377 tree phi2op = VEC_index (tree, vp2->phiargs, i);
2378 if (phi1op == VN_TOP || phi2op == VN_TOP)
2379 continue;
2380 if (!expressions_equal_p (phi1op, phi2op))
2381 return false;
2383 return true;
2385 return false;
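/* Worked example of the VN_TOP handling: VN_TOP arguments are skipped
   by vn_phi_compute_hash above and ignored here, so for instance
   PHI <a_1, VN_TOP> and PHI <VN_TOP, a_1> in the same block unify.
   VN_TOP sits at the top of the lattice and is equivalent to
   everything, which is what lets optimistic iteration make progress
   on arguments that have not been visited yet.  */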
2388 static VEC(tree, heap) *shared_lookup_phiargs;
2390 /* Lookup PHI in the current hash table, and return the resulting
2391 value number if it exists in the hash table. Return NULL_TREE if
2392 it does not exist in the hash table. */
2394 static tree
2395 vn_phi_lookup (gimple phi)
2397 void **slot;
2398 struct vn_phi_s vp1;
2399 unsigned i;
2401 VEC_truncate (tree, shared_lookup_phiargs, 0);
2403 /* Canonicalize the SSA_NAMEs to their value numbers. */
2404 for (i = 0; i < gimple_phi_num_args (phi); i++)
2406 tree def = PHI_ARG_DEF (phi, i);
2407 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2408 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
2410 vp1.phiargs = shared_lookup_phiargs;
2411 vp1.block = gimple_bb (phi);
2412 vp1.hashcode = vn_phi_compute_hash (&vp1);
2413 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
2414 NO_INSERT);
2415 if (!slot && current_info == optimistic_info)
2416 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
2417 NO_INSERT);
2418 if (!slot)
2419 return NULL_TREE;
2420 return ((vn_phi_t)*slot)->result;
2423 /* Insert PHI into the current hash table with a value number of
2424 RESULT. */
2426 static vn_phi_t
2427 vn_phi_insert (gimple phi, tree result)
2429 void **slot;
2430 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2431 unsigned i;
2432 VEC (tree, heap) *args = NULL;
2434 /* Canonicalize the SSA_NAMEs to their value numbers. */
2435 for (i = 0; i < gimple_phi_num_args (phi); i++)
2437 tree def = PHI_ARG_DEF (phi, i);
2438 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2439 VEC_safe_push (tree, heap, args, def);
2441 vp1->value_id = VN_INFO (result)->value_id;
2442 vp1->phiargs = args;
2443 vp1->block = gimple_bb (phi);
2444 vp1->result = result;
2445 vp1->hashcode = vn_phi_compute_hash (vp1);
2447 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
2448 INSERT);
2450 /* Because we iterate over phi operations more than once, it's
2451 possible the slot might already exist here, hence no assert.  */
2452 *slot = vp1;
2453 return vp1;
2457 /* Print set of components in strongly connected component SCC to OUT. */
2459 static void
2460 print_scc (FILE *out, VEC (tree, heap) *scc)
2462 tree var;
2463 unsigned int i;
2465 fprintf (out, "SCC consists of:");
2466 FOR_EACH_VEC_ELT (tree, scc, i, var)
2468 fprintf (out, " ");
2469 print_generic_expr (out, var, 0);
2471 fprintf (out, "\n");
2474 /* Set the value number of FROM to TO, return true if it has changed
2475 as a result. */
2477 static inline bool
2478 set_ssa_val_to (tree from, tree to)
2480 tree currval = SSA_VAL (from);
2482 if (from != to)
2484 if (currval == from)
2486 if (dump_file && (dump_flags & TDF_DETAILS))
2488 fprintf (dump_file, "Not changing value number of ");
2489 print_generic_expr (dump_file, from, 0);
2490 fprintf (dump_file, " from VARYING to ");
2491 print_generic_expr (dump_file, to, 0);
2492 fprintf (dump_file, "\n");
2494 return false;
2496 else if (TREE_CODE (to) == SSA_NAME
2497 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2498 to = from;
2501 /* The only thing we allow as value numbers are VN_TOP, ssa_names
2502 and invariants. So assert that here. */
2503 gcc_assert (to != NULL_TREE
2504 && (to == VN_TOP
2505 || TREE_CODE (to) == SSA_NAME
2506 || is_gimple_min_invariant (to)));
2508 if (dump_file && (dump_flags & TDF_DETAILS))
2510 fprintf (dump_file, "Setting value number of ");
2511 print_generic_expr (dump_file, from, 0);
2512 fprintf (dump_file, " to ");
2513 print_generic_expr (dump_file, to, 0);
2516 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
2518 VN_INFO (from)->valnum = to;
2519 if (dump_file && (dump_flags & TDF_DETAILS))
2520 fprintf (dump_file, " (changed)\n");
2521 return true;
2523 if (dump_file && (dump_flags & TDF_DETAILS))
2524 fprintf (dump_file, "\n");
2525 return false;
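#if 0
/* Hypothetical illustration of the lattice discipline above (FROM and
   TO are assumed distinct SSA names): a name that has gone VARYING,
   i.e. is valued as itself, is never moved back to a better value.  */
static void
example_lattice_monotone (tree from, tree to)
{
  set_ssa_val_to (from, from);             /* FROM is now VARYING.  */
  gcc_assert (!set_ssa_val_to (from, to)); /* Refused, returns false.  */
}
#endif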
2528 /* Value number all definitions in STMT to themselves.
2529 Return true if a value number changed. */
2531 static bool
2532 defs_to_varying (gimple stmt)
2534 bool changed = false;
2535 ssa_op_iter iter;
2536 def_operand_p defp;
2538 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2540 tree def = DEF_FROM_PTR (defp);
2542 VN_INFO (def)->use_processed = true;
2543 changed |= set_ssa_val_to (def, def);
2545 return changed;
2548 static bool expr_has_constants (tree expr);
2549 static tree valueize_expr (tree expr);
2551 /* Visit a copy between LHS and RHS, return true if the value number
2552 changed. */
2554 static bool
2555 visit_copy (tree lhs, tree rhs)
2557 /* Follow chains of copies to their destination. */
2558 while (TREE_CODE (rhs) == SSA_NAME
2559 && SSA_VAL (rhs) != rhs)
2560 rhs = SSA_VAL (rhs);
2562 /* The copy may have a more interesting constant-filled expression
2563 (we don't, since we know our RHS is just an SSA name). */
2564 if (TREE_CODE (rhs) == SSA_NAME)
2566 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2567 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2570 return set_ssa_val_to (lhs, rhs);
2573 /* Visit a nary operator RHS, value number it, and return true if the
2574 value number of LHS has changed as a result. */
2576 static bool
2577 visit_nary_op (tree lhs, gimple stmt)
2579 bool changed = false;
2580 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2582 if (result)
2583 changed = set_ssa_val_to (lhs, result);
2584 else
2586 changed = set_ssa_val_to (lhs, lhs);
2587 vn_nary_op_insert_stmt (stmt, lhs);
2590 return changed;
2593 /* Visit a call STMT storing into LHS. Return true if the value number
2594 of the LHS has changed as a result. */
2596 static bool
2597 visit_reference_op_call (tree lhs, gimple stmt)
2599 bool changed = false;
2600 struct vn_reference_s vr1;
2601 tree result;
2602 tree vuse = gimple_vuse (stmt);
2604 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2605 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2606 vr1.type = gimple_expr_type (stmt);
2607 vr1.set = 0;
2608 vr1.hashcode = vn_reference_compute_hash (&vr1);
2609 result = vn_reference_lookup_1 (&vr1, NULL);
2610 if (result)
2612 changed = set_ssa_val_to (lhs, result);
2613 if (TREE_CODE (result) == SSA_NAME
2614 && VN_INFO (result)->has_constants)
2615 VN_INFO (lhs)->has_constants = true;
2617 else
2619 void **slot;
2620 vn_reference_t vr2;
2621 changed = set_ssa_val_to (lhs, lhs);
2622 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2623 vr2->vuse = vr1.vuse;
2624 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2625 vr2->type = vr1.type;
2626 vr2->set = vr1.set;
2627 vr2->hashcode = vr1.hashcode;
2628 vr2->result = lhs;
2629 slot = htab_find_slot_with_hash (current_info->references,
2630 vr2, vr2->hashcode, INSERT);
2631 if (*slot)
2632 free_reference (*slot);
2633 *slot = vr2;
2636 return changed;
2639 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2640 and return true if the value number of the LHS has changed as a result. */
2642 static bool
2643 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2645 bool changed = false;
2646 tree last_vuse;
2647 tree result;
2649 last_vuse = gimple_vuse (stmt);
2650 last_vuse_ptr = &last_vuse;
2651 result = vn_reference_lookup (op, gimple_vuse (stmt),
2652 default_vn_walk_kind, NULL);
2653 last_vuse_ptr = NULL;
2655 /* If we have a VCE, try looking up its operand as it might be stored in
2656 a different type. */
2657 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2658 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2659 default_vn_walk_kind, NULL);
2661 /* We handle type-punning through unions by value-numbering based
2662 on offset and size of the access. Be prepared to handle a
2663 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
2664 if (result
2665 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2667 /* We will be setting the value number of lhs to the value number
2668 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2669 So first simplify and lookup this expression to see if it
2670 is already available. */
2671 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2672 if ((CONVERT_EXPR_P (val)
2673 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2674 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2676 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2677 if ((CONVERT_EXPR_P (tem)
2678 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2679 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2680 TREE_TYPE (val), tem)))
2681 val = tem;
2683 result = val;
2684 if (!is_gimple_min_invariant (val)
2685 && TREE_CODE (val) != SSA_NAME)
2686 result = vn_nary_op_lookup (val, NULL);
2687 /* If the expression is not yet available, value-number lhs to
2688 a new SSA_NAME we create. */
2689 if (!result)
2691 result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
2692 /* Initialize value-number information properly. */
2693 VN_INFO_GET (result)->valnum = result;
2694 VN_INFO (result)->value_id = get_next_value_id ();
2695 VN_INFO (result)->expr = val;
2696 VN_INFO (result)->has_constants = expr_has_constants (val);
2697 VN_INFO (result)->needs_insertion = true;
2698 /* As all "inserted" statements are singleton SCCs, insert
2699 into the valid table. This is strictly needed to
2700 avoid re-generating new value SSA_NAMEs for the same
2701 expression during SCC iteration over and over (the
2702 optimistic table gets cleared after each iteration).
2703 We do not need to insert into the optimistic table, as
2704 lookups there will fall back to the valid table. */
2705 if (current_info == optimistic_info)
2707 current_info = valid_info;
2708 vn_nary_op_insert (val, result);
2709 current_info = optimistic_info;
2711 else
2712 vn_nary_op_insert (val, result);
2713 if (dump_file && (dump_flags & TDF_DETAILS))
2715 fprintf (dump_file, "Inserting name ");
2716 print_generic_expr (dump_file, result, 0);
2717 fprintf (dump_file, " for expression ");
2718 print_generic_expr (dump_file, val, 0);
2719 fprintf (dump_file, "\n");
2724 if (result)
2726 changed = set_ssa_val_to (lhs, result);
2727 if (TREE_CODE (result) == SSA_NAME
2728 && VN_INFO (result)->has_constants)
2730 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2731 VN_INFO (lhs)->has_constants = true;
2734 else
2736 changed = set_ssa_val_to (lhs, lhs);
2737 vn_reference_insert (op, lhs, last_vuse);
2740 return changed;
2744 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2745 and return true if the value number of the LHS has changed as a result. */
2747 static bool
2748 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2750 bool changed = false;
2751 tree result;
2752 bool resultsame = false;
2754 /* First we want to lookup using the *vuses* from the store and see
2755 if the last store to this location with the same address had
2756 the same value.
2758 The vuses represent the memory state before the store. If the
2759 memory state, address, and value of the store are the same as the
2760 last store to this location, then this store will produce the
2761 same memory state as that store.
2763 In this case the vdef versions for this store are value numbered to those
2764 vuse versions, since they represent the same memory state after
2765 this store.
2767 Otherwise, the vdefs for the store are used when inserting into
2768 the table, since the store generates a new memory state. */
2770 result = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_NOWALK, NULL);
2772 if (result)
2774 if (TREE_CODE (result) == SSA_NAME)
2775 result = SSA_VAL (result);
2776 if (TREE_CODE (op) == SSA_NAME)
2777 op = SSA_VAL (op);
2778 resultsame = expressions_equal_p (result, op);
2781 if (!result || !resultsame)
2783 tree vdef;
2785 if (dump_file && (dump_flags & TDF_DETAILS))
2787 fprintf (dump_file, "No store match\n");
2788 fprintf (dump_file, "Value numbering store ");
2789 print_generic_expr (dump_file, lhs, 0);
2790 fprintf (dump_file, " to ");
2791 print_generic_expr (dump_file, op, 0);
2792 fprintf (dump_file, "\n");
2794 /* Have to set value numbers before insert, since insert is
2795 going to valueize the references in-place. */
2796 if ((vdef = gimple_vdef (stmt)))
2798 VN_INFO (vdef)->use_processed = true;
2799 changed |= set_ssa_val_to (vdef, vdef);
2802 /* Do not insert structure copies into the tables. */
2803 if (is_gimple_min_invariant (op)
2804 || is_gimple_reg (op))
2805 vn_reference_insert (lhs, op, vdef);
2807 else
2809 /* We had a match, so value number the vdef to have the value
2810 number of the vuse it came from. */
2811 tree def, use;
2813 if (dump_file && (dump_flags & TDF_DETAILS))
2814 fprintf (dump_file, "Store matched earlier value, "
2815 "value numbering store vdefs to matching vuses.\n");
2817 def = gimple_vdef (stmt);
2818 use = gimple_vuse (stmt);
2820 VN_INFO (def)->use_processed = true;
2821 changed |= set_ssa_val_to (def, SSA_VAL (use));
2824 return changed;
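/* Hypothetical GIMPLE illustration of the matched-store case above:

     # .MEM_2 = VDEF <.MEM_1>
     a = x_1;
     ...
     # .MEM_3 = VDEF <.MEM_2>
     a = x_1;

   The second store looks up "a" at its VUSE .MEM_2, finds the value
   x_1 recorded by the first store, and therefore value numbers .MEM_3
   to SSA_VAL (.MEM_2): the store changes nothing, so the memory
   states before and after it are the same.  */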
2827 /* Visit and value number PHI, return true if the value number
2828 changed. */
2830 static bool
2831 visit_phi (gimple phi)
2833 bool changed = false;
2834 tree result;
2835 tree sameval = VN_TOP;
2836 bool allsame = true;
2837 unsigned i;
2839 /* TODO: We could check for this in init_sccvn, and replace this
2840 with a gcc_assert. */
2841 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2842 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2844 /* See if all non-TOP arguments have the same value. TOP is
2845 equivalent to everything, so we can ignore it. */
2846 for (i = 0; i < gimple_phi_num_args (phi); i++)
2848 tree def = PHI_ARG_DEF (phi, i);
2850 if (TREE_CODE (def) == SSA_NAME)
2851 def = SSA_VAL (def);
2852 if (def == VN_TOP)
2853 continue;
2854 if (sameval == VN_TOP)
2856 sameval = def;
2858 else
2860 if (!expressions_equal_p (def, sameval))
2862 allsame = false;
2863 break;
2868 /* If all value numbered to the same value, the phi node has that
2869 value. */
2870 if (allsame)
2872 if (is_gimple_min_invariant (sameval))
2874 VN_INFO (PHI_RESULT (phi))->has_constants = true;
2875 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2877 else
2879 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2880 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2883 if (TREE_CODE (sameval) == SSA_NAME)
2884 return visit_copy (PHI_RESULT (phi), sameval);
2886 return set_ssa_val_to (PHI_RESULT (phi), sameval);
2889 /* Otherwise, see if it is equivalent to a phi node in this block. */
2890 result = vn_phi_lookup (phi);
2891 if (result)
2893 if (TREE_CODE (result) == SSA_NAME)
2894 changed = visit_copy (PHI_RESULT (phi), result);
2895 else
2896 changed = set_ssa_val_to (PHI_RESULT (phi), result);
2898 else
2900 vn_phi_insert (phi, PHI_RESULT (phi));
2901 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2902 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2903 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2906 return changed;
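/* Hypothetical GIMPLE illustration of the ALLSAME path above: if both
   x_2 and x_3 value number to x_1, then

     x_4 = PHI <x_2(3), x_3(4)>

   degenerates and x_4 is value numbered as a copy of x_1.  If the
   arguments differ, x_4 can still unify with an equivalent PHI in the
   same block through vn_phi_lookup.  */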
2909 /* Return true if EXPR contains constants. */
2911 static bool
2912 expr_has_constants (tree expr)
2914 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2916 case tcc_unary:
2917 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2919 case tcc_binary:
2920 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2921 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2922 /* Constants inside reference ops are rarely interesting, but
2923 it can take a lot of looking to find them. */
2924 case tcc_reference:
2925 case tcc_declaration:
2926 return false;
2927 default:
2928 return is_gimple_min_invariant (expr);
2930 return false;
2933 /* Return true if STMT contains constants. */
2935 static bool
2936 stmt_has_constants (gimple stmt)
2938 if (gimple_code (stmt) != GIMPLE_ASSIGN)
2939 return false;
2941 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2943 case GIMPLE_UNARY_RHS:
2944 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2946 case GIMPLE_BINARY_RHS:
2947 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2948 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
2949 case GIMPLE_TERNARY_RHS:
2950 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2951 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
2952 || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
2953 case GIMPLE_SINGLE_RHS:
2954 /* Constants inside reference ops are rarely interesting, but
2955 it can take a lot of looking to find them. */
2956 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2957 default:
2958 gcc_unreachable ();
2960 return false;
2963 /* Replace SSA_NAMEs in EXPR with their value numbers, and return the
2964 result.
2965 This is performed in place. */
2967 static tree
2968 valueize_expr (tree expr)
2970 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2972 case tcc_binary:
2973 TREE_OPERAND (expr, 1) = vn_valueize (TREE_OPERAND (expr, 1));
2974 /* Fallthru. */
2975 case tcc_unary:
2976 TREE_OPERAND (expr, 0) = vn_valueize (TREE_OPERAND (expr, 0));
2977 break;
2978 default:;
2980 return expr;
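/* For example (with hypothetical SSA names): if SSA_VAL (b_2) is the
   constant 7, the cached expression "a_1 + b_2" is rewritten in place
   to "a_1 + 7", which the simplifiers below can then fold against
   further constants.  */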
2983 /* Simplify the binary expression RHS, and return the result if
2984 simplified. */
2986 static tree
2987 simplify_binary_expression (gimple stmt)
2989 tree result = NULL_TREE;
2990 tree op0 = gimple_assign_rhs1 (stmt);
2991 tree op1 = gimple_assign_rhs2 (stmt);
2992 enum tree_code code = gimple_assign_rhs_code (stmt);
2994 /* This will not catch every single case we could combine, but will
2995 catch those with constants. The goal here is to simultaneously
2996 combine constants between expressions, but avoid infinite
2997 expansion of expressions during simplification. */
2998 if (TREE_CODE (op0) == SSA_NAME)
3000 if (VN_INFO (op0)->has_constants
3001 || TREE_CODE_CLASS (code) == tcc_comparison
3002 || code == COMPLEX_EXPR)
3003 op0 = valueize_expr (vn_get_expr_for (op0));
3004 else
3005 op0 = vn_valueize (op0);
3008 if (TREE_CODE (op1) == SSA_NAME)
3010 if (VN_INFO (op1)->has_constants
3011 || code == COMPLEX_EXPR)
3012 op1 = valueize_expr (vn_get_expr_for (op1));
3013 else
3014 op1 = vn_valueize (op1);
3017 /* Pointer plus constant can be represented as invariant address.
3018 Do so to allow further propagation; see also tree forwprop. */
3019 if (code == POINTER_PLUS_EXPR
3020 && host_integerp (op1, 1)
3021 && TREE_CODE (op0) == ADDR_EXPR
3022 && is_gimple_min_invariant (op0))
3023 return build_invariant_address (TREE_TYPE (op0),
3024 TREE_OPERAND (op0, 0),
3025 TREE_INT_CST_LOW (op1));
3027 /* Avoid folding if nothing changed. */
3028 if (op0 == gimple_assign_rhs1 (stmt)
3029 && op1 == gimple_assign_rhs2 (stmt))
3030 return NULL_TREE;
3032 fold_defer_overflow_warnings ();
3034 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3035 if (result)
3036 STRIP_USELESS_TYPE_CONVERSION (result);
3038 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3039 stmt, 0);
3041 /* Make sure result is not a complex expression consisting
3042 of operators of operators (i.e. (a + b) + (a + c)).
3043 Otherwise, we will end up with unbounded expressions if
3044 fold does anything at all. */
3045 if (result && valid_gimple_rhs_p (result))
3046 return result;
3048 return NULL_TREE;
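/* As a concrete (hypothetical) instance of the POINTER_PLUS_EXPR case
   above: for "p_1 = &a; q_2 = p_1 + 4", valueizing rhs1 exposes
   "&a + 4", which is returned as an invariant address so that later
   propagation can treat q_2 as a compile-time constant.  */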
3051 /* Simplify the unary expression RHS, and return the result if
3052 simplified. */
3054 static tree
3055 simplify_unary_expression (gimple stmt)
3057 tree result = NULL_TREE;
3058 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3059 enum tree_code code = gimple_assign_rhs_code (stmt);
3061 /* We handle some tcc_reference codes here that are all
3062 GIMPLE_ASSIGN_SINGLE codes. */
3063 if (code == REALPART_EXPR
3064 || code == IMAGPART_EXPR
3065 || code == VIEW_CONVERT_EXPR
3066 || code == BIT_FIELD_REF)
3067 op0 = TREE_OPERAND (op0, 0);
3069 if (TREE_CODE (op0) != SSA_NAME)
3070 return NULL_TREE;
3072 orig_op0 = op0;
3073 if (VN_INFO (op0)->has_constants)
3074 op0 = valueize_expr (vn_get_expr_for (op0));
3075 else if (CONVERT_EXPR_CODE_P (code)
3076 || code == REALPART_EXPR
3077 || code == IMAGPART_EXPR
3078 || code == VIEW_CONVERT_EXPR
3079 || code == BIT_FIELD_REF)
3081 /* We want to do tree-combining on conversion-like expressions.
3082 Make sure we feed only SSA_NAMEs or constants to fold though. */
3083 tree tem = valueize_expr (vn_get_expr_for (op0));
3084 if (UNARY_CLASS_P (tem)
3085 || BINARY_CLASS_P (tem)
3086 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3087 || TREE_CODE (tem) == SSA_NAME
3088 || TREE_CODE (tem) == CONSTRUCTOR
3089 || is_gimple_min_invariant (tem))
3090 op0 = tem;
3093 /* Avoid folding if nothing changed, but remember the expression. */
3094 if (op0 == orig_op0)
3095 return NULL_TREE;
3097 if (code == BIT_FIELD_REF)
3099 tree rhs = gimple_assign_rhs1 (stmt);
3100 result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3101 op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3103 else
3104 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3105 if (result)
3107 STRIP_USELESS_TYPE_CONVERSION (result);
3108 if (valid_gimple_rhs_p (result))
3109 return result;
3112 return NULL_TREE;
3115 /* Try to simplify RHS using equivalences and constant folding. */
3117 static tree
3118 try_to_simplify (gimple stmt)
3120 enum tree_code code = gimple_assign_rhs_code (stmt);
3121 tree tem;
3123 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
3124 in this case; there is no point in doing extra work. */
3125 if (code == SSA_NAME)
3126 return NULL_TREE;
3128 /* First try constant folding based on our current lattice. */
3129 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize);
3130 if (tem
3131 && (TREE_CODE (tem) == SSA_NAME
3132 || is_gimple_min_invariant (tem)))
3133 return tem;
3135 /* If that didn't work try combining multiple statements. */
3136 switch (TREE_CODE_CLASS (code))
3138 case tcc_reference:
3139 /* Fallthrough for some unary codes that can operate on registers. */
3140 if (!(code == REALPART_EXPR
3141 || code == IMAGPART_EXPR
3142 || code == VIEW_CONVERT_EXPR
3143 || code == BIT_FIELD_REF))
3144 break;
3145 /* We could do a little more with unary ops, if they expand
3146 into binary ops, but it's debatable whether it is worth it. */
3147 case tcc_unary:
3148 return simplify_unary_expression (stmt);
3150 case tcc_comparison:
3151 case tcc_binary:
3152 return simplify_binary_expression (stmt);
3154 default:
3155 break;
3158 return NULL_TREE;
3161 /* Visit and value number USE, return true if the value number
3162 changed. */
3164 static bool
3165 visit_use (tree use)
3167 bool changed = false;
3168 gimple stmt = SSA_NAME_DEF_STMT (use);
3170 VN_INFO (use)->use_processed = true;
3172 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3173 if (dump_file && (dump_flags & TDF_DETAILS)
3174 && !SSA_NAME_IS_DEFAULT_DEF (use))
3176 fprintf (dump_file, "Value numbering ");
3177 print_generic_expr (dump_file, use, 0);
3178 fprintf (dump_file, " stmt = ");
3179 print_gimple_stmt (dump_file, stmt, 0, 0);
3182 /* Handle uninitialized uses. */
3183 if (SSA_NAME_IS_DEFAULT_DEF (use))
3184 changed = set_ssa_val_to (use, use);
3185 else
3187 if (gimple_code (stmt) == GIMPLE_PHI)
3188 changed = visit_phi (stmt);
3189 else if (!gimple_has_lhs (stmt)
3190 || gimple_has_volatile_ops (stmt))
3191 changed = defs_to_varying (stmt);
3192 else if (is_gimple_assign (stmt))
3194 enum tree_code code = gimple_assign_rhs_code (stmt);
3195 tree lhs = gimple_assign_lhs (stmt);
3196 tree rhs1 = gimple_assign_rhs1 (stmt);
3197 tree simplified;
3199 /* Shortcut for copies. Simplifying copies is pointless,
3200 since we copy the expression and value they represent. */
3201 if (code == SSA_NAME
3202 && TREE_CODE (lhs) == SSA_NAME)
3204 changed = visit_copy (lhs, rhs1);
3205 goto done;
3207 simplified = try_to_simplify (stmt);
3208 if (simplified)
3210 if (dump_file && (dump_flags & TDF_DETAILS))
3212 fprintf (dump_file, "RHS ");
3213 print_gimple_expr (dump_file, stmt, 0, 0);
3214 fprintf (dump_file, " simplified to ");
3215 print_generic_expr (dump_file, simplified, 0);
3216 if (TREE_CODE (lhs) == SSA_NAME)
3217 fprintf (dump_file, " has constants %d\n",
3218 expr_has_constants (simplified));
3219 else
3220 fprintf (dump_file, "\n");
3223 /* Setting value numbers to constants will occasionally
3224 screw up phi congruence because constants are not
3225 uniquely associated with a single ssa name that can be
3226 looked up. */
3227 if (simplified
3228 && is_gimple_min_invariant (simplified)
3229 && TREE_CODE (lhs) == SSA_NAME)
3231 VN_INFO (lhs)->expr = simplified;
3232 VN_INFO (lhs)->has_constants = true;
3233 changed = set_ssa_val_to (lhs, simplified);
3234 goto done;
3236 else if (simplified
3237 && TREE_CODE (simplified) == SSA_NAME
3238 && TREE_CODE (lhs) == SSA_NAME)
3240 changed = visit_copy (lhs, simplified);
3241 goto done;
3243 else if (simplified)
3245 if (TREE_CODE (lhs) == SSA_NAME)
3247 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3248 /* We have to unshare the expression or else
3249 valueizing may change the IL stream. */
3250 VN_INFO (lhs)->expr = unshare_expr (simplified);
3253 else if (stmt_has_constants (stmt)
3254 && TREE_CODE (lhs) == SSA_NAME)
3255 VN_INFO (lhs)->has_constants = true;
3256 else if (TREE_CODE (lhs) == SSA_NAME)
3258 /* We reset expr and constantness here because we may
3259 have been value numbering optimistically, and
3260 iterating. They may become non-constant in this case,
3261 even if they were optimistically constant. */
3263 VN_INFO (lhs)->has_constants = false;
3264 VN_INFO (lhs)->expr = NULL_TREE;
3267 if ((TREE_CODE (lhs) == SSA_NAME
3268 /* We can substitute SSA_NAMEs that are live over
3269 abnormal edges with their constant value. */
3270 && !(gimple_assign_copy_p (stmt)
3271 && is_gimple_min_invariant (rhs1))
3272 && !(simplified
3273 && is_gimple_min_invariant (simplified))
3274 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3275 /* Stores or copies from SSA_NAMEs that are live over
3276 abnormal edges are a problem. */
3277 || (code == SSA_NAME
3278 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3279 changed = defs_to_varying (stmt);
3280 else if (REFERENCE_CLASS_P (lhs)
3281 || DECL_P (lhs))
3282 changed = visit_reference_op_store (lhs, rhs1, stmt);
3283 else if (TREE_CODE (lhs) == SSA_NAME)
3285 if ((gimple_assign_copy_p (stmt)
3286 && is_gimple_min_invariant (rhs1))
3287 || (simplified
3288 && is_gimple_min_invariant (simplified)))
3290 VN_INFO (lhs)->has_constants = true;
3291 if (simplified)
3292 changed = set_ssa_val_to (lhs, simplified);
3293 else
3294 changed = set_ssa_val_to (lhs, rhs1);
3296 else
3298 switch (get_gimple_rhs_class (code))
3300 case GIMPLE_UNARY_RHS:
3301 case GIMPLE_BINARY_RHS:
3302 case GIMPLE_TERNARY_RHS:
3303 changed = visit_nary_op (lhs, stmt);
3304 break;
3305 case GIMPLE_SINGLE_RHS:
3306 switch (TREE_CODE_CLASS (code))
3308 case tcc_reference:
3309 /* VOP-less references can go through unary case. */
3310 if ((code == REALPART_EXPR
3311 || code == IMAGPART_EXPR
3312 || code == VIEW_CONVERT_EXPR
3313 || code == BIT_FIELD_REF)
3314 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
3316 changed = visit_nary_op (lhs, stmt);
3317 break;
3319 /* Fallthrough. */
3320 case tcc_declaration:
3321 changed = visit_reference_op_load (lhs, rhs1, stmt);
3322 break;
3323 default:
3324 if (code == ADDR_EXPR)
3326 changed = visit_nary_op (lhs, stmt);
3327 break;
3329 else if (code == CONSTRUCTOR)
3331 changed = visit_nary_op (lhs, stmt);
3332 break;
3334 changed = defs_to_varying (stmt);
3336 break;
3337 default:
3338 changed = defs_to_varying (stmt);
3339 break;
3343 else
3344 changed = defs_to_varying (stmt);
3346 else if (is_gimple_call (stmt))
3348 tree lhs = gimple_call_lhs (stmt);
3350 /* ??? We could try to simplify calls. */
3352 if (stmt_has_constants (stmt)
3353 && TREE_CODE (lhs) == SSA_NAME)
3354 VN_INFO (lhs)->has_constants = true;
3355 else if (TREE_CODE (lhs) == SSA_NAME)
3357 /* We reset expr and constantness here because we may
3358 have been value numbering optimistically, and
3359 iterating. They may become non-constant in this case,
3360 even if they were optimistically constant. */
3361 VN_INFO (lhs)->has_constants = false;
3362 VN_INFO (lhs)->expr = NULL_TREE;
3365 if (TREE_CODE (lhs) == SSA_NAME
3366 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3367 changed = defs_to_varying (stmt);
3368 /* ??? We should handle stores from calls. */
3369 else if (TREE_CODE (lhs) == SSA_NAME)
3371 if (!gimple_call_internal_p (stmt)
3372 && gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
3373 changed = visit_reference_op_call (lhs, stmt);
3374 else
3375 changed = defs_to_varying (stmt);
3377 else
3378 changed = defs_to_varying (stmt);
3381 done:
3382 return changed;
3385 /* Compare two operands by reverse postorder index */
3387 static int
3388 compare_ops (const void *pa, const void *pb)
3390 const tree opa = *((const tree *)pa);
3391 const tree opb = *((const tree *)pb);
3392 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3393 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3394 basic_block bba;
3395 basic_block bbb;
3397 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3398 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3399 else if (gimple_nop_p (opstmta))
3400 return -1;
3401 else if (gimple_nop_p (opstmtb))
3402 return 1;
3404 bba = gimple_bb (opstmta);
3405 bbb = gimple_bb (opstmtb);
3407 if (!bba && !bbb)
3408 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3409 else if (!bba)
3410 return -1;
3411 else if (!bbb)
3412 return 1;
3414 if (bba == bbb)
3416 if (gimple_code (opstmta) == GIMPLE_PHI
3417 && gimple_code (opstmtb) == GIMPLE_PHI)
3418 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3419 else if (gimple_code (opstmta) == GIMPLE_PHI)
3420 return -1;
3421 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3422 return 1;
3423 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3424 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3425 else
3426 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3428 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3431 /* Sort an array containing members of a strongly connected component
3432 SCC so that the members are ordered by RPO number.
3433 This means that when the sort is complete, iterating through the
3434 array will give you the members in RPO order. */
3436 static void
3437 sort_scc (VEC (tree, heap) *scc)
3439 VEC_qsort (tree, scc, compare_ops);
3442 /* Insert the no longer used nary ONARY into the hash tables of INFO. */
3444 static void
3445 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3447 size_t size = sizeof_vn_nary_op (onary->length);
3448 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3449 &info->nary_obstack);
3450 memcpy (nary, onary, size);
3451 vn_nary_op_insert_into (nary, info->nary, false);
3454 /* Insert the no longer used phi OPHI into the hash tables of INFO. */
3456 static void
3457 copy_phi (vn_phi_t ophi, vn_tables_t info)
3459 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3460 void **slot;
3461 memcpy (phi, ophi, sizeof (*phi));
3462 ophi->phiargs = NULL;
3463 slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
3464 gcc_assert (!*slot);
3465 *slot = phi;
3468 /* Insert the no longer used reference OREF into the hash tables of INFO. */
3470 static void
3471 copy_reference (vn_reference_t oref, vn_tables_t info)
3473 vn_reference_t ref;
3474 void **slot;
3475 ref = (vn_reference_t) pool_alloc (info->references_pool);
3476 memcpy (ref, oref, sizeof (*ref));
3477 oref->operands = NULL;
3478 slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
3479 INSERT);
3480 if (*slot)
3481 free_reference (*slot);
3482 *slot = ref;
3485 /* Process a strongly connected component in the SSA graph. */
3487 static void
3488 process_scc (VEC (tree, heap) *scc)
3490 tree var;
3491 unsigned int i;
3492 unsigned int iterations = 0;
3493 bool changed = true;
3494 htab_iterator hi;
3495 vn_nary_op_t nary;
3496 vn_phi_t phi;
3497 vn_reference_t ref;
3499 /* If the SCC has a single member, just visit it. */
3500 if (VEC_length (tree, scc) == 1)
3502 tree use = VEC_index (tree, scc, 0);
3503 if (VN_INFO (use)->use_processed)
3504 return;
3505 /* We need to make sure it doesn't form a cycle itself, which can
3506 happen for self-referential PHI nodes. In that case we would
3507 end up inserting an expression with VN_TOP operands into the
3508 valid table which makes us derive bogus equivalences later.
3509 The cheapest way to check this is to assume it for all PHI nodes. */
3510 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3511 /* Fallthru to iteration. */ ;
3512 else
3514 visit_use (use);
3515 return;
3519 /* Iterate over the SCC with the optimistic table until it stops
3520 changing. */
3521 current_info = optimistic_info;
3522 while (changed)
3524 changed = false;
3525 iterations++;
3526 if (dump_file && (dump_flags & TDF_DETAILS))
3527 fprintf (dump_file, "Starting iteration %d\n", iterations);
3528 /* As we are value-numbering optimistically we have to
3529 clear the expression tables and the simplified expressions
3530 in each iteration until we converge. */
3531 htab_empty (optimistic_info->nary);
3532 htab_empty (optimistic_info->phis);
3533 htab_empty (optimistic_info->references);
3534 obstack_free (&optimistic_info->nary_obstack, NULL);
3535 gcc_obstack_init (&optimistic_info->nary_obstack);
3536 empty_alloc_pool (optimistic_info->phis_pool);
3537 empty_alloc_pool (optimistic_info->references_pool);
3538 FOR_EACH_VEC_ELT (tree, scc, i, var)
3539 VN_INFO (var)->expr = NULL_TREE;
3540 FOR_EACH_VEC_ELT (tree, scc, i, var)
3541 changed |= visit_use (var);
3544 statistics_histogram_event (cfun, "SCC iterations", iterations);
3546 /* Finally, copy the contents of the no longer used optimistic
3547 table to the valid table. */
3548 FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
3549 copy_nary (nary, valid_info);
3550 FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
3551 copy_phi (phi, valid_info);
3552 FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
3553 copy_reference (ref, valid_info);
3555 current_info = valid_info;
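/* Hypothetical GIMPLE illustration of why optimistic iteration pays
   off: for the cycle

     i_1 = PHI <0(entry), i_2(latch)>
     i_2 = i_1 + 0;

   the first iteration optimistically values i_1 to 0 (its only
   non-VN_TOP argument) and then folds i_2 to 0 as well; the second
   iteration changes nothing, so the SCC converges with both names
   proven equal to 0.  */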
3558 DEF_VEC_O(ssa_op_iter);
3559 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
3561 /* Pop the components of the found SCC for NAME off the SCC stack
3562 and process them. Returns true if all went well, false if
3563 we ran into resource limits. */
3565 static bool
3566 extract_and_process_scc_for_name (tree name)
3568 VEC (tree, heap) *scc = NULL;
3569 tree x;
3571 /* Found an SCC, pop the components off the SCC stack and
3572 process them. */
3575 x = VEC_pop (tree, sccstack);
3577 VN_INFO (x)->on_sccstack = false;
3578 VEC_safe_push (tree, heap, scc, x);
3579 } while (x != name);
3581 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
3582 if (VEC_length (tree, scc)
3583 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3585 if (dump_file)
3586 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3587 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
3588 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3589 return false;
3592 if (VEC_length (tree, scc) > 1)
3593 sort_scc (scc);
3595 if (dump_file && (dump_flags & TDF_DETAILS))
3596 print_scc (dump_file, scc);
3598 process_scc (scc);
3600 VEC_free (tree, heap, scc);
3602 return true;
3605 /* Depth first search on NAME to discover and process SCC's in the SSA
3606 graph.
3607 Execution of this algorithm relies on the fact that the SCC's are
3608 popped off the stack in topological order.
3609 Returns true if successful, false if we stopped processing SCC's due
3610 to resource constraints. */
3612 static bool
3613 DFS (tree name)
3615 VEC(ssa_op_iter, heap) *itervec = NULL;
3616 VEC(tree, heap) *namevec = NULL;
3617 use_operand_p usep = NULL;
3618 gimple defstmt;
3619 tree use;
3620 ssa_op_iter iter;
3622 start_over:
3623 /* SCC info */
3624 VN_INFO (name)->dfsnum = next_dfs_num++;
3625 VN_INFO (name)->visited = true;
3626 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3628 VEC_safe_push (tree, heap, sccstack, name);
3629 VN_INFO (name)->on_sccstack = true;
3630 defstmt = SSA_NAME_DEF_STMT (name);
3632 /* Recursively DFS on our operands, looking for SCC's. */
3633 if (!gimple_nop_p (defstmt))
3635 /* Push a new iterator. */
3636 if (gimple_code (defstmt) == GIMPLE_PHI)
3637 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3638 else
3639 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3641 else
3642 clear_and_done_ssa_iter (&iter);
3644 while (1)
3646 /* If we are done processing uses of a name, go up the stack
3647 of iterators and process SCCs as we found them. */
3648 if (op_iter_done (&iter))
3650 /* See if we found an SCC. */
3651 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3652 if (!extract_and_process_scc_for_name (name))
3654 VEC_free (tree, heap, namevec);
3655 VEC_free (ssa_op_iter, heap, itervec);
3656 return false;
3659 /* Check if we are done. */
3660 if (VEC_empty (tree, namevec))
3662 VEC_free (tree, heap, namevec);
3663 VEC_free (ssa_op_iter, heap, itervec);
3664 return true;
3667 /* Restore the last use walker and continue walking there. */
3668 use = name;
3669 name = VEC_pop (tree, namevec);
3670 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
3671 sizeof (ssa_op_iter));
3672 VEC_pop (ssa_op_iter, itervec);
3673 goto continue_walking;
3676 use = USE_FROM_PTR (usep);
3678 /* Since we handle phi nodes, we will sometimes get
3679 invariants in the use expression. */
3680 if (TREE_CODE (use) == SSA_NAME)
3682 if (! (VN_INFO (use)->visited))
3684 /* Recurse by pushing the current use walking state on
3685 the stack and starting over. */
3686 VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
3687 VEC_safe_push(tree, heap, namevec, name);
3688 name = use;
3689 goto start_over;
3691 continue_walking:
3692 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3693 VN_INFO (use)->low);
3695 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3696 && VN_INFO (use)->on_sccstack)
3698 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3699 VN_INFO (name)->low);
3703 usep = op_iter_next_use (&iter);
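/* A sketch of the recursion this iterative walk replaces; NAMEVEC and
   ITERVEC above are the explicit equivalent of the call stack in the
   classical formulation of Tarjan's SCC algorithm over the SSA
   use-def graph:

     DFS (name):
       dfsnum[name] = low[name] = next_dfs_num++; push name on sccstack
       for each SSA use U of name's defining statement:
         if U not visited: DFS (U); low[name] = MIN (low[name], low[U])
         else if U on sccstack: low[name] = MIN (low[name], dfsnum[U])
       if low[name] == dfsnum[name]: pop and process the SCC  */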
3707 /* Allocate a value number table. */
3709 static void
3710 allocate_vn_table (vn_tables_t table)
3712 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3713 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3714 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3715 free_reference);
3717 gcc_obstack_init (&table->nary_obstack);
3718 table->phis_pool = create_alloc_pool ("VN phis",
3719 sizeof (struct vn_phi_s),
3720 30);
3721 table->references_pool = create_alloc_pool ("VN references",
3722 sizeof (struct vn_reference_s),
3723 30);
3726 /* Free a value number table. */
3728 static void
3729 free_vn_table (vn_tables_t table)
3731 htab_delete (table->phis);
3732 htab_delete (table->nary);
3733 htab_delete (table->references);
3734 obstack_free (&table->nary_obstack, NULL);
3735 free_alloc_pool (table->phis_pool);
3736 free_alloc_pool (table->references_pool);
3739 static void
3740 init_scc_vn (void)
3742 size_t i;
3743 int j;
3744 int *rpo_numbers_temp;
3746 calculate_dominance_info (CDI_DOMINATORS);
3747 sccstack = NULL;
3748 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3749 free);
3751 constant_value_ids = BITMAP_ALLOC (NULL);
3753 next_dfs_num = 1;
3754 next_value_id = 1;
3756 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
3757 /* VEC_alloc doesn't actually grow it to the right size; it just
3758 preallocates the space to do so. */
3759 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
3760 gcc_obstack_init (&vn_ssa_aux_obstack);
3762 shared_lookup_phiargs = NULL;
3763 shared_lookup_references = NULL;
3764 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3765 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3766 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3768 /* rpo_numbers_temp holds the RPO ordering: rpo_numbers_temp[i] = bb
3769 means that the i'th block in RPO order is bb. We want to map bb's
3770 to RPO numbers, so we need to rearrange this array. */
3771 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3772 rpo_numbers[rpo_numbers_temp[j]] = j;
3774 XDELETE (rpo_numbers_temp);
3776 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3778 /* Create the VN_INFO structures, and initialize value numbers to
3779 TOP. */
3780 for (i = 0; i < num_ssa_names; i++)
3782 tree name = ssa_name (i);
3783 if (name)
3785 VN_INFO_GET (name)->valnum = VN_TOP;
3786 VN_INFO (name)->expr = NULL_TREE;
3787 VN_INFO (name)->value_id = 0;
3791 renumber_gimple_stmt_uids ();
3793 /* Create the valid and optimistic value numbering tables. */
3794 valid_info = XCNEW (struct vn_tables_s);
3795 allocate_vn_table (valid_info);
3796 optimistic_info = XCNEW (struct vn_tables_s);
3797 allocate_vn_table (optimistic_info);
3800 void
3801 free_scc_vn (void)
3803 size_t i;
3805 htab_delete (constant_to_value_id);
3806 BITMAP_FREE (constant_value_ids);
3807 VEC_free (tree, heap, shared_lookup_phiargs);
3808 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
3809 XDELETEVEC (rpo_numbers);
3811 for (i = 0; i < num_ssa_names; i++)
3813 tree name = ssa_name (i);
3814 if (name
3815 && VN_INFO (name)->needs_insertion)
3816 release_ssa_name (name);
3818 obstack_free (&vn_ssa_aux_obstack, NULL);
3819 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
3821 VEC_free (tree, heap, sccstack);
3822 free_vn_table (valid_info);
3823 XDELETE (valid_info);
3824 free_vn_table (optimistic_info);
3825 XDELETE (optimistic_info);
3828 /* Set *ID if we computed something useful in RESULT. */
3830 static void
3831 set_value_id_for_result (tree result, unsigned int *id)
3833 if (result)
3835 if (TREE_CODE (result) == SSA_NAME)
3836 *id = VN_INFO (result)->value_id;
3837 else if (is_gimple_min_invariant (result))
3838 *id = get_or_alloc_constant_value_id (result);

/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  htab_iterator hi;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HTAB_ELEMENT (valid_info->nary,
			 vno, vn_nary_op_t, hi)
    set_value_id_for_result (vno->result, &vno->value_id);

  FOR_EACH_HTAB_ELEMENT (valid_info->phis,
			 vp, vn_phi_t, hi)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HTAB_ELEMENT (valid_info->references,
			 vr, vn_reference_t, hi)
    set_value_id_for_result (vr->result, &vr->value_id);
}

/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
   how we use the alias oracle walking during the VN process.  */
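
/* A minimal calling sketch (hypothetical caller; the enum value used
   below is an assumption about tree-ssa-sccvn.h, not taken from this
   file):

     if (!run_scc_vn (VN_WALKREWRITE))
       return 0;	/* VN hit its resource limits; give up.  */
     ...consult SSA_VAL () and the value ids...
     free_scc_vn ();

   The vn_lookup_kind argument controls how aggressively reference
   lookups may walk (and rewrite through) memory via the alias
   oracle.  */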

bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  size_t i;
  tree param;
  bool changed = true;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();
  current_info = valid_info;

  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    {
      if (gimple_default_def (cfun, param) != NULL)
	{
	  tree def = gimple_default_def (cfun, param);
	  VN_INFO (def)->valnum = def;
	}
    }

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->visited == false
	  && !has_zero_uses (name))
	if (!DFS (name))
	  {
	    free_scc_vn ();
	    return false;
	  }
    }

  /* Initialize the value ids.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (info->valnum == name
	  || info->valnum == VN_TOP)
	info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }
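
  /* For example (names made up): if a_1 is its own value number it
     heads an equivalence class and receives a fresh id from
     get_next_value_id, while if b_2's value number is the constant 4
     it shares the single id get_or_alloc_constant_value_id keeps for
     that constant.  A name valued to another SSA name is left at
     id 0 here and is filled in by the propagation below.  */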

  /* Propagate until they stop changing.  */
  while (changed)
    {
      changed = false;
      for (i = 1; i < num_ssa_names; ++i)
	{
	  tree name = ssa_name (i);
	  vn_ssa_aux_t info;
	  if (!name)
	    continue;
	  info = VN_INFO (name);
	  if (TREE_CODE (info->valnum) == SSA_NAME
	      && info->valnum != name
	      && info->value_id != VN_INFO (info->valnum)->value_id)
	    {
	      changed = true;
	      info->value_id = VN_INFO (info->valnum)->value_id;
	    }
	}
    }
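
  /* Why iterate (an illustrative sketch, names made up): suppose
     b_2's value number is c_3 and c_3's is a_1.  On the first pass
     b_2 is visited while c_3 still carries the initial id 0, so only
     c_3 picks up a_1's id; the second pass then updates b_2 from c_3,
     and a final pass observes no change and exits the loop.  */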

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);
	  if (name
	      && VN_INFO (name)->visited
	      && SSA_VAL (name) != name)
	    {
	      print_generic_expr (dump_file, name, 0);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, SSA_VAL (name), 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  return true;
}

/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}

/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
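
/* For illustration (not a case taken from this file): two INTEGER_CST
   nodes of the same type and value need not be the same node, so the
   pointer test above can fail while operand_equal_p still recognizes
   them as structurally equal; the OEP_PURE_SAME flag additionally
   allows two calls to the same const/pure function with equal
   arguments to compare equal.  */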

/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (type)
	       && TYPE_OVERFLOW_TRAPS (type))
	honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
				       honor_trapv,
				       honor_nans, honor_snans, rhs2,
				       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}
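
/* Illustrative cases (not exercised in this file): an integer
   TRUNC_DIV_EXPR whose second operand is not a known-nonzero constant
   may trap on division by zero, which operation_could_trap_helper_p
   reports through HANDLED/RET; a floating-point comparison may trap
   when NaNs must be honored under -ftrapping-math.  The final loop
   additionally asks tree_could_trap_p whether any operand could trap
   on its own.  */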