/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "gimple-fold.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
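
/* An illustrative example (not from the sources above) of why the
   optimistic iteration over SCCs pays off.  Consider the SSA cycle

       i_1 = PHI <0 (preheader), i_3 (latch)>
       i_2 = i_1 + 1;
       i_3 = i_2 - 1;

   i_1, i_2 and i_3 depend on each other and form an SCC in the SSA
   graph.  Optimistically assuming i_1 == 0 on the first iteration
   yields i_2 == 1 and i_3 == 0, so the PHI then sees <0, 0> and the
   assumption is confirmed: i_1 and i_3 both get value number 0.
   A single non-iterating pass over the names could not prove this.  */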

/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;


DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
                                SSA_NAME_VERSION (name));
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), value);
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}

/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;
  enum tree_code code;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is not an assignment use it directly.  */
  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  code = gimple_assign_rhs_code (def_stmt);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      if ((code == REALPART_EXPR
           || code == IMAGPART_EXPR
           || code == VIEW_CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
                                      0)) == SSA_NAME)
        expr = fold_build1 (code,
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (code,
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (code,
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    case tcc_exceptional:
      if (code == CONSTRUCTOR
          && TREE_CODE
               (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
        expr = gimple_assign_rhs1 (def_stmt);
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
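
/* E.g. (illustrative): if x_1 value-numbers to y_2 and y_2 is defined
   by y_2 = z_3 + 1, the representative expression returned for x_1 is
   the folded tree "z_3 + 1", which is then cached in vn->expr so later
   queries for the same value do not rebuild it.  */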

/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, INSERT);
  if (*slot)
    return ((vn_constant_t)*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = (void *) vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
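
/* E.g. (illustrative): the first call to get_or_alloc_constant_value_id
   on the constant 42 allocates a fresh value id and marks it in
   constant_value_ids; every expression later proven equal to 42 reuses
   that same id, and value_id_constant_p then identifies the id as
   denoting a constant.  */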

/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
          /* We do not care for differences in type qualification.  */
          && (vro1->type == vro2->type
              || (vro1->type && vro2->type
                  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
                                         TYPE_MAIN_VARIANT (vro2->type))))
          && expressions_equal_p (vro1->op0, vro2->op0)
          && expressions_equal_p (vro1->op1, vro2->op1)
          && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vn_reference_op_s, vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
        deref = true;
      else if (vro->opcode != ADDR_EXPR)
        deref = false;
      if (vro->off != -1)
        {
          if (off == -1)
            off = 0;
          off += vro->off;
        }
      else
        {
          if (off != -1
              && off != 0)
            result = iterative_hash_hashval_t (off, result);
          off = -1;
          if (deref
              && vro->opcode == ADDR_EXPR)
            {
              if (vro->op0)
                {
                  tree op = TREE_OPERAND (vro->op0, 0);
                  result = iterative_hash_hashval_t (TREE_CODE (op), result);
                  result = iterative_hash_expr (op, result);
                }
            }
          else
            result = vn_reference_op_compute_hash (vro, result);
        }
    }
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
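
/* Note (illustrative): because runs of constant offsets are
   accumulated and hashed as a single value instead of hashing each
   operand, the operand lists for a.b.c and for MEM[&a, 8] hash
   identically when .b.c amounts to byte offset 8, matching the
   structural tolerance of vn_reference_eq below.  */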

/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  unsigned i, j;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
        return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
           && (TYPE_PRECISION (vr1->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
           && (TYPE_PRECISION (vr2->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
        {
          if (vro1->opcode == MEM_REF)
            deref1 = true;
          if (vro1->off == -1)
            break;
          off1 += vro1->off;
        }
      for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
        {
          if (vro2->opcode == MEM_REF)
            deref2 = true;
          if (vro2->off == -1)
            break;
          off2 += vro2->off;
        }
      if (off1 != off2)
        return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
        {
          memset (&tem1, 0, sizeof (tem1));
          tem1.op0 = TREE_OPERAND (vro1->op0, 0);
          tem1.type = TREE_TYPE (tem1.op0);
          tem1.opcode = TREE_CODE (tem1.op0);
          vro1 = &tem1;
        }
      if (deref2 && vro2->opcode == ADDR_EXPR)
        {
          memset (&tem2, 0, sizeof (tem2));
          tem2.op0 = TREE_OPERAND (vro2->op0, 0);
          tem2.type = TREE_TYPE (tem2.op0);
          tem2.opcode = TREE_CODE (tem2.op0);
          vro2 = &tem2;
        }
      if (!vn_reference_op_eq (vro1, vro2))
        return false;
      ++j;
      ++i;
    }
  while (VEC_length (vn_reference_op_s, vr1->operands) != i
         || VEC_length (vn_reference_op_s, vr2->operands) != j);

  return true;
}
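
/* E.g. (illustrative): with field offsets such that a.b.c lies at
   byte 8 within a, the operand lists of a.b.c and MEM[&a, 8] compare
   equal above: both walks accumulate their constant offsets to 8,
   and the ADDR_EXPR operands are both unwrapped to the decl a before
   the element-wise comparison.  Differently-shaped access paths to
   the same memory thus receive the same value number.  */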

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
        {
        case MEM_REF:
          /* The base address gets its own vn_reference_op_s structure.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          if (host_integerp (TREE_OPERAND (ref, 1), 0))
            temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          {
            tree this_offset = component_ref_field_offset (ref);
            if (this_offset
                && TREE_CODE (this_offset) == INTEGER_CST)
              {
                tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
                if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
                  {
                    double_int off
                      = double_int_add (tree_to_double_int (this_offset),
                                        double_int_rshift
                                          (tree_to_double_int (bit_offset),
                                           BITS_PER_UNIT == 8
                                           ? 3 : exact_log2 (BITS_PER_UNIT),
                                           HOST_BITS_PER_DOUBLE_INT, true));
                    if (double_int_fits_in_shwi_p (off))
                      temp.off = off.low;
                  }
              }
          }
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
          if (TREE_CODE (temp.op0) == INTEGER_CST
              && TREE_CODE (temp.op1) == INTEGER_CST
              && TREE_CODE (temp.op2) == INTEGER_CST)
            {
              double_int off = tree_to_double_int (temp.op0);
              off = double_int_add (off,
                                    double_int_neg
                                      (tree_to_double_int (temp.op1)));
              off = double_int_mul (off, tree_to_double_int (temp.op2));
              if (double_int_fits_in_shwi_p (off))
                temp.off = off.low;
            }
          break;
        case VAR_DECL:
          if (DECL_HARD_REGISTER (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthru.  */
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
          /* Canonicalize decls to MEM[&decl] which is what we end up with
             when valueizing MEM[ptr] with ptr = &decl.  */
          temp.opcode = MEM_REF;
          temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
          temp.off = 0;
          VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
          temp.opcode = ADDR_EXPR;
          temp.op0 = build_fold_addr_expr (ref);
          temp.type = TREE_TYPE (temp.op0);
          temp.off = -1;
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case FIXED_CST:
        case CONSTRUCTOR:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (IE they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration  */
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          temp.off = 0;
          break;
        case IMAGPART_EXPR:
          /* This is only interesting for its constant offset.  */
          temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
          break;
        default:
          gcc_unreachable ();
        }
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
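
/* For example (illustrative), the reference a.b[i] is decomposed,
   outermost operation first, into roughly

       { ARRAY_REF <i, low, elsz>, COMPONENT_REF <b>,
         MEM_REF <0>, ADDR_EXPR <&a> }

   where the decl a has been canonicalized to MEM[&a] as described
   in the VAR_DECL case above.  */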

/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
        size = -1;
      else
        size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case ADDR_EXPR:
          /* Apart from ADDR_EXPR arguments to MEM_REF.  */
          if (base != NULL_TREE
              && TREE_CODE (base) == MEM_REF
              && op->op0
              && DECL_P (TREE_OPERAND (op->op0, 0)))
            {
              vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
              base = TREE_OPERAND (op->op0, 0);
              if (pop->off == -1)
                {
                  max_size = -1;
                  offset = 0;
                }
              else
                offset += pop->off * BITS_PER_UNIT;
              op0_p = NULL;
              break;
            }
          /* Fallthru.  */
        case CALL_EXPR:
          return false;

        /* Record the base objects.  */
        case MEM_REF:
          base_alias_set = get_deref_alias_set (op->op0);
          *op0_p = build2 (MEM_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          op0_p = NULL;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += tree_low_cst (op->op1, 0);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */

            if (op->op1
                || !host_integerp (DECL_FIELD_OFFSET (field), 1))
              max_size = -1;
            else
              {
                offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
                           * BITS_PER_UNIT);
                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (!host_integerp (op->op0, 0)
              || !host_integerp (op->op1, 0)
              || !host_integerp (op->op2, 0))
            max_size = -1;
          else
            {
              HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
              hindex -= TREE_INT_CST_LOW (op->op1);
              hindex *= TREE_INT_CST_LOW (op->op2);
              hindex *= BITS_PER_UNIT;
              offset += hindex;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);

  return true;
}
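
/* As an (illustrative) continuation of the example above, the operand
   list for a.b, { COMPONENT_REF <b>, MEM_REF <0>, ADDR_EXPR <&a> },
   yields an ao_ref with base a, offset the bit position of field b,
   and size/max_size the size of b - enough for the alias oracle to
   disambiguate it against other references.  */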

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  temp.off = -1;
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
                            unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  tree addr_base;
  HOST_WIDE_INT addr_offset;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
                                             &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != op->op0)
    {
      double_int off = tree_to_double_int (mem_op->op0);
      off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
      off = double_int_add (off, shwi_to_double_int (addr_offset));
      mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (host_integerp (mem_op->op0, 0))
        mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
      else
        mem_op->off = -1;
    }
}
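
/* E.g. (illustrative): for MEM[&a.b, 4] with a.b at byte offset 8
   within a, the ADDR_EXPR operand is rewritten to &a and the offset 8
   is folded into the preceding MEM_REF offset, giving MEM[&a, 12].  */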

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
                                     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  gimple def_stmt;
  enum tree_code code;
  double_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = tree_to_double_int (mem_op->op0);
  off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
                                                 &addr_offset);
      if (!addr_base
          || TREE_CODE (addr_base) != MEM_REF)
        return;

      off = double_int_add (off, shwi_to_double_int (addr_offset));
      off = double_int_add (off, mem_ref_offset (addr_base));
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
          || TREE_CODE (ptroff) != INTEGER_CST)
        return;

      off = double_int_add (off, tree_to_double_int (ptroff));
      op->op0 = ptr;
    }

  mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (host_integerp (mem_op->op0, 0))
    mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}
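
/* E.g. (illustrative): given ptr_1 = ptr_2 + 4, a reference
   MEM[ptr_1, 8] is rewritten to MEM[ptr_2, 12], and the rewrite
   recurses in case ptr_2 is itself defined by an ADDR_EXPR or
   another POINTER_PLUS_EXPR.  */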

/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  VEC (vn_reference_op_s, heap) *operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = VEC_index (vn_reference_op_s, operands, 0);
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && VEC_length (vn_reference_op_s, operands) >= 2
      && VEC_length (vn_reference_op_s, operands) <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (VEC_length (vn_reference_op_s, operands) > 2)
        arg1 = VEC_index (vn_reference_op_s, operands, 2);
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
          || (arg0->opcode == ADDR_EXPR
              && is_gimple_min_invariant (arg0->op0)))
        anyconst = true;
      if (arg1
          && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
              || (arg1->opcode == ADDR_EXPR
                  && is_gimple_min_invariant (arg1->op0))))
        anyconst = true;
      if (anyconst)
        {
          tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
                                         arg1 ? 2 : 1,
                                         arg0->op0,
                                         arg1 ? arg1->op0 : NULL);
          if (folded
              && TREE_CODE (folded) == NOP_EXPR)
            folded = TREE_OPERAND (folded, 0);
          if (folded
              && is_gimple_min_invariant (folded))
            return folded;
        }
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
           && TREE_CODE (op->op0) == INTEGER_CST
           && integer_zerop (op->op1)
           && VEC_length (vn_reference_op_s, operands) == 2)
    {
      vn_reference_op_t arg0;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (arg0->opcode == STRING_CST
          && (TYPE_MODE (op->type)
              == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
          && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
          && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
          && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
        return build_int_cst_type (op->type,
                                   (TREE_STRING_POINTER (arg0->op0)
                                    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}
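
/* E.g. (illustrative): a one-byte read such as "abc"[1] folds to the
   character constant 'b' via the constant-string case above, and a
   builtin call with invariant arguments such as __builtin_strlen ("abc")
   folds to 3 via build_call_expr.  */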

/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs_1 (VEC (vn_reference_op_s, heap) *orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (vn_reference_op_s, orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          tree tem = SSA_VAL (vro->op0);
          if (tem != vro->op0)
            {
              *valueized_anything = true;
              vro->op0 = tem;
            }
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        {
          tree tem = SSA_VAL (vro->op1);
          if (tem != vro->op1)
            {
              *valueized_anything = true;
              vro->op1 = tem;
            }
        }
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        {
          tree tem = SSA_VAL (vro->op2);
          if (tem != vro->op2)
            {
              *valueized_anything = true;
              vro->op2 = tem;
            }
        }
      /* If it transforms from an SSA_NAME to an address, fold with
         a preceding indirect reference.  */
      if (i > 0
          && vro->op0
          && TREE_CODE (vro->op0) == ADDR_EXPR
          && VEC_index (vn_reference_op_s,
                        orig, i - 1)->opcode == MEM_REF)
        vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
               && vro->opcode == SSA_NAME
               && VEC_index (vn_reference_op_s,
                             orig, i - 1)->opcode == MEM_REF)
        vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
         one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
               && vro->off == -1
               && TREE_CODE (vro->op0) == INTEGER_CST
               && TREE_CODE (vro->op1) == INTEGER_CST
               && TREE_CODE (vro->op2) == INTEGER_CST)
        {
          double_int off = tree_to_double_int (vro->op0);
          off = double_int_add (off,
                                double_int_neg
                                  (tree_to_double_int (vro->op1)));
          off = double_int_mul (off, tree_to_double_int (vro->op2));
          if (double_int_fits_in_shwi_p (off))
            vro->off = off.low;
        }
    }

  return orig;
}

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}

static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
                                              valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}

/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static VEC (vn_reference_op_s, heap) *lhs_ops = NULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definition's LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      VEC (vn_reference_op_s, heap) *tem;
      tree lhs = gimple_assign_lhs (def_stmt);
      bool valueized_anything = false;
      /* Avoid re-allocation overhead.  */
      VEC_truncate (vn_reference_op_s, lhs_ops, 0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      tem = lhs_ops;
      lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
      gcc_assert (lhs_ops == tem);
      if (valueized_anything)
        {
          lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
                                                      get_alias_set (lhs),
                                                      TREE_TYPE (lhs), lhs_ops);
          if (lhs_ref_ok
              && !refs_may_alias_p_1 (ref, &lhs_ref, true))
            return NULL;
        }
      else
        {
          ao_ref_init (&lhs_ref, lhs);
          lhs_ref_ok = true;
        }
    }

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* We can't deduce anything useful from clobbers.  */
  if (gimple_clobber_p (def_stmt))
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
          == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
          && maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* 3) Assignment from a constant.  We can use folds native encode/interpret
     routines to extract the assigned bits.  */
  else if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
           && ref->size == maxsize
           && maxsize % BITS_PER_UNIT == 0
           && offset % BITS_PER_UNIT == 0
           && is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
          && maxsize2 == size2
          && size2 % BITS_PER_UNIT == 0
          && offset2 % BITS_PER_UNIT == 0
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          /* We support up to 512-bit values (for V8DFmode).  */
          unsigned char buffer[64];
          int len;

          len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
                                    buffer, sizeof (buffer));
          if (len > 0)
            {
              tree val = native_interpret_expr (vr->type,
                                                buffer
                                                + ((offset - offset2)
                                                   / BITS_PER_UNIT),
                                                ref->size / BITS_PER_UNIT);
              if (val)
                {
                  unsigned int value_id = get_or_alloc_constant_value_id (val);
                  return vn_reference_insert_pieces
                           (vuse, vr->set, vr->type,
                            VEC_copy (vn_reference_op_s, heap, vr->operands),
                            val, value_id);
                }
            }
        }
    }

  /* 4) Assignment from an SSA name whose definition we may be able
     to access pieces from.  */
  else if (ref->size == maxsize
           && is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
    {
      tree rhs1 = gimple_assign_rhs1 (def_stmt);
      gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt2)
          && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
              || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
          && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
        {
          tree base2;
          HOST_WIDE_INT offset2, size2, maxsize2, off;
          base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                           &offset2, &size2, &maxsize2);
          off = offset - offset2;
          if (maxsize2 != -1
              && maxsize2 == size2
              && operand_equal_p (base, base2, 0)
              && offset2 <= offset
              && offset2 + size2 >= offset + maxsize)
            {
              tree val = NULL_TREE;
              HOST_WIDE_INT elsz
                = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
              if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
                {
                  if (off == 0)
                    val = gimple_assign_rhs1 (def_stmt2);
                  else if (off == elsz)
                    val = gimple_assign_rhs2 (def_stmt2);
                }
              else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
                       && off % elsz == 0)
                {
                  tree ctor = gimple_assign_rhs1 (def_stmt2);
                  unsigned i = off / elsz;
                  if (i < CONSTRUCTOR_NELTS (ctor))
                    {
                      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
                      if (compare_tree_int (elt->index, i) == 0)
                        val = elt->value;
                    }
                }
              if (val)
                {
                  unsigned int value_id = get_or_alloc_constant_value_id (val);
                  return vn_reference_insert_pieces
                           (vuse, vr->set, vr->type,
                            VEC_copy (vn_reference_op_s, heap, vr->operands),
                            val, value_id);
                }
            }
        }
    }

  /* 5) For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      VEC (vn_reference_op_s, heap) *rhs = NULL;
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
        return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      if (maxsize2 == -1
          || (base != base2 && !operand_equal_p (base, base2, 0))
          || offset2 > offset
          || offset2 + size2 < offset + maxsize)
        return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
         contains valueized operands for the lhs.  */
      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
      j = VEC_length (vn_reference_op_s, lhs_ops) - 1;
      while (j >= 0 && i >= 0
             && vn_reference_op_eq (VEC_index (vn_reference_op_s,
                                               vr->operands, i),
                                    VEC_index (vn_reference_op_s, lhs_ops, j)))
        {
          i--;
          j--;
        }

      /* ??? The innermost op should always be a MEM_REF and we already
         checked that the assignment to the lhs kills vr.  Thus for
         aggregate copies using char[] types the vn_reference_op_eq
         may fail when comparing types for compatibility.  But we really
         don't care here - further lookups with the rewritten operands
         will simply fail if we messed up types too badly.  */
      if (j == 0 && i >= 0
          && VEC_index (vn_reference_op_s, lhs_ops, 0)->opcode == MEM_REF
          && VEC_index (vn_reference_op_s, lhs_ops, 0)->off != -1
          && (VEC_index (vn_reference_op_s, lhs_ops, 0)->off
              == VEC_index (vn_reference_op_s, vr->operands, i)->off))
        i--, j--;

      /* i now points to the first additional op.
         ??? LHS may not be completely contained in VR, one or more
         VIEW_CONVERT_EXPRs could be in its way.  We could at least
         try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
        return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
          > VEC_length (vn_reference_op_s, vr->operands))
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
                         i + 1 + VEC_length (vn_reference_op_s, rhs));
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands,
                      i + 1 + VEC_length (vn_reference_op_s, rhs));
      FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
        VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
      VEC_free (vn_reference_op_s, heap, rhs);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* 6) For memcpy copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && is_gimple_reg_type (vr->type)
           /* ??? Handle BCOPY as well.  */
           && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
               || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
               || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
           && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
               || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
           && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
               || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
           && host_integerp (gimple_call_arg (def_stmt, 2), 1))
    {
      tree lhs, rhs;
      ao_ref r;
      HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
      vn_reference_op_s op;
      HOST_WIDE_INT at;

      /* Only handle non-variable, addressable refs.  */
      if (ref->size != maxsize
          || offset % BITS_PER_UNIT != 0
          || ref->size % BITS_PER_UNIT != 0)
        return (void *)-1;

      /* Extract a pointer base and an offset for the destination.  */
      lhs = gimple_call_arg (def_stmt, 0);
      lhs_offset = 0;
      if (TREE_CODE (lhs) == SSA_NAME)
        lhs = SSA_VAL (lhs);
      if (TREE_CODE (lhs) == ADDR_EXPR)
        {
          tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
                                                    &lhs_offset);
          if (!tem)
            return (void *)-1;
          if (TREE_CODE (tem) == MEM_REF
              && host_integerp (TREE_OPERAND (tem, 1), 1))
            {
              lhs = TREE_OPERAND (tem, 0);
              lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
            }
          else if (DECL_P (tem))
            lhs = build_fold_addr_expr (tem);
          else
            return (void *)-1;
        }
      if (TREE_CODE (lhs) != SSA_NAME
          && TREE_CODE (lhs) != ADDR_EXPR)
        return (void *)-1;

      /* Extract a pointer base and an offset for the source.  */
      rhs = gimple_call_arg (def_stmt, 1);
      rhs_offset = 0;
      if (TREE_CODE (rhs) == SSA_NAME)
        rhs = SSA_VAL (rhs);
      if (TREE_CODE (rhs) == ADDR_EXPR)
        {
          tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
                                                    &rhs_offset);
          if (!tem)
            return (void *)-1;
          if (TREE_CODE (tem) == MEM_REF
              && host_integerp (TREE_OPERAND (tem, 1), 1))
            {
              rhs = TREE_OPERAND (tem, 0);
              rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
            }
          else if (DECL_P (tem))
            rhs = build_fold_addr_expr (tem);
          else
            return (void *)-1;
        }
      if (TREE_CODE (rhs) != SSA_NAME
          && TREE_CODE (rhs) != ADDR_EXPR)
        return (void *)-1;

      copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));

      /* The bases of the destination and the references have to agree.  */
      if ((TREE_CODE (base) != MEM_REF
           && !DECL_P (base))
          || (TREE_CODE (base) == MEM_REF
              && (TREE_OPERAND (base, 0) != lhs
                  || !host_integerp (TREE_OPERAND (base, 1), 1)))
          || (DECL_P (base)
              && (TREE_CODE (lhs) != ADDR_EXPR
                  || TREE_OPERAND (lhs, 0) != base)))
        return (void *)-1;

      /* And the access has to be contained within the memcpy destination.  */
      at = offset / BITS_PER_UNIT;
      if (TREE_CODE (base) == MEM_REF)
        at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
      if (lhs_offset > at
          || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
        return (void *)-1;

      /* Make room for 2 operands in the new reference.  */
      if (VEC_length (vn_reference_op_s, vr->operands) < 2)
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands, 2);
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands, 2);

      /* The looked-through reference is a simple MEM_REF.  */
      memset (&op, 0, sizeof (op));
      op.type = vr->type;
      op.opcode = MEM_REF;
      op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
      op.off = at - lhs_offset + rhs_offset;
      VEC_replace (vn_reference_op_s, vr->operands, 0, &op);
      op.type = TREE_TYPE (rhs);
      op.opcode = TREE_CODE (rhs);
      op.op0 = rhs;
      op.off = -1;
      VEC_replace (vn_reference_op_s, vr->operands, 1, &op);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
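
/* E.g. (illustrative): after

       memset (&a, 0, sizeof (a));
       ... = a.b.c;

   case 1) above derives the value 0 for the load from a.b.c when no
   other store intervenes, and case 5) likewise lets a load from d.e
   after the aggregate copy "d = s;" be rewritten into a load from s.e
   and looked up again further up the VUSE chain.  */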

/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
                 VEC_length (vn_reference_op_s, operands));
  memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
          VEC_address (vn_reference_op_s, operands),
          sizeof (vn_reference_op_s)
          * VEC_length (vn_reference_op_s, operands));
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      vn_walk_kind = kind;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
        *vnresult =
          (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
                     vn_reference_t *vnresult)
{
  VEC (vn_reference_op_s, heap) *operands;
  struct vn_reference_s vr1;
  tree cst;
  bool valuezied_anything;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands
    = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
  vr1.type = TREE_TYPE (op);
  vr1.set = get_alias_set (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  if (kind != VN_NOWALK
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      /* Make sure to use a valueized reference if we valueized anything.
         Otherwise preserve the full reference for advanced TBAA.  */
      if (!valuezied_anything
          || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
                                             vr1.operands))
        ao_ref_init (&r, op);
      vn_walk_kind = kind;
      wvnresult =
        (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                vn_reference_lookup_2,
                                                vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
      if (wvnresult)
        {
          if (vnresult)
            *vnresult = wvnresult;
          return wvnresult->result;
        }

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}
1913 /* Insert OP into the current hash table with a value number of
1914 RESULT, and return the resulting reference structure we created. */
1916 vn_reference_t
1917 vn_reference_insert (tree op, tree result, tree vuse)
1919 void **slot;
1920 vn_reference_t vr1;
1922 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1923 if (TREE_CODE (result) == SSA_NAME)
1924 vr1->value_id = VN_INFO (result)->value_id;
1925 else
1926 vr1->value_id = get_or_alloc_constant_value_id (result);
1927 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1928 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
1929 vr1->type = TREE_TYPE (op);
1930 vr1->set = get_alias_set (op);
1931 vr1->hashcode = vn_reference_compute_hash (vr1);
1932 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
1934 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1935 INSERT);
1937 /* Because we look up stores using vuses, and value number failures
1938 using the vdefs (see visit_reference_op_store for how and why),
1939 it's possible that on failure we may try to insert an already
1940 inserted store. This is not wrong; there is no SSA name for a
1941 store that we could use as a differentiator anyway. Thus, unlike
1942 the other lookup functions, you cannot gcc_assert (!*slot)
1943 here. */
1945 /* But free the old slot in case of a collision. */
1946 if (*slot)
1947 free_reference (*slot);
1949 *slot = vr1;
1950 return vr1;
1953 /* Insert a reference by its pieces into the current hash table with
1954 a value number of RESULT. Return the resulting reference
1955 structure we created. */
1957 vn_reference_t
1958 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
1959 VEC (vn_reference_op_s, heap) *operands,
1960 tree result, unsigned int value_id)
1963 void **slot;
1964 vn_reference_t vr1;
1966 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1967 vr1->value_id = value_id;
1968 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1969 vr1->operands = valueize_refs (operands);
1970 vr1->type = type;
1971 vr1->set = set;
1972 vr1->hashcode = vn_reference_compute_hash (vr1);
1973 if (result && TREE_CODE (result) == SSA_NAME)
1974 result = SSA_VAL (result);
1975 vr1->result = result;
1977 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1978 INSERT);
1980 /* At this point we should have all the things inserted that we have
1981 seen before, and we should never try inserting something that
1982 already exists. */
1983 gcc_assert (!*slot);
1984 if (*slot)
1985 free_reference (*slot);
1987 *slot = vr1;
1988 return vr1;
1991 /* Compute and return the hash value for nary operation VNO1. */
1993 hashval_t
1994 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
1996 hashval_t hash;
1997 unsigned i;
1999 for (i = 0; i < vno1->length; ++i)
2000 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2001 vno1->op[i] = SSA_VAL (vno1->op[i]);
2003 if (vno1->length == 2
2004 && commutative_tree_code (vno1->opcode)
2005 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2007 tree temp = vno1->op[0];
2008 vno1->op[0] = vno1->op[1];
2009 vno1->op[1] = temp;
2012 hash = iterative_hash_hashval_t (vno1->opcode, 0);
2013 for (i = 0; i < vno1->length; ++i)
2014 hash = iterative_hash_expr (vno1->op[i], hash);
2016 return hash;
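/* Editorial sketch, not part of GCC: vn_nary_op_compute_hash above
   canonicalizes commutative operands before hashing so that, e.g.,
   a + b and b + a land in the same hash bucket.  The same idea,
   self-contained; mix () is an invented stand-in for iterative_hash.  */
#if 0
#include <stdio.h>

static unsigned
mix (unsigned h, unsigned v)
{
  return (h ^ v) * 0x9e3779b1u;   /* Any decent mixing step works.  */
}

/* Hash a two-operand commutative operation, sorting the operand
   value numbers first so operand order cannot affect the result.  */
static unsigned
hash_commutative (unsigned opcode, unsigned vn0, unsigned vn1)
{
  if (vn0 > vn1)
    {
      unsigned tmp = vn0;
      vn0 = vn1;
      vn1 = tmp;
    }
  return mix (mix (opcode, vn0), vn1);
}

int
main (void)
{
  /* Prints the same value twice: a + b hashes like b + a.  */
  printf ("%u %u\n", hash_commutative (1, 7, 9), hash_commutative (1, 9, 7));
  return 0;
}
#endif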
2019 /* Return the computed hashcode for nary operation P1. */
2021 static hashval_t
2022 vn_nary_op_hash (const void *p1)
2024 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2025 return vno1->hashcode;
2028 /* Compare nary operations P1 and P2 and return true if they are
2029 equivalent. */
2031 int
2032 vn_nary_op_eq (const void *p1, const void *p2)
2034 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2035 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
2036 unsigned i;
2038 if (vno1->hashcode != vno2->hashcode)
2039 return false;
2041 if (vno1->length != vno2->length)
2042 return false;
2044 if (vno1->opcode != vno2->opcode
2045 || !types_compatible_p (vno1->type, vno2->type))
2046 return false;
2048 for (i = 0; i < vno1->length; ++i)
2049 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2050 return false;
2052 return true;
2055 /* Initialize VNO from the pieces provided. */
2057 static void
2058 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2059 enum tree_code code, tree type, tree *ops)
2061 vno->opcode = code;
2062 vno->length = length;
2063 vno->type = type;
2064 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2067 /* Initialize VNO from OP. */
2069 static void
2070 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2072 unsigned i;
2074 vno->opcode = TREE_CODE (op);
2075 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2076 vno->type = TREE_TYPE (op);
2077 for (i = 0; i < vno->length; ++i)
2078 vno->op[i] = TREE_OPERAND (op, i);
2081 /* Return the number of operands for a vn_nary ops structure from STMT. */
2083 static unsigned int
2084 vn_nary_length_from_stmt (gimple stmt)
2086 switch (gimple_assign_rhs_code (stmt))
2088 case REALPART_EXPR:
2089 case IMAGPART_EXPR:
2090 case VIEW_CONVERT_EXPR:
2091 return 1;
2093 case CONSTRUCTOR:
2094 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2096 default:
2097 return gimple_num_ops (stmt) - 1;
2101 /* Initialize VNO from STMT. */
2103 static void
2104 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2106 unsigned i;
2108 vno->opcode = gimple_assign_rhs_code (stmt);
2109 vno->type = gimple_expr_type (stmt);
2110 switch (vno->opcode)
2112 case REALPART_EXPR:
2113 case IMAGPART_EXPR:
2114 case VIEW_CONVERT_EXPR:
2115 vno->length = 1;
2116 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2117 break;
2119 case CONSTRUCTOR:
2120 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2121 for (i = 0; i < vno->length; ++i)
2122 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2123 break;
2125 default:
2126 vno->length = gimple_num_ops (stmt) - 1;
2127 for (i = 0; i < vno->length; ++i)
2128 vno->op[i] = gimple_op (stmt, i + 1);
2132 /* Compute the hashcode for VNO and look for it in the hash table;
2133 return the resulting value number if it exists in the hash table.
2134 Return NULL_TREE if it does not exist in the hash table or if the
2135 result field of the operation is NULL. VNRESULT will contain the
2136 vn_nary_op_t from the hashtable if it exists. */
2138 static tree
2139 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2141 void **slot;
2143 if (vnresult)
2144 *vnresult = NULL;
2146 vno->hashcode = vn_nary_op_compute_hash (vno);
2147 slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
2148 NO_INSERT);
2149 if (!slot && current_info == optimistic_info)
2150 slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
2151 NO_INSERT);
2152 if (!slot)
2153 return NULL_TREE;
2154 if (vnresult)
2155 *vnresult = (vn_nary_op_t)*slot;
2156 return ((vn_nary_op_t)*slot)->result;
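/* Editorial sketch, not part of GCC: vn_nary_op_lookup_1 above probes the
   current table and, when the current table is the optimistic one, falls
   back to the always-trusted valid table.  The shape of that two-level
   probe over an invented toy table type.  */
#if 0
#include <stddef.h>
#include <string.h>

struct table { const char *key[8]; void *val[8]; };

static void *
table_get (struct table *t, const char *key)
{
  for (int i = 0; i < 8; i++)
    if (t->key[i] && strcmp (t->key[i], key) == 0)
      return t->val[i];
  return NULL;
}

/* Probe CURRENT first; on a miss, retry in VALID, but only when
   CURRENT is the optimistic table.  */
static void *
lookup_with_fallback (struct table *current, struct table *optimistic,
                      struct table *valid, const char *key)
{
  void *res = table_get (current, key);
  if (!res && current == optimistic)
    res = table_get (valid, key);
  return res;
}
#endif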
2159 /* Lookup a n-ary operation by its pieces and return the resulting value
2160 number if it exists in the hash table. Return NULL_TREE if it does
2161 not exist in the hash table or if the result field of the operation
2162 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2163 if it exists. */
2165 tree
2166 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2167 tree type, tree *ops, vn_nary_op_t *vnresult)
2169 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2170 sizeof_vn_nary_op (length));
2171 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2172 return vn_nary_op_lookup_1 (vno1, vnresult);
2175 /* Lookup OP in the current hash table, and return the resulting value
2176 number if it exists in the hash table. Return NULL_TREE if it does
2177 not exist in the hash table or if the result field of the operation
2178 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2179 if it exists. */
2181 tree
2182 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2184 vn_nary_op_t vno1
2185 = XALLOCAVAR (struct vn_nary_op_s,
2186 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2187 init_vn_nary_op_from_op (vno1, op);
2188 return vn_nary_op_lookup_1 (vno1, vnresult);
2191 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2192 value number if it exists in the hash table. Return NULL_TREE if
2193 it does not exist in the hash table. VNRESULT will contain the
2194 vn_nary_op_t from the hashtable if it exists. */
2196 tree
2197 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2199 vn_nary_op_t vno1
2200 = XALLOCAVAR (struct vn_nary_op_s,
2201 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2202 init_vn_nary_op_from_stmt (vno1, stmt);
2203 return vn_nary_op_lookup_1 (vno1, vnresult);
2206 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2208 static vn_nary_op_t
2209 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2211 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
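/* Editorial sketch, not part of GCC: vn_nary_op_s ends in an operand
   array, so allocations are sized by operand count via sizeof_vn_nary_op
   rather than plain sizeof.  The trailing-array pattern in miniature;
   the struct and helpers here are invented and assume LENGTH >= 1.  */
#if 0
#include <stdlib.h>

struct nary
{
  unsigned opcode;
  unsigned length;
  int op[1];                    /* Really LENGTH entries.  */
};

/* Bytes for a node with LENGTH operands: the base struct, minus the
   one declared slot, plus LENGTH real slots.  */
static size_t
sizeof_nary (unsigned length)
{
  return sizeof (struct nary) + (length - 1) * sizeof (int);
}

static struct nary *
alloc_nary (unsigned length)
{
  struct nary *n = (struct nary *) malloc (sizeof_nary (length));
  if (n)
    n->length = length;
  return n;
}
#endif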
2214 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2215 obstack. */
2217 static vn_nary_op_t
2218 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2220 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2221 &current_info->nary_obstack);
2223 vno1->value_id = value_id;
2224 vno1->length = length;
2225 vno1->result = result;
2227 return vno1;
2230 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2231 VNO->HASHCODE first. */
2233 static vn_nary_op_t
2234 vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash)
2236 void **slot;
2238 if (compute_hash)
2239 vno->hashcode = vn_nary_op_compute_hash (vno);
2241 slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT);
2242 gcc_assert (!*slot);
2244 *slot = vno;
2245 return vno;
2248 /* Insert an n-ary operation into the current hash table using its
2249 pieces. Return the vn_nary_op_t structure we created and put in
2250 the hashtable. */
2252 vn_nary_op_t
2253 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2254 tree type, tree *ops,
2255 tree result, unsigned int value_id)
2257 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2258 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2259 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2262 /* Insert OP into the current hash table with a value number of
2263 RESULT. Return the vn_nary_op_t structure we created and put in
2264 the hashtable. */
2266 vn_nary_op_t
2267 vn_nary_op_insert (tree op, tree result)
2269 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2270 vn_nary_op_t vno1;
2272 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2273 init_vn_nary_op_from_op (vno1, op);
2274 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2277 /* Insert the rhs of STMT into the current hash table with a value number of
2278 RESULT. */
2280 vn_nary_op_t
2281 vn_nary_op_insert_stmt (gimple stmt, tree result)
2283 vn_nary_op_t vno1
2284 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2285 result, VN_INFO (result)->value_id);
2286 init_vn_nary_op_from_stmt (vno1, stmt);
2287 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2290 /* Compute a hashcode for PHI operation VP1 and return it. */
2292 static inline hashval_t
2293 vn_phi_compute_hash (vn_phi_t vp1)
2295 hashval_t result;
2296 int i;
2297 tree phi1op;
2298 tree type;
2300 result = vp1->block->index;
2302 /* If all PHI arguments are constants we need to distinguish
2303 the PHI node via its type. */
2304 type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
2305 result += (INTEGRAL_TYPE_P (type)
2306 + (INTEGRAL_TYPE_P (type)
2307 ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
2309 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2311 if (phi1op == VN_TOP)
2312 continue;
2313 result = iterative_hash_expr (phi1op, result);
2316 return result;
2319 /* Return the computed hashcode for phi operation P1. */
2321 static hashval_t
2322 vn_phi_hash (const void *p1)
2324 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2325 return vp1->hashcode;
2328 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2330 static int
2331 vn_phi_eq (const void *p1, const void *p2)
2333 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2334 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
2336 if (vp1->hashcode != vp2->hashcode)
2337 return false;
2339 if (vp1->block == vp2->block)
2341 int i;
2342 tree phi1op;
2344 /* If the PHI nodes do not have compatible types
2345 they are not the same. */
2346 if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
2347 TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
2348 return false;
2350 /* Any phi in the same block will have its arguments in the
2351 same edge order, because of how we store phi nodes. */
2352 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2354 tree phi2op = VEC_index (tree, vp2->phiargs, i);
2355 if (phi1op == VN_TOP || phi2op == VN_TOP)
2356 continue;
2357 if (!expressions_equal_p (phi1op, phi2op))
2358 return false;
2360 return true;
2362 return false;
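/* Editorial sketch, not part of GCC: vn_phi_eq above treats VN_TOP as a
   wildcard that compares equal to anything, which is what lets the
   optimistic iteration converge.  Stripped to integer value numbers,
   with an invented TOP sentinel.  */
#if 0
#define TOP (-1)                /* Stand-in for VN_TOP.  */

/* Compare two PHI argument vectors of length N; a TOP entry on either
   side is optimistically assumed equal to the other side.  */
static int
phi_args_eq (const int *a, const int *b, int n)
{
  for (int i = 0; i < n; i++)
    {
      if (a[i] == TOP || b[i] == TOP)
        continue;
      if (a[i] != b[i])
        return 0;
    }
  return 1;
}
#endif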
2365 static VEC(tree, heap) *shared_lookup_phiargs;
2367 /* Lookup PHI in the current hash table, and return the resulting
2368 value number if it exists in the hash table. Return NULL_TREE if
2369 it does not exist in the hash table. */
2371 static tree
2372 vn_phi_lookup (gimple phi)
2374 void **slot;
2375 struct vn_phi_s vp1;
2376 unsigned i;
2378 VEC_truncate (tree, shared_lookup_phiargs, 0);
2380 /* Canonicalize the SSA_NAME's to their value number. */
2381 for (i = 0; i < gimple_phi_num_args (phi); i++)
2383 tree def = PHI_ARG_DEF (phi, i);
2384 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2385 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
2387 vp1.phiargs = shared_lookup_phiargs;
2388 vp1.block = gimple_bb (phi);
2389 vp1.hashcode = vn_phi_compute_hash (&vp1);
2390 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
2391 NO_INSERT);
2392 if (!slot && current_info == optimistic_info)
2393 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
2394 NO_INSERT);
2395 if (!slot)
2396 return NULL_TREE;
2397 return ((vn_phi_t)*slot)->result;
2400 /* Insert PHI into the current hash table with a value number of
2401 RESULT. */
2403 static vn_phi_t
2404 vn_phi_insert (gimple phi, tree result)
2406 void **slot;
2407 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2408 unsigned i;
2409 VEC (tree, heap) *args = NULL;
2411 /* Canonicalize the SSA_NAME's to their value number. */
2412 for (i = 0; i < gimple_phi_num_args (phi); i++)
2414 tree def = PHI_ARG_DEF (phi, i);
2415 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2416 VEC_safe_push (tree, heap, args, def);
2418 vp1->value_id = VN_INFO (result)->value_id;
2419 vp1->phiargs = args;
2420 vp1->block = gimple_bb (phi);
2421 vp1->result = result;
2422 vp1->hashcode = vn_phi_compute_hash (vp1);
2424 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
2425 INSERT);
2427 /* Because we iterate over phi operations more than once, it's
2428 possible the slot might already exist here, hence no assert.  */
2429 *slot = vp1;
2430 return vp1;
2434 /* Print set of components in strongly connected component SCC to OUT. */
2436 static void
2437 print_scc (FILE *out, VEC (tree, heap) *scc)
2439 tree var;
2440 unsigned int i;
2442 fprintf (out, "SCC consists of: ");
2443 FOR_EACH_VEC_ELT (tree, scc, i, var)
2445 print_generic_expr (out, var, 0);
2446 fprintf (out, " ");
2448 fprintf (out, "\n");
2451 /* Set the value number of FROM to TO, return true if it has changed
2452 as a result. */
2454 static inline bool
2455 set_ssa_val_to (tree from, tree to)
2457 tree currval = SSA_VAL (from);
2459 if (from != to)
2461 if (currval == from)
2463 if (dump_file && (dump_flags & TDF_DETAILS))
2465 fprintf (dump_file, "Not changing value number of ");
2466 print_generic_expr (dump_file, from, 0);
2467 fprintf (dump_file, " from VARYING to ");
2468 print_generic_expr (dump_file, to, 0);
2469 fprintf (dump_file, "\n");
2471 return false;
2473 else if (TREE_CODE (to) == SSA_NAME
2474 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2475 to = from;
2478 /* The only thing we allow as value numbers are VN_TOP, ssa_names
2479 and invariants. So assert that here. */
2480 gcc_assert (to != NULL_TREE
2481 && (to == VN_TOP
2482 || TREE_CODE (to) == SSA_NAME
2483 || is_gimple_min_invariant (to)));
2485 if (dump_file && (dump_flags & TDF_DETAILS))
2487 fprintf (dump_file, "Setting value number of ");
2488 print_generic_expr (dump_file, from, 0);
2489 fprintf (dump_file, " to ");
2490 print_generic_expr (dump_file, to, 0);
2493 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
2495 VN_INFO (from)->valnum = to;
2496 if (dump_file && (dump_flags & TDF_DETAILS))
2497 fprintf (dump_file, " (changed)\n");
2498 return true;
2500 if (dump_file && (dump_flags & TDF_DETAILS))
2501 fprintf (dump_file, "\n");
2502 return false;
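/* Editorial sketch, not part of GCC: set_ssa_val_to above walks a small
   lattice -- a name starts at TOP, may move to a value, and once it is
   VARYING (numbered to itself) it stays there.  The monotone update in
   miniature; the array encoding is invented and assumed initialized
   elsewhere.  */
#if 0
#include <stdbool.h>

#define N 100
static int val[N];              /* val[i] == i encodes VARYING for name i.  */

/* Give name FROM the value TO; return true if anything changed.  */
static bool
set_val (int from, int to)
{
  if (val[from] == from && to != from)
    return false;               /* Already VARYING: never move off it.  */
  if (val[from] == to)
    return false;               /* No change.  */
  val[from] = to;
  return true;
}
#endif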
2505 /* Value number all definitions in STMT to themselves.
2506 Return true if a value number changed. */
2508 static bool
2509 defs_to_varying (gimple stmt)
2511 bool changed = false;
2512 ssa_op_iter iter;
2513 def_operand_p defp;
2515 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2517 tree def = DEF_FROM_PTR (defp);
2519 VN_INFO (def)->use_processed = true;
2520 changed |= set_ssa_val_to (def, def);
2522 return changed;
2525 static bool expr_has_constants (tree expr);
2526 static tree valueize_expr (tree expr);
2528 /* Visit a copy between LHS and RHS, return true if the value number
2529 changed. */
2531 static bool
2532 visit_copy (tree lhs, tree rhs)
2534 /* Follow chains of copies to their destination. */
2535 while (TREE_CODE (rhs) == SSA_NAME
2536 && SSA_VAL (rhs) != rhs)
2537 rhs = SSA_VAL (rhs);
2539 /* The copy may have a more interesting constant-filled expression
2540 (we don't, since we know our RHS is just an SSA name). */
2541 if (TREE_CODE (rhs) == SSA_NAME)
2543 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2544 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2547 return set_ssa_val_to (lhs, rhs);
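/* Editorial sketch, not part of GCC: the copy-chasing loop above is the
   classic "find the representative" walk, much like union-find without
   path compression.  VAL here is an invented plain-array stand-in for
   SSA_VAL.  */
#if 0
/* VAL maps each name to the name it copies from; a root maps to
   itself.  Follow the chain to the representative.  */
static int
find_rep (const int *val, int x)
{
  while (val[x] != x)
    x = val[x];
  return x;
}
#endif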
2550 /* Visit a nary operator RHS, value number it, and return true if the
2551 value number of LHS has changed as a result. */
2553 static bool
2554 visit_nary_op (tree lhs, gimple stmt)
2556 bool changed = false;
2557 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2559 if (result)
2560 changed = set_ssa_val_to (lhs, result);
2561 else
2563 changed = set_ssa_val_to (lhs, lhs);
2564 vn_nary_op_insert_stmt (stmt, lhs);
2567 return changed;
2570 /* Visit a call STMT storing into LHS. Return true if the value number
2571 of the LHS has changed as a result. */
2573 static bool
2574 visit_reference_op_call (tree lhs, gimple stmt)
2576 bool changed = false;
2577 struct vn_reference_s vr1;
2578 tree result;
2579 tree vuse = gimple_vuse (stmt);
2581 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2582 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2583 vr1.type = gimple_expr_type (stmt);
2584 vr1.set = 0;
2585 vr1.hashcode = vn_reference_compute_hash (&vr1);
2586 result = vn_reference_lookup_1 (&vr1, NULL);
2587 if (result)
2589 changed = set_ssa_val_to (lhs, result);
2590 if (TREE_CODE (result) == SSA_NAME
2591 && VN_INFO (result)->has_constants)
2592 VN_INFO (lhs)->has_constants = true;
2594 else
2596 void **slot;
2597 vn_reference_t vr2;
2598 changed = set_ssa_val_to (lhs, lhs);
2599 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2600 vr2->vuse = vr1.vuse;
2601 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2602 vr2->type = vr1.type;
2603 vr2->set = vr1.set;
2604 vr2->hashcode = vr1.hashcode;
2605 vr2->result = lhs;
2606 slot = htab_find_slot_with_hash (current_info->references,
2607 vr2, vr2->hashcode, INSERT);
2608 if (*slot)
2609 free_reference (*slot);
2610 *slot = vr2;
2613 return changed;
2616 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2617 and return true if the value number of the LHS has changed as a result. */
2619 static bool
2620 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2622 bool changed = false;
2623 tree last_vuse;
2624 tree result;
2626 last_vuse = gimple_vuse (stmt);
2627 last_vuse_ptr = &last_vuse;
2628 result = vn_reference_lookup (op, gimple_vuse (stmt),
2629 default_vn_walk_kind, NULL);
2630 last_vuse_ptr = NULL;
2632 /* If we have a VCE, try looking up its operand as it might be stored in
2633 a different type. */
2634 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2635 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2636 default_vn_walk_kind, NULL);
2638 /* We handle type-punning through unions by value-numbering based
2639 on offset and size of the access. Be prepared to handle a
2640 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
2641 if (result
2642 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2644 /* We will be setting the value number of lhs to the value number
2645 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2646 So first simplify and lookup this expression to see if it
2647 is already available. */
2648 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2649 if ((CONVERT_EXPR_P (val)
2650 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2651 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2653 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2654 if ((CONVERT_EXPR_P (tem)
2655 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2656 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2657 TREE_TYPE (val), tem)))
2658 val = tem;
2660 result = val;
2661 if (!is_gimple_min_invariant (val)
2662 && TREE_CODE (val) != SSA_NAME)
2663 result = vn_nary_op_lookup (val, NULL);
2664 /* If the expression is not yet available, value-number lhs to
2665 a new SSA_NAME we create. */
2666 if (!result)
2668 result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
2669 /* Initialize value-number information properly. */
2670 VN_INFO_GET (result)->valnum = result;
2671 VN_INFO (result)->value_id = get_next_value_id ();
2672 VN_INFO (result)->expr = val;
2673 VN_INFO (result)->has_constants = expr_has_constants (val);
2674 VN_INFO (result)->needs_insertion = true;
2675 /* As all "inserted" statements are singleton SCCs, insert
2676 to the valid table. This is strictly needed to
2677 avoid re-generating new value SSA_NAMEs for the same
2678 expression during SCC iteration over and over (the
2679 optimistic table gets cleared after each iteration).
2680 We do not need to insert into the optimistic table, as
2681 lookups there will fall back to the valid table. */
2682 if (current_info == optimistic_info)
2684 current_info = valid_info;
2685 vn_nary_op_insert (val, result);
2686 current_info = optimistic_info;
2688 else
2689 vn_nary_op_insert (val, result);
2690 if (dump_file && (dump_flags & TDF_DETAILS))
2692 fprintf (dump_file, "Inserting name ");
2693 print_generic_expr (dump_file, result, 0);
2694 fprintf (dump_file, " for expression ");
2695 print_generic_expr (dump_file, val, 0);
2696 fprintf (dump_file, "\n");
2701 if (result)
2703 changed = set_ssa_val_to (lhs, result);
2704 if (TREE_CODE (result) == SSA_NAME
2705 && VN_INFO (result)->has_constants)
2707 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2708 VN_INFO (lhs)->has_constants = true;
2711 else
2713 changed = set_ssa_val_to (lhs, lhs);
2714 vn_reference_insert (op, lhs, last_vuse);
2717 return changed;
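/* Editorial sketch, not part of GCC: the type-punning case handled above,
   in source form -- the same bits stored as one type and read back as
   another, which SCCVN matches by offset and size and patches up with a
   VIEW_CONVERT_EXPR.  A plain C illustration.  */
#if 0
#include <stdio.h>
#include <string.h>

union pun { float f; unsigned u; };

int
main (void)
{
  union pun p;
  unsigned u;

  p.f = 1.0f;                   /* Store as float...  */
  u = p.u;                      /* ...load the same bytes as unsigned.  */

  /* memcpy is the strictly portable spelling of the same re-read.  */
  memcpy (&u, &p.f, sizeof u);
  printf ("%08x\n", u);         /* Prints 3f800000 on IEEE targets.  */
  return 0;
}
#endif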
2721 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2722 and return true if the value number of the LHS has changed as a result. */
2724 static bool
2725 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2727 bool changed = false;
2728 tree result;
2729 bool resultsame = false;
2731 /* First we want to lookup using the *vuses* from the store and see
2732 if the last store to this location with the same address
2733 had the same value.
2735 The vuses represent the memory state before the store. If the
2736 memory state, address, and value of the store are the same as the
2737 last store to this location, then this store will produce the
2738 same memory state as that store.
2740 In this case the vdef versions for this store are value numbered to those
2741 vuse versions, since they represent the same memory state after
2742 this store.
2744 Otherwise, the vdefs for the store are used when inserting into
2745 the table, since the store generates a new memory state. */
2747 result = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_NOWALK, NULL);
2749 if (result)
2751 if (TREE_CODE (result) == SSA_NAME)
2752 result = SSA_VAL (result);
2753 if (TREE_CODE (op) == SSA_NAME)
2754 op = SSA_VAL (op);
2755 resultsame = expressions_equal_p (result, op);
2758 if (!result || !resultsame)
2760 tree vdef;
2762 if (dump_file && (dump_flags & TDF_DETAILS))
2764 fprintf (dump_file, "No store match\n");
2765 fprintf (dump_file, "Value numbering store ");
2766 print_generic_expr (dump_file, lhs, 0);
2767 fprintf (dump_file, " to ");
2768 print_generic_expr (dump_file, op, 0);
2769 fprintf (dump_file, "\n");
2771 /* Have to set value numbers before insert, since insert is
2772 going to valueize the references in-place. */
2773 if ((vdef = gimple_vdef (stmt)))
2775 VN_INFO (vdef)->use_processed = true;
2776 changed |= set_ssa_val_to (vdef, vdef);
2779 /* Do not insert structure copies into the tables. */
2780 if (is_gimple_min_invariant (op)
2781 || is_gimple_reg (op))
2782 vn_reference_insert (lhs, op, vdef);
2784 else
2786 /* We had a match, so value number the vdef to have the value
2787 number of the vuse it came from. */
2788 tree def, use;
2790 if (dump_file && (dump_flags & TDF_DETAILS))
2791 fprintf (dump_file, "Store matched earlier value,"
2792 "value numbering store vdefs to matching vuses.\n");
2794 def = gimple_vdef (stmt);
2795 use = gimple_vuse (stmt);
2797 VN_INFO (def)->use_processed = true;
2798 changed |= set_ssa_val_to (def, SSA_VAL (use));
2801 return changed;
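/* Editorial sketch, not part of GCC: the rule implemented above -- a store
   whose address, value and incoming memory state all match the previous
   store creates no new memory state, so its vdef inherits the vuse's
   number.  In miniature, with invented types.  */
#if 0
struct store { int addr, value, mem_in, mem_out; };

/* Number CUR's output memory state: reuse the input state when CUR
   repeats PREV exactly, otherwise hand out the fresh NEW_STATE.  */
static void
number_store (struct store *cur, const struct store *prev, int new_state)
{
  if (prev
      && cur->addr == prev->addr
      && cur->value == prev->value
      && cur->mem_in == prev->mem_out)
    cur->mem_out = cur->mem_in;
  else
    cur->mem_out = new_state;
}
#endif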
2804 /* Visit and value number PHI, return true if the value number
2805 changed. */
2807 static bool
2808 visit_phi (gimple phi)
2810 bool changed = false;
2811 tree result;
2812 tree sameval = VN_TOP;
2813 bool allsame = true;
2814 unsigned i;
2816 /* TODO: We could check for this in init_sccvn, and replace this
2817 with a gcc_assert. */
2818 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2819 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2821 /* See if all non-TOP arguments have the same value. TOP is
2822 equivalent to everything, so we can ignore it. */
2823 for (i = 0; i < gimple_phi_num_args (phi); i++)
2825 tree def = PHI_ARG_DEF (phi, i);
2827 if (TREE_CODE (def) == SSA_NAME)
2828 def = SSA_VAL (def);
2829 if (def == VN_TOP)
2830 continue;
2831 if (sameval == VN_TOP)
2833 sameval = def;
2835 else
2837 if (!expressions_equal_p (def, sameval))
2839 allsame = false;
2840 break;
2845 /* If all value numbered to the same value, the phi node has that
2846 value. */
2847 if (allsame)
2849 if (is_gimple_min_invariant (sameval))
2851 VN_INFO (PHI_RESULT (phi))->has_constants = true;
2852 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2854 else
2856 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2857 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2860 if (TREE_CODE (sameval) == SSA_NAME)
2861 return visit_copy (PHI_RESULT (phi), sameval);
2863 return set_ssa_val_to (PHI_RESULT (phi), sameval);
2866 /* Otherwise, see if it is equivalent to a phi node in this block. */
2867 result = vn_phi_lookup (phi);
2868 if (result)
2870 if (TREE_CODE (result) == SSA_NAME)
2871 changed = visit_copy (PHI_RESULT (phi), result);
2872 else
2873 changed = set_ssa_val_to (PHI_RESULT (phi), result);
2875 else
2877 vn_phi_insert (phi, PHI_RESULT (phi));
2878 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2879 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2880 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2883 return changed;
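/* Editorial sketch, not part of GCC: the argument scan in visit_phi,
   reduced to integer value numbers -- find the single value shared by
   all non-TOP arguments, if any.  Both sentinels are invented.  */
#if 0
#define TOP   (-1)              /* Optimistically equal to anything.  */
#define MIXED (-2)              /* Arguments disagree.  */

static int
common_phi_value (const int *args, int n)
{
  int sameval = TOP;
  for (int i = 0; i < n; i++)
    {
      if (args[i] == TOP)
        continue;
      if (sameval == TOP)
        sameval = args[i];      /* First real value seen.  */
      else if (args[i] != sameval)
        return MIXED;
    }
  return sameval;               /* TOP when every argument was TOP.  */
}
#endif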
2886 /* Return true if EXPR contains constants. */
2888 static bool
2889 expr_has_constants (tree expr)
2891 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2893 case tcc_unary:
2894 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2896 case tcc_binary:
2897 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2898 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2899 /* Constants inside reference ops are rarely interesting, but
2900 it can take a lot of looking to find them. */
2901 case tcc_reference:
2902 case tcc_declaration:
2903 return false;
2904 default:
2905 return is_gimple_min_invariant (expr);
2907 return false;
2910 /* Return true if STMT contains constants. */
2912 static bool
2913 stmt_has_constants (gimple stmt)
2915 if (gimple_code (stmt) != GIMPLE_ASSIGN)
2916 return false;
2918 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2920 case GIMPLE_UNARY_RHS:
2921 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2923 case GIMPLE_BINARY_RHS:
2924 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2925 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
2926 case GIMPLE_TERNARY_RHS:
2927 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2928 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
2929 || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
2930 case GIMPLE_SINGLE_RHS:
2931 /* Constants inside reference ops are rarely interesting, but
2932 it can take a lot of looking to find them. */
2933 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2934 default:
2935 gcc_unreachable ();
2937 return false;
2940 /* Replace SSA_NAMEs in EXPR with their value numbers, and return the
2941 result.
2942 This is performed in place. */
2944 static tree
2945 valueize_expr (tree expr)
2947 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2949 case tcc_binary:
2950 TREE_OPERAND (expr, 1) = vn_valueize (TREE_OPERAND (expr, 1));
2951 /* Fallthru. */
2952 case tcc_unary:
2953 TREE_OPERAND (expr, 0) = vn_valueize (TREE_OPERAND (expr, 0));
2954 break;
2955 default:;
2957 return expr;
2960 /* Simplify the binary expression RHS, and return the result if
2961 simplified. */
2963 static tree
2964 simplify_binary_expression (gimple stmt)
2966 tree result = NULL_TREE;
2967 tree op0 = gimple_assign_rhs1 (stmt);
2968 tree op1 = gimple_assign_rhs2 (stmt);
2969 enum tree_code code = gimple_assign_rhs_code (stmt);
2971 /* This will not catch every single case we could combine, but will
2972 catch those with constants. The goal here is to simultaneously
2973 combine constants between expressions, but avoid infinite
2974 expansion of expressions during simplification. */
2975 if (TREE_CODE (op0) == SSA_NAME)
2977 if (VN_INFO (op0)->has_constants
2978 || TREE_CODE_CLASS (code) == tcc_comparison
2979 || code == COMPLEX_EXPR)
2980 op0 = valueize_expr (vn_get_expr_for (op0));
2981 else
2982 op0 = vn_valueize (op0);
2985 if (TREE_CODE (op1) == SSA_NAME)
2987 if (VN_INFO (op1)->has_constants
2988 || code == COMPLEX_EXPR)
2989 op1 = valueize_expr (vn_get_expr_for (op1));
2990 else
2991 op1 = vn_valueize (op1);
2994 /* Pointer plus constant can be represented as invariant address.
2995 Do so to allow further propagation, see also tree forwprop. */
2996 if (code == POINTER_PLUS_EXPR
2997 && host_integerp (op1, 1)
2998 && TREE_CODE (op0) == ADDR_EXPR
2999 && is_gimple_min_invariant (op0))
3000 return build_invariant_address (TREE_TYPE (op0),
3001 TREE_OPERAND (op0, 0),
3002 TREE_INT_CST_LOW (op1));
3004 /* Avoid folding if nothing changed. */
3005 if (op0 == gimple_assign_rhs1 (stmt)
3006 && op1 == gimple_assign_rhs2 (stmt))
3007 return NULL_TREE;
3009 fold_defer_overflow_warnings ();
3011 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3012 if (result)
3013 STRIP_USELESS_TYPE_CONVERSION (result);
3015 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3016 stmt, 0);
3018 /* Make sure result is not a complex expression consisting
3019 of operators of operators (i.e. (a + b) + (a + c)).
3020 Otherwise, we will end up with unbounded expressions if
3021 fold does anything at all. */
3022 if (result && valid_gimple_rhs_p (result))
3023 return result;
3025 return NULL_TREE;
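/* Editorial sketch, not part of GCC: the simplifier above substitutes
   lattice values into the operands and only calls fold when something
   actually changed.  The control flow in miniature, over an invented
   constant lattice.  */
#if 0
#include <stdbool.h>

struct opnd { bool is_const; int v; };  /* A name id or a constant.  */

/* Substitute the known constant for X, if any, noting whether the
   substitution changed anything.  */
static struct opnd
valueize (struct opnd x, const struct opnd *lattice, bool *changed)
{
  if (!x.is_const && lattice[x.v].is_const)
    {
      *changed = true;
      return lattice[x.v];
    }
  return x;
}

/* Fold A + B, but only bother when valueizing changed an operand,
   mirroring the "avoid folding if nothing changed" guard above.  */
static bool
simplify_plus (struct opnd a, struct opnd b,
               const struct opnd *lattice, int *result)
{
  bool changed = false;
  a = valueize (a, lattice, &changed);
  b = valueize (b, lattice, &changed);
  if (!changed || !a.is_const || !b.is_const)
    return false;
  *result = a.v + b.v;
  return true;
}
#endif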
3028 /* Simplify the unary expression RHS, and return the result if
3029 simplified. */
3031 static tree
3032 simplify_unary_expression (gimple stmt)
3034 tree result = NULL_TREE;
3035 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3036 enum tree_code code = gimple_assign_rhs_code (stmt);
3038 /* We handle some tcc_reference codes here that are all
3039 GIMPLE_ASSIGN_SINGLE codes. */
3040 if (code == REALPART_EXPR
3041 || code == IMAGPART_EXPR
3042 || code == VIEW_CONVERT_EXPR
3043 || code == BIT_FIELD_REF)
3044 op0 = TREE_OPERAND (op0, 0);
3046 if (TREE_CODE (op0) != SSA_NAME)
3047 return NULL_TREE;
3049 orig_op0 = op0;
3050 if (VN_INFO (op0)->has_constants)
3051 op0 = valueize_expr (vn_get_expr_for (op0));
3052 else if (CONVERT_EXPR_CODE_P (code)
3053 || code == REALPART_EXPR
3054 || code == IMAGPART_EXPR
3055 || code == VIEW_CONVERT_EXPR
3056 || code == BIT_FIELD_REF)
3058 /* We want to do tree-combining on conversion-like expressions.
3059 Make sure we feed only SSA_NAMEs or constants to fold though. */
3060 tree tem = valueize_expr (vn_get_expr_for (op0));
3061 if (UNARY_CLASS_P (tem)
3062 || BINARY_CLASS_P (tem)
3063 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3064 || TREE_CODE (tem) == SSA_NAME
3065 || TREE_CODE (tem) == CONSTRUCTOR
3066 || is_gimple_min_invariant (tem))
3067 op0 = tem;
3070 /* Avoid folding if nothing changed, but remember the expression. */
3071 if (op0 == orig_op0)
3072 return NULL_TREE;
3074 if (code == BIT_FIELD_REF)
3076 tree rhs = gimple_assign_rhs1 (stmt);
3077 result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3078 op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3080 else
3081 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3082 if (result)
3084 STRIP_USELESS_TYPE_CONVERSION (result);
3085 if (valid_gimple_rhs_p (result))
3086 return result;
3089 return NULL_TREE;
3092 /* Try to simplify RHS using equivalences and constant folding. */
3094 static tree
3095 try_to_simplify (gimple stmt)
3097 enum tree_code code = gimple_assign_rhs_code (stmt);
3098 tree tem;
3100 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
3101 in this case, there is no point in doing extra work. */
3102 if (code == SSA_NAME)
3103 return NULL_TREE;
3105 /* First try constant folding based on our current lattice. */
3106 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize);
3107 if (tem
3108 && (TREE_CODE (tem) == SSA_NAME
3109 || is_gimple_min_invariant (tem)))
3110 return tem;
3112 /* If that didn't work try combining multiple statements. */
3113 switch (TREE_CODE_CLASS (code))
3115 case tcc_reference:
3116 /* Fallthrough for some unary codes that can operate on registers. */
3117 if (!(code == REALPART_EXPR
3118 || code == IMAGPART_EXPR
3119 || code == VIEW_CONVERT_EXPR
3120 || code == BIT_FIELD_REF))
3121 break;
3122 /* We could do a little more with unary ops, if they expand
3123 into binary ops, but it's debatable whether it is worth it. */
3124 case tcc_unary:
3125 return simplify_unary_expression (stmt);
3127 case tcc_comparison:
3128 case tcc_binary:
3129 return simplify_binary_expression (stmt);
3131 default:
3132 break;
3135 return NULL_TREE;
3138 /* Visit and value number USE, return true if the value number
3139 changed. */
3141 static bool
3142 visit_use (tree use)
3144 bool changed = false;
3145 gimple stmt = SSA_NAME_DEF_STMT (use);
3147 VN_INFO (use)->use_processed = true;
3149 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3150 if (dump_file && (dump_flags & TDF_DETAILS)
3151 && !SSA_NAME_IS_DEFAULT_DEF (use))
3153 fprintf (dump_file, "Value numbering ");
3154 print_generic_expr (dump_file, use, 0);
3155 fprintf (dump_file, " stmt = ");
3156 print_gimple_stmt (dump_file, stmt, 0, 0);
3159 /* Handle uninitialized uses. */
3160 if (SSA_NAME_IS_DEFAULT_DEF (use))
3161 changed = set_ssa_val_to (use, use);
3162 else
3164 if (gimple_code (stmt) == GIMPLE_PHI)
3165 changed = visit_phi (stmt);
3166 else if (!gimple_has_lhs (stmt)
3167 || gimple_has_volatile_ops (stmt)
3168 || stmt_could_throw_p (stmt))
3169 changed = defs_to_varying (stmt);
3170 else if (is_gimple_assign (stmt))
3172 enum tree_code code = gimple_assign_rhs_code (stmt);
3173 tree lhs = gimple_assign_lhs (stmt);
3174 tree rhs1 = gimple_assign_rhs1 (stmt);
3175 tree simplified;
3177 /* Shortcut for copies. Simplifying copies is pointless,
3178 since we copy the expression and value they represent. */
3179 if (code == SSA_NAME
3180 && TREE_CODE (lhs) == SSA_NAME)
3182 changed = visit_copy (lhs, rhs1);
3183 goto done;
3185 simplified = try_to_simplify (stmt);
3186 if (simplified)
3188 if (dump_file && (dump_flags & TDF_DETAILS))
3190 fprintf (dump_file, "RHS ");
3191 print_gimple_expr (dump_file, stmt, 0, 0);
3192 fprintf (dump_file, " simplified to ");
3193 print_generic_expr (dump_file, simplified, 0);
3194 if (TREE_CODE (lhs) == SSA_NAME)
3195 fprintf (dump_file, " has constants %d\n",
3196 expr_has_constants (simplified));
3197 else
3198 fprintf (dump_file, "\n");
3201 /* Setting value numbers to constants will occasionally
3202 screw up phi congruence because constants are not
3203 uniquely associated with a single ssa name that can be
3204 looked up. */
3205 if (simplified
3206 && is_gimple_min_invariant (simplified)
3207 && TREE_CODE (lhs) == SSA_NAME)
3209 VN_INFO (lhs)->expr = simplified;
3210 VN_INFO (lhs)->has_constants = true;
3211 changed = set_ssa_val_to (lhs, simplified);
3212 goto done;
3214 else if (simplified
3215 && TREE_CODE (simplified) == SSA_NAME
3216 && TREE_CODE (lhs) == SSA_NAME)
3218 changed = visit_copy (lhs, simplified);
3219 goto done;
3221 else if (simplified)
3223 if (TREE_CODE (lhs) == SSA_NAME)
3225 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3226 /* We have to unshare the expression or else
3227 valueizing may change the IL stream. */
3228 VN_INFO (lhs)->expr = unshare_expr (simplified);
3231 else if (stmt_has_constants (stmt)
3232 && TREE_CODE (lhs) == SSA_NAME)
3233 VN_INFO (lhs)->has_constants = true;
3234 else if (TREE_CODE (lhs) == SSA_NAME)
3236 /* We reset expr and constantness here because we may
3237 have been value numbering optimistically, and
3238 iterating. They may become non-constant in this case,
3239 even if they were optimistically constant. */
3241 VN_INFO (lhs)->has_constants = false;
3242 VN_INFO (lhs)->expr = NULL_TREE;
3245 if ((TREE_CODE (lhs) == SSA_NAME
3246 /* We can substitute SSA_NAMEs that are live over
3247 abnormal edges with their constant value. */
3248 && !(gimple_assign_copy_p (stmt)
3249 && is_gimple_min_invariant (rhs1))
3250 && !(simplified
3251 && is_gimple_min_invariant (simplified))
3252 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3253 /* Stores or copies from SSA_NAMEs that are live over
3254 abnormal edges are a problem. */
3255 || (code == SSA_NAME
3256 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3257 changed = defs_to_varying (stmt);
3258 else if (REFERENCE_CLASS_P (lhs)
3259 || DECL_P (lhs))
3260 changed = visit_reference_op_store (lhs, rhs1, stmt);
3261 else if (TREE_CODE (lhs) == SSA_NAME)
3263 if ((gimple_assign_copy_p (stmt)
3264 && is_gimple_min_invariant (rhs1))
3265 || (simplified
3266 && is_gimple_min_invariant (simplified)))
3268 VN_INFO (lhs)->has_constants = true;
3269 if (simplified)
3270 changed = set_ssa_val_to (lhs, simplified);
3271 else
3272 changed = set_ssa_val_to (lhs, rhs1);
3274 else
3276 switch (get_gimple_rhs_class (code))
3278 case GIMPLE_UNARY_RHS:
3279 case GIMPLE_BINARY_RHS:
3280 case GIMPLE_TERNARY_RHS:
3281 changed = visit_nary_op (lhs, stmt);
3282 break;
3283 case GIMPLE_SINGLE_RHS:
3284 switch (TREE_CODE_CLASS (code))
3286 case tcc_reference:
3287 /* VOP-less references can go through unary case. */
3288 if ((code == REALPART_EXPR
3289 || code == IMAGPART_EXPR
3290 || code == VIEW_CONVERT_EXPR
3291 || code == BIT_FIELD_REF)
3292 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
3294 changed = visit_nary_op (lhs, stmt);
3295 break;
3297 /* Fallthrough. */
3298 case tcc_declaration:
3299 changed = visit_reference_op_load (lhs, rhs1, stmt);
3300 break;
3301 default:
3302 if (code == ADDR_EXPR)
3304 changed = visit_nary_op (lhs, stmt);
3305 break;
3307 else if (code == CONSTRUCTOR)
3309 changed = visit_nary_op (lhs, stmt);
3310 break;
3312 changed = defs_to_varying (stmt);
3314 break;
3315 default:
3316 changed = defs_to_varying (stmt);
3317 break;
3321 else
3322 changed = defs_to_varying (stmt);
3324 else if (is_gimple_call (stmt))
3326 tree lhs = gimple_call_lhs (stmt);
3328 /* ??? We could try to simplify calls. */
3330 if (stmt_has_constants (stmt)
3331 && TREE_CODE (lhs) == SSA_NAME)
3332 VN_INFO (lhs)->has_constants = true;
3333 else if (TREE_CODE (lhs) == SSA_NAME)
3335 /* We reset expr and constantness here because we may
3336 have been value numbering optimistically, and
3337 iterating. They may become non-constant in this case,
3338 even if they were optimistically constant. */
3339 VN_INFO (lhs)->has_constants = false;
3340 VN_INFO (lhs)->expr = NULL_TREE;
3343 if (TREE_CODE (lhs) == SSA_NAME
3344 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3345 changed = defs_to_varying (stmt);
3346 /* ??? We should handle stores from calls. */
3347 else if (TREE_CODE (lhs) == SSA_NAME)
3349 if (!gimple_call_internal_p (stmt)
3350 && gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
3351 changed = visit_reference_op_call (lhs, stmt);
3352 else
3353 changed = defs_to_varying (stmt);
3355 else
3356 changed = defs_to_varying (stmt);
3359 done:
3360 return changed;
3363 /* Compare two operands by reverse postorder index */
3365 static int
3366 compare_ops (const void *pa, const void *pb)
3368 const tree opa = *((const tree *)pa);
3369 const tree opb = *((const tree *)pb);
3370 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3371 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3372 basic_block bba;
3373 basic_block bbb;
3375 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3376 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3377 else if (gimple_nop_p (opstmta))
3378 return -1;
3379 else if (gimple_nop_p (opstmtb))
3380 return 1;
3382 bba = gimple_bb (opstmta);
3383 bbb = gimple_bb (opstmtb);
3385 if (!bba && !bbb)
3386 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3387 else if (!bba)
3388 return -1;
3389 else if (!bbb)
3390 return 1;
3392 if (bba == bbb)
3394 if (gimple_code (opstmta) == GIMPLE_PHI
3395 && gimple_code (opstmtb) == GIMPLE_PHI)
3396 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3397 else if (gimple_code (opstmta) == GIMPLE_PHI)
3398 return -1;
3399 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3400 return 1;
3401 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3402 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3403 else
3404 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3406 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3409 /* Sort an array containing members of a strongly connected component
3410 SCC so that the members are ordered by RPO number.
3411 This means that when the sort is complete, iterating through the
3412 array will give you the members in RPO order. */
3414 static void
3415 sort_scc (VEC (tree, heap) *scc)
3417 VEC_qsort (tree, scc, compare_ops);
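/* Editorial sketch, not part of GCC: compare_ops is a cascaded qsort
   comparator where each key only breaks ties left by the previous one.
   The same shape on an invented record.  */
#if 0
#include <stdlib.h>

struct op { int bb_rpo; int is_phi; int uid; int version; };

/* Order by block RPO number, then PHIs before other statements in the
   same block, then statement uid, then SSA version as the final tie
   breaker.  */
static int
cmp_op (const void *pa, const void *pb)
{
  const struct op *a = (const struct op *) pa;
  const struct op *b = (const struct op *) pb;
  if (a->bb_rpo != b->bb_rpo)
    return a->bb_rpo - b->bb_rpo;
  if (a->is_phi != b->is_phi)
    return b->is_phi - a->is_phi;       /* PHIs sort first.  */
  if (a->uid != b->uid)
    return a->uid - b->uid;
  return a->version - b->version;
}

/* Usage: qsort (ops, n, sizeof (struct op), cmp_op);  */
#endif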
3420 /* Insert the no longer used nary ONARY into the hash tables of INFO. */
3422 static void
3423 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3425 size_t size = sizeof_vn_nary_op (onary->length);
3426 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3427 &info->nary_obstack);
3428 memcpy (nary, onary, size);
3429 vn_nary_op_insert_into (nary, info->nary, false);
3432 /* Insert the no longer used phi OPHI into the hash tables of INFO. */
3434 static void
3435 copy_phi (vn_phi_t ophi, vn_tables_t info)
3437 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3438 void **slot;
3439 memcpy (phi, ophi, sizeof (*phi));
3440 ophi->phiargs = NULL;
3441 slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
3442 gcc_assert (!*slot);
3443 *slot = phi;
3446 /* Insert the no longer used reference OREF into the hash tables of INFO. */
3448 static void
3449 copy_reference (vn_reference_t oref, vn_tables_t info)
3451 vn_reference_t ref;
3452 void **slot;
3453 ref = (vn_reference_t) pool_alloc (info->references_pool);
3454 memcpy (ref, oref, sizeof (*ref));
3455 oref->operands = NULL;
3456 slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
3457 INSERT);
3458 if (*slot)
3459 free_reference (*slot);
3460 *slot = ref;
3463 /* Process a strongly connected component in the SSA graph. */
3465 static void
3466 process_scc (VEC (tree, heap) *scc)
3468 tree var;
3469 unsigned int i;
3470 unsigned int iterations = 0;
3471 bool changed = true;
3472 htab_iterator hi;
3473 vn_nary_op_t nary;
3474 vn_phi_t phi;
3475 vn_reference_t ref;
3477 /* If the SCC has a single member, just visit it. */
3478 if (VEC_length (tree, scc) == 1)
3480 tree use = VEC_index (tree, scc, 0);
3481 if (VN_INFO (use)->use_processed)
3482 return;
3483 /* We need to make sure it doesn't form a cycle itself, which can
3484 happen for self-referential PHI nodes. In that case we would
3485 end up inserting an expression with VN_TOP operands into the
3486 valid table which makes us derive bogus equivalences later.
3487 The cheapest way to check this is to assume it for all PHI nodes. */
3488 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3489 /* Fallthru to iteration. */ ;
3490 else
3492 visit_use (use);
3493 return;
3497 /* Iterate over the SCC with the optimistic table until it stops
3498 changing. */
3499 current_info = optimistic_info;
3500 while (changed)
3502 changed = false;
3503 iterations++;
3504 if (dump_file && (dump_flags & TDF_DETAILS))
3505 fprintf (dump_file, "Starting iteration %d\n", iterations);
3506 /* As we are value-numbering optimistically we have to
3507 clear the expression tables and the simplified expressions
3508 in each iteration until we converge. */
3509 htab_empty (optimistic_info->nary);
3510 htab_empty (optimistic_info->phis);
3511 htab_empty (optimistic_info->references);
3512 obstack_free (&optimistic_info->nary_obstack, NULL);
3513 gcc_obstack_init (&optimistic_info->nary_obstack);
3514 empty_alloc_pool (optimistic_info->phis_pool);
3515 empty_alloc_pool (optimistic_info->references_pool);
3516 FOR_EACH_VEC_ELT (tree, scc, i, var)
3517 VN_INFO (var)->expr = NULL_TREE;
3518 FOR_EACH_VEC_ELT (tree, scc, i, var)
3519 changed |= visit_use (var);
3522 statistics_histogram_event (cfun, "SCC iterations", iterations);
3524 /* Finally, copy the contents of the no longer used optimistic
3525 table to the valid table. */
3526 FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
3527 copy_nary (nary, valid_info);
3528 FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
3529 copy_phi (phi, valid_info);
3530 FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
3531 copy_reference (ref, valid_info);
3533 current_info = valid_info;
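/* Editorial sketch, not part of GCC: the optimistic iteration above as a
   skeleton -- wipe the scratch table, revisit every SCC member, and repeat
   until one full pass changes no value number, then commit the survivors.
   Every name below is invented; this is shape, not implementation.  */
#if 0
#include <stdbool.h>

struct member;
extern int scc_size (void);
extern struct member *scc_member (int i);
extern void clear_scratch_table (void);
extern bool revisit (struct member *m);   /* True if its value changed.  */
extern void commit_scratch_to_valid (void);

static void
iterate_scc_to_fixpoint (void)
{
  bool changed = true;
  while (changed)
    {
      changed = false;
      clear_scratch_table ();   /* Each round starts from a clean slate.  */
      for (int i = 0; i < scc_size (); i++)
        changed |= revisit (scc_member (i));
    }
  commit_scratch_to_valid ();   /* Fixed point reached: results are final.  */
}
#endif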
3536 DEF_VEC_O(ssa_op_iter);
3537 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
3539 /* Pop the components of the found SCC for NAME off the SCC stack
3540 and process them. Returns true if all went well, false if
3541 we run into resource limits. */
3543 static bool
3544 extract_and_process_scc_for_name (tree name)
3546 VEC (tree, heap) *scc = NULL;
3547 tree x;
3549 /* Found an SCC, pop the components off the SCC stack and
3550 process them. */
3553 x = VEC_pop (tree, sccstack);
3555 VN_INFO (x)->on_sccstack = false;
3556 VEC_safe_push (tree, heap, scc, x);
3557 } while (x != name);
3559 /* Bail out of SCCVN in case a SCC turns out to be incredibly large. */
3560 if (VEC_length (tree, scc)
3561 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3563 if (dump_file)
3564 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3565 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
3566 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3567 return false;
3570 if (VEC_length (tree, scc) > 1)
3571 sort_scc (scc);
3573 if (dump_file && (dump_flags & TDF_DETAILS))
3574 print_scc (dump_file, scc);
3576 process_scc (scc);
3578 VEC_free (tree, heap, scc);
3580 return true;
3583 /* Depth first search on NAME to discover and process SCC's in the SSA
3584 graph.
3585 Execution of this algorithm relies on the fact that the SCC's are
3586 popped off the stack in topological order.
3587 Returns true if successful, false if we stopped processing SCC's due
3588 to resource constraints. */
3590 static bool
3591 DFS (tree name)
3593 VEC(ssa_op_iter, heap) *itervec = NULL;
3594 VEC(tree, heap) *namevec = NULL;
3595 use_operand_p usep = NULL;
3596 gimple defstmt;
3597 tree use;
3598 ssa_op_iter iter;
3600 start_over:
3601 /* SCC info */
3602 VN_INFO (name)->dfsnum = next_dfs_num++;
3603 VN_INFO (name)->visited = true;
3604 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3606 VEC_safe_push (tree, heap, sccstack, name);
3607 VN_INFO (name)->on_sccstack = true;
3608 defstmt = SSA_NAME_DEF_STMT (name);
3610 /* Recursively DFS on our operands, looking for SCC's. */
3611 if (!gimple_nop_p (defstmt))
3613 /* Push a new iterator. */
3614 if (gimple_code (defstmt) == GIMPLE_PHI)
3615 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3616 else
3617 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3619 else
3620 clear_and_done_ssa_iter (&iter);
3622 while (1)
3624 /* If we are done processing uses of a name, go up the stack
3625 of iterators and process SCCs as we found them. */
3626 if (op_iter_done (&iter))
3628 /* See if we found an SCC. */
3629 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3630 if (!extract_and_process_scc_for_name (name))
3632 VEC_free (tree, heap, namevec);
3633 VEC_free (ssa_op_iter, heap, itervec);
3634 return false;
3637 /* Check if we are done. */
3638 if (VEC_empty (tree, namevec))
3640 VEC_free (tree, heap, namevec);
3641 VEC_free (ssa_op_iter, heap, itervec);
3642 return true;
3645 /* Restore the last use walker and continue walking there. */
3646 use = name;
3647 name = VEC_pop (tree, namevec);
3648 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
3649 sizeof (ssa_op_iter));
3650 VEC_pop (ssa_op_iter, itervec);
3651 goto continue_walking;
3654 use = USE_FROM_PTR (usep);
3656 /* Since we handle phi nodes, we will sometimes get
3657 invariants in the use expression. */
3658 if (TREE_CODE (use) == SSA_NAME)
3660 if (! (VN_INFO (use)->visited))
3662 /* Recurse by pushing the current use walking state on
3663 the stack and starting over. */
3664 VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
3665 VEC_safe_push(tree, heap, namevec, name);
3666 name = use;
3667 goto start_over;
3669 continue_walking:
3670 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3671 VN_INFO (use)->low);
3673 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3674 && VN_INFO (use)->on_sccstack)
3676 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3677 VN_INFO (name)->low);
3681 usep = op_iter_next_use (&iter);
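/* Editorial sketch, not part of GCC: DFS above is Tarjan's SCC algorithm
   with the recursion flattened onto explicit name/iterator stacks.  The
   textbook recursive form, on an invented adjacency-matrix graph, shows
   the dfsnum/low bookkeeping more directly.  */
#if 0
#include <stdio.h>

#define N 5
static int adj[N][N], dfsnum[N], low[N], onstack[N];
static int stack[N], sp, next_dfs = 1;

static void
tarjan (int v)
{
  dfsnum[v] = low[v] = next_dfs++;
  stack[sp++] = v;
  onstack[v] = 1;

  for (int w = 0; w < N; w++)
    if (adj[v][w])
      {
        if (!dfsnum[w])
          {
            tarjan (w);                 /* Tree edge: recurse.  */
            if (low[w] < low[v])
              low[v] = low[w];
          }
        else if (onstack[w] && dfsnum[w] < low[v])
          low[v] = dfsnum[w];           /* Edge back into the stack.  */
      }

  if (low[v] == dfsnum[v])              /* V roots an SCC: pop it.  */
    {
      int w;
      printf ("SCC:");
      do
        {
          w = stack[--sp];
          onstack[w] = 0;
          printf (" %d", w);
        }
      while (w != v);
      printf ("\n");
    }
}

int
main (void)
{
  adj[0][1] = adj[1][2] = adj[2][0] = 1;        /* A three-node cycle...  */
  adj[2][3] = adj[3][4] = 1;                    /* ...then two singletons.  */
  for (int v = 0; v < N; v++)
    if (!dfsnum[v])
      tarjan (v);
  return 0;
}
#endif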
3685 /* Allocate a value number table. */
3687 static void
3688 allocate_vn_table (vn_tables_t table)
3690 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3691 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3692 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3693 free_reference);
3695 gcc_obstack_init (&table->nary_obstack);
3696 table->phis_pool = create_alloc_pool ("VN phis",
3697 sizeof (struct vn_phi_s),
3698 30);
3699 table->references_pool = create_alloc_pool ("VN references",
3700 sizeof (struct vn_reference_s),
3701 30);
3704 /* Free a value number table. */
3706 static void
3707 free_vn_table (vn_tables_t table)
3709 htab_delete (table->phis);
3710 htab_delete (table->nary);
3711 htab_delete (table->references);
3712 obstack_free (&table->nary_obstack, NULL);
3713 free_alloc_pool (table->phis_pool);
3714 free_alloc_pool (table->references_pool);
3717 static void
3718 init_scc_vn (void)
3720 size_t i;
3721 int j;
3722 int *rpo_numbers_temp;
3724 calculate_dominance_info (CDI_DOMINATORS);
3725 sccstack = NULL;
3726 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3727 free);
3729 constant_value_ids = BITMAP_ALLOC (NULL);
3731 next_dfs_num = 1;
3732 next_value_id = 1;
3734 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
3735 /* VEC_alloc doesn't actually grow it to the right size; it just
3736 preallocates the space to do so. */
3737 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
3738 gcc_obstack_init (&vn_ssa_aux_obstack);
3740 shared_lookup_phiargs = NULL;
3741 shared_lookup_references = NULL;
3742 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3743 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3744 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3746 /* RPO_NUMBERS_TEMP is an array of the RPO ordering: rpo[i] = bb means
3747 that the i'th block in RPO order is bb. We want to map bb indices to
3748 RPO numbers, so we need to invert this array. */
3749 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3750 rpo_numbers[rpo_numbers_temp[j]] = j;
3752 XDELETE (rpo_numbers_temp);
3754 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3756 /* Create the VN_INFO structures, and initialize value numbers to
3757 TOP. */
3758 for (i = 0; i < num_ssa_names; i++)
3760 tree name = ssa_name (i);
3761 if (name)
3763 VN_INFO_GET (name)->valnum = VN_TOP;
3764 VN_INFO (name)->expr = NULL_TREE;
3765 VN_INFO (name)->value_id = 0;
3769 renumber_gimple_stmt_uids ();
3771 /* Create the valid and optimistic value numbering tables. */
3772 valid_info = XCNEW (struct vn_tables_s);
3773 allocate_vn_table (valid_info);
3774 optimistic_info = XCNEW (struct vn_tables_s);
3775 allocate_vn_table (optimistic_info);
3778 void
3779 free_scc_vn (void)
3781 size_t i;
3783 htab_delete (constant_to_value_id);
3784 BITMAP_FREE (constant_value_ids);
3785 VEC_free (tree, heap, shared_lookup_phiargs);
3786 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
3787 XDELETEVEC (rpo_numbers);
3789 for (i = 0; i < num_ssa_names; i++)
3791 tree name = ssa_name (i);
3792 if (name
3793 && VN_INFO (name)->needs_insertion)
3794 release_ssa_name (name);
3796 obstack_free (&vn_ssa_aux_obstack, NULL);
3797 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
3799 VEC_free (tree, heap, sccstack);
3800 free_vn_table (valid_info);
3801 XDELETE (valid_info);
3802 free_vn_table (optimistic_info);
3803 XDELETE (optimistic_info);
3806 /* Set *ID if we computed something useful in RESULT. */
3808 static void
3809 set_value_id_for_result (tree result, unsigned int *id)
3811 if (result)
3813 if (TREE_CODE (result) == SSA_NAME)
3814 *id = VN_INFO (result)->value_id;
3815 else if (is_gimple_min_invariant (result))
3816 *id = get_or_alloc_constant_value_id (result);
3820 /* Set the value ids in the valid hash tables. */
3822 static void
3823 set_hashtable_value_ids (void)
3825 htab_iterator hi;
3826 vn_nary_op_t vno;
3827 vn_reference_t vr;
3828 vn_phi_t vp;
3830 /* Now set the value ids of the things we had put in the hash
3831 table. */
3833 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
3834 vno, vn_nary_op_t, hi)
3835 set_value_id_for_result (vno->result, &vno->value_id);
3837 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
3838 vp, vn_phi_t, hi)
3839 set_value_id_for_result (vp->result, &vp->value_id);
3841 FOR_EACH_HTAB_ELEMENT (valid_info->references,
3842 vr, vn_reference_t, hi)
3843 set_value_id_for_result (vr->result, &vr->value_id);
3846 /* Do SCCVN. Returns true if it finished, false if we bailed out
3847 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
3848 how we use the alias oracle walking during the VN process. */
3850 bool
3851 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
3853 size_t i;
3854 tree param;
3855 bool changed = true;
3857 default_vn_walk_kind = default_vn_walk_kind_;
3859 init_scc_vn ();
3860 current_info = valid_info;
3862 for (param = DECL_ARGUMENTS (current_function_decl);
3863 param;
3864 param = DECL_CHAIN (param))
3866 if (gimple_default_def (cfun, param) != NULL)
3868 tree def = gimple_default_def (cfun, param);
3869 VN_INFO (def)->valnum = def;
3873 for (i = 1; i < num_ssa_names; ++i)
3875 tree name = ssa_name (i);
3876 if (name
3877 && VN_INFO (name)->visited == false
3878 && !has_zero_uses (name))
3879 if (!DFS (name))
3881 free_scc_vn ();
3882 return false;
3886 /* Initialize the value ids. */
3888 for (i = 1; i < num_ssa_names; ++i)
3890 tree name = ssa_name (i);
3891 vn_ssa_aux_t info;
3892 if (!name)
3893 continue;
3894 info = VN_INFO (name);
3895 if (info->valnum == name
3896 || info->valnum == VN_TOP)
3897 info->value_id = get_next_value_id ();
3898 else if (is_gimple_min_invariant (info->valnum))
3899 info->value_id = get_or_alloc_constant_value_id (info->valnum);
3902 /* Propagate until they stop changing. */
3903 while (changed)
3905 changed = false;
3906 for (i = 1; i < num_ssa_names; ++i)
3908 tree name = ssa_name (i);
3909 vn_ssa_aux_t info;
3910 if (!name)
3911 continue;
3912 info = VN_INFO (name);
3913 if (TREE_CODE (info->valnum) == SSA_NAME
3914 && info->valnum != name
3915 && info->value_id != VN_INFO (info->valnum)->value_id)
3917 changed = true;
3918 info->value_id = VN_INFO (info->valnum)->value_id;
3923 set_hashtable_value_ids ();
3925 if (dump_file && (dump_flags & TDF_DETAILS))
3927 fprintf (dump_file, "Value numbers:\n");
3928 for (i = 0; i < num_ssa_names; i++)
3930 tree name = ssa_name (i);
3931 if (name
3932 && VN_INFO (name)->visited
3933 && SSA_VAL (name) != name)
3935 print_generic_expr (dump_file, name, 0);
3936 fprintf (dump_file, " = ");
3937 print_generic_expr (dump_file, SSA_VAL (name), 0);
3938 fprintf (dump_file, "\n");
3943 return true;
3946 /* Return the maximum value id we have ever seen. */
3948 unsigned int
3949 get_max_value_id (void)
3951 return next_value_id;
3954 /* Return the next unique value id. */
3956 unsigned int
3957 get_next_value_id (void)
3959 return next_value_id++;
3963 /* Compare two expressions E1 and E2 and return true if they are equal. */
3965 bool
3966 expressions_equal_p (tree e1, tree e2)
3968 /* The obvious case. */
3969 if (e1 == e2)
3970 return true;
3972 /* If only one of them is null, they cannot be equal. */
3973 if (!e1 || !e2)
3974 return false;
3976 /* Now perform the actual comparison. */
3977 if (TREE_CODE (e1) == TREE_CODE (e2)
3978 && operand_equal_p (e1, e2, OEP_PURE_SAME))
3979 return true;
3981 return false;
3985 /* Return true if the nary operation NARY may trap. This is a copy
3986 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
3988 bool
3989 vn_nary_may_trap (vn_nary_op_t nary)
3991 tree type;
3992 tree rhs2 = NULL_TREE;
3993 bool honor_nans = false;
3994 bool honor_snans = false;
3995 bool fp_operation = false;
3996 bool honor_trapv = false;
3997 bool handled, ret;
3998 unsigned i;
4000 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4001 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4002 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4004 type = nary->type;
4005 fp_operation = FLOAT_TYPE_P (type);
4006 if (fp_operation)
4008 honor_nans = flag_trapping_math && !flag_finite_math_only;
4009 honor_snans = flag_signaling_nans != 0;
4011 else if (INTEGRAL_TYPE_P (type)
4012 && TYPE_OVERFLOW_TRAPS (type))
4013 honor_trapv = true;
4015 if (nary->length >= 2)
4016 rhs2 = nary->op[1];
4017 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4018 honor_trapv,
4019 honor_nans, honor_snans, rhs2,
4020 &handled);
4021 if (handled
4022 && ret)
4023 return true;
4025 for (i = 0; i < nary->length; ++i)
4026 if (tree_could_trap_p (nary->op[i]))
4027 return true;
4029 return false;