/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "gimple-fold.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight-line code, it is equivalent to a regular hash-based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
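/* An illustrative sketch of alternative 2 (all names made up):

     i_1 = PHI <0(2), i_2(3)>
     j_1 = PHI <0(2), j_2(3)>
     i_2 = i_1 + 1;
     j_2 = j_1 + 1;

   The DFS over the SSA use-def graph puts {i_1, i_2, j_1, j_2} into
   one SCC.  Iterating that SCC with the optimistic table lets the
   equal-looking phis i_1 and j_1 receive the same value number, which
   in turn makes i_2 and j_2 equal; a further iteration confirms the
   assumption, so the two induction variables end up value-numbered
   alike.  */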
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;


DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
                                SSA_NAME_VERSION (name));
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), value);
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}
/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;
  enum tree_code code;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is not an assignment use it directly.  */
  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  code = gimple_assign_rhs_code (def_stmt);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      if ((code == REALPART_EXPR
           || code == IMAGPART_EXPR
           || code == VIEW_CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
                                      0)) == SSA_NAME)
        expr = fold_build1 (code,
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (code,
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (code,
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    case tcc_exceptional:
      if (code == CONSTRUCTOR
          && TREE_CODE
               (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
        expr = gimple_assign_rhs1 (def_stmt);
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
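/* For example (names made up): if x_1 has value number y_2 and y_2 is
   defined by y_2 = a_3 + 1, then vn_get_expr_for (x_1) builds (and
   caches) the representative expression a_3 + 1 via the tcc_binary
   case above.  */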
/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, INSERT);
  if (*slot)
    return ((vn_constant_t)*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = (void *) vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
          /* We do not care for differences in type qualification.  */
          && (vro1->type == vro2->type
              || (vro1->type && vro2->type
                  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
                                         TYPE_MAIN_VARIANT (vro2->type))))
          && expressions_equal_p (vro1->op0, vro2->op0)
          && expressions_equal_p (vro1->op1, vro2->op1)
          && expressions_equal_p (vro1->op2, vro2->op2));
}
/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vn_reference_op_s, vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
        deref = true;
      else if (vro->opcode != ADDR_EXPR)
        deref = false;
      if (vro->off != -1)
        {
          if (off == -1)
            off = 0;
          off += vro->off;
        }
      else
        {
          if (off != -1
              && off != 0)
            result = iterative_hash_hashval_t (off, result);
          off = -1;
          if (deref
              && vro->opcode == ADDR_EXPR)
            {
              if (vro->op0)
                {
                  tree op = TREE_OPERAND (vro->op0, 0);
                  result = iterative_hash_hashval_t (TREE_CODE (op), result);
                  result = iterative_hash_expr (op, result);
                }
            }
          else
            result = vn_reference_op_compute_hash (vro, result);
        }
    }
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
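/* Note (illustrative): runs of operands with known constant offsets
   are folded into a single accumulated offset for hashing, and an
   ADDR_EXPR feeding a MEM_REF is hashed by the address-taken object
   itself rather than by the address expression.  Together with the
   MEM[&decl] canonicalization done in copy_reference_ops_from_ref
   this gives plain decl accesses and the equivalent MEM_REF forms a
   common hash; vn_reference_eq below performs the matching
   offset-based comparison.  */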
/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  unsigned i, j;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
        return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
           && (TYPE_PRECISION (vr1->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
           && (TYPE_PRECISION (vr2->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
        {
          if (vro1->opcode == MEM_REF)
            deref1 = true;
          if (vro1->off == -1)
            break;
          off1 += vro1->off;
        }
      for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
        {
          if (vro2->opcode == MEM_REF)
            deref2 = true;
          if (vro2->off == -1)
            break;
          off2 += vro2->off;
        }
      if (off1 != off2)
        return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
        {
          memset (&tem1, 0, sizeof (tem1));
          tem1.op0 = TREE_OPERAND (vro1->op0, 0);
          tem1.type = TREE_TYPE (tem1.op0);
          tem1.opcode = TREE_CODE (tem1.op0);
          vro1 = &tem1;
        }
      if (deref2 && vro2->opcode == ADDR_EXPR)
        {
          memset (&tem2, 0, sizeof (tem2));
          tem2.op0 = TREE_OPERAND (vro2->op0, 0);
          tem2.type = TREE_TYPE (tem2.op0);
          tem2.opcode = TREE_CODE (tem2.op0);
          vro2 = &tem2;
        }
      if (!vn_reference_op_eq (vro1, vro2))
        return false;
      ++j;
      ++i;
    }
  while (VEC_length (vn_reference_op_s, vr1->operands) != i
         || VEC_length (vn_reference_op_s, vr2->operands) != j);

  return true;
}
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
        {
        case MEM_REF:
          /* The base address gets its own vn_reference_op_s structure.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          if (host_integerp (TREE_OPERAND (ref, 1), 0))
            temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          {
            tree this_offset = component_ref_field_offset (ref);
            if (this_offset
                && TREE_CODE (this_offset) == INTEGER_CST)
              {
                tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
                if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
                  {
                    double_int off
                      = double_int_add (tree_to_double_int (this_offset),
                                        double_int_rshift
                                          (tree_to_double_int (bit_offset),
                                           BITS_PER_UNIT == 8
                                           ? 3 : exact_log2 (BITS_PER_UNIT),
                                           HOST_BITS_PER_DOUBLE_INT, true));
                    if (double_int_fits_in_shwi_p (off))
                      temp.off = off.low;
                  }
              }
          }
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
          if (TREE_CODE (temp.op0) == INTEGER_CST
              && TREE_CODE (temp.op1) == INTEGER_CST
              && TREE_CODE (temp.op2) == INTEGER_CST)
            {
              double_int off = tree_to_double_int (temp.op0);
              off = double_int_add (off,
                                    double_int_neg
                                      (tree_to_double_int (temp.op1)));
              off = double_int_mul (off, tree_to_double_int (temp.op2));
              if (double_int_fits_in_shwi_p (off))
                temp.off = off.low;
            }
          break;
        case VAR_DECL:
          if (DECL_HARD_REGISTER (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthru.  */
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
          /* Canonicalize decls to MEM[&decl] which is what we end up with
             when valueizing MEM[ptr] with ptr = &decl.  */
          temp.opcode = MEM_REF;
          temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
          temp.off = 0;
          VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
          temp.opcode = ADDR_EXPR;
          temp.op0 = build_fold_addr_expr (ref);
          temp.type = TREE_TYPE (temp.op0);
          temp.off = -1;
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case FIXED_CST:
        case CONSTRUCTOR:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (i.e. they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration.  */
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          temp.off = 0;
          break;
        case IMAGPART_EXPR:
          /* This is only interesting for its constant offset.  */
          temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
          break;
        default:
          gcc_unreachable ();
        }
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
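/* An illustrative decomposition (field and variable names made up):
   a load from s.f[i_1] is flattened outermost-first into roughly

     { ARRAY_REF, op0 = i_1, op1 = low bound, op2 = element size }
     { COMPONENT_REF, op0 = FIELD_DECL f }
     { MEM_REF, op0 = 0 }  { ADDR_EXPR, op0 = &s }

   where the final two entries are the MEM[&decl] canonicalization
   performed by the VAR_DECL case above.  */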
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
        size = -1;
      else
        size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case ADDR_EXPR:
          /* Apart from ADDR_EXPR arguments to MEM_REF.  */
          if (base != NULL_TREE
              && TREE_CODE (base) == MEM_REF
              && op->op0
              && DECL_P (TREE_OPERAND (op->op0, 0)))
            {
              vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
              base = TREE_OPERAND (op->op0, 0);
              if (pop->off == -1)
                {
                  max_size = -1;
                  offset = 0;
                }
              else
                offset += pop->off * BITS_PER_UNIT;
              op0_p = NULL;
              break;
            }
          /* Fallthru.  */
        case CALL_EXPR:
          return false;

        /* Record the base objects.  */
        case MEM_REF:
          base_alias_set = get_deref_alias_set (op->op0);
          *op0_p = build2 (MEM_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          op0_p = NULL;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += tree_low_cst (op->op1, 0);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */

            if (op->op1
                || !host_integerp (DECL_FIELD_OFFSET (field), 1))
              max_size = -1;
            else
              {
                offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
                           * BITS_PER_UNIT);
                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (!host_integerp (op->op0, 0)
              || !host_integerp (op->op1, 0)
              || !host_integerp (op->op2, 0))
            max_size = -1;
          else
            {
              HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
              hindex -= TREE_INT_CST_LOW (op->op1);
              hindex *= TREE_INT_CST_LOW (op->op2);
              hindex *= BITS_PER_UNIT;
              offset += hindex;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  return true;
}
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  temp.off = -1;
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
                            unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  tree addr_base;
  HOST_WIDE_INT addr_offset;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
                                             &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != op->op0)
    {
      double_int off = tree_to_double_int (mem_op->op0);
      off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
      off = double_int_add (off, shwi_to_double_int (addr_offset));
      mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (host_integerp (mem_op->op0, 0))
        mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
      else
        mem_op->off = -1;
    }
}
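/* For example (sketch): for ops representing MEM[&s.a.b, 16] the
   address operand is replaced by &s and the constant offset of the
   .a.b component path is folded into the MEM_REF offset.  */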
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
                                     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  gimple def_stmt;
  enum tree_code code;
  double_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = tree_to_double_int (mem_op->op0);
  off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
                                                 &addr_offset);
      if (!addr_base
          || TREE_CODE (addr_base) != MEM_REF)
        return;

      off = double_int_add (off, shwi_to_double_int (addr_offset));
      off = double_int_add (off, mem_ref_offset (addr_base));
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
          || TREE_CODE (ptroff) != INTEGER_CST)
        return;

      off = double_int_add (off, tree_to_double_int (ptroff));
      op->op0 = ptr;
    }

  mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (host_integerp (mem_op->op0, 0))
    mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}
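/* For example (sketch): if ptr_2 is defined by ptr_2 = ptr_1 + 4, the
   ops for MEM[ptr_2, 8] become MEM[ptr_1, 12], and the function then
   recurses in case ptr_1 is itself defined by an ADDR_EXPR or another
   POINTER_PLUS_EXPR.  */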
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  VEC (vn_reference_op_s, heap) *operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = VEC_index (vn_reference_op_s, operands, 0);
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && VEC_length (vn_reference_op_s, operands) >= 2
      && VEC_length (vn_reference_op_s, operands) <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (VEC_length (vn_reference_op_s, operands) > 2)
        arg1 = VEC_index (vn_reference_op_s, operands, 2);
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
          || (arg0->opcode == ADDR_EXPR
              && is_gimple_min_invariant (arg0->op0)))
        anyconst = true;
      if (arg1
          && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
              || (arg1->opcode == ADDR_EXPR
                  && is_gimple_min_invariant (arg1->op0))))
        anyconst = true;
      if (anyconst)
        {
          tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
                                         arg1 ? 2 : 1,
                                         arg0->op0,
                                         arg1 ? arg1->op0 : NULL);
          if (folded
              && TREE_CODE (folded) == NOP_EXPR)
            folded = TREE_OPERAND (folded, 0);
          if (folded
              && is_gimple_min_invariant (folded))
            return folded;
        }
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
           && TREE_CODE (op->op0) == INTEGER_CST
           && integer_zerop (op->op1)
           && VEC_length (vn_reference_op_s, operands) == 2)
    {
      vn_reference_op_t arg0;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (arg0->opcode == STRING_CST
          && (TYPE_MODE (op->type)
              == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
          && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
          && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
          && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
        return build_int_cst_type (op->type,
                                   (TREE_STRING_POINTER (arg0->op0)
                                    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}
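/* For example, a one-byte read of "foo"[1], i.e. an ARRAY_REF with a
   constant index into a STRING_CST, is folded by the second case
   above to the character constant 'o'.  */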
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs_1 (VEC (vn_reference_op_s, heap) *orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (vn_reference_op_s, orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          tree tem = SSA_VAL (vro->op0);
          if (tem != vro->op0)
            {
              *valueized_anything = true;
              vro->op0 = tem;
            }
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        {
          tree tem = SSA_VAL (vro->op1);
          if (tem != vro->op1)
            {
              *valueized_anything = true;
              vro->op1 = tem;
            }
        }
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        {
          tree tem = SSA_VAL (vro->op2);
          if (tem != vro->op2)
            {
              *valueized_anything = true;
              vro->op2 = tem;
            }
        }
      /* If it transforms from an SSA_NAME to an address, fold with
         a preceding indirect reference.  */
      if (i > 0
          && vro->op0
          && TREE_CODE (vro->op0) == ADDR_EXPR
          && VEC_index (vn_reference_op_s,
                        orig, i - 1)->opcode == MEM_REF)
        vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
               && vro->opcode == SSA_NAME
               && VEC_index (vn_reference_op_s,
                             orig, i - 1)->opcode == MEM_REF)
        vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
         one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
               && vro->off == -1
               && TREE_CODE (vro->op0) == INTEGER_CST
               && TREE_CODE (vro->op1) == INTEGER_CST
               && TREE_CODE (vro->op2) == INTEGER_CST)
        {
          double_int off = tree_to_double_int (vro->op0);
          off = double_int_add (off,
                                double_int_neg
                                  (tree_to_double_int (vro->op1)));
          off = double_int_mul (off, tree_to_double_int (vro->op2));
          if (double_int_fits_in_shwi_p (off))
            vro->off = off.low;
        }
    }

  return orig;
}

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}

static VEC(vn_reference_op_s, heap) *shared_lookup_references;
/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
                                              valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}
static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the constant value CST.  */

static vn_reference_t
vn_reference_lookup_or_insert_constant_for_pieces (tree vuse,
                                                   alias_set_type set,
                                                   tree type,
                                                   VEC (vn_reference_op_s,
                                                        heap) *operands,
                                                   tree cst)
{
  struct vn_reference_s vr1;
  vn_reference_t result;
  vr1.vuse = vuse;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  return vn_reference_insert_pieces (vuse, set, type,
                                     VEC_copy (vn_reference_op_s, heap,
                                               operands), cst,
                                     get_or_alloc_constant_value_id (cst));
}
/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static VEC (vn_reference_op_s, heap) *lhs_ops = NULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definition's LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      VEC (vn_reference_op_s, heap) *tem;
      tree lhs = gimple_assign_lhs (def_stmt);
      bool valueized_anything = false;
      /* Avoid re-allocation overhead.  */
      VEC_truncate (vn_reference_op_s, lhs_ops, 0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      tem = lhs_ops;
      lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
      gcc_assert (lhs_ops == tem);
      if (valueized_anything)
        {
          lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
                                                      get_alias_set (lhs),
                                                      TREE_TYPE (lhs), lhs_ops);
          if (lhs_ref_ok
              && !refs_may_alias_p_1 (ref, &lhs_ref, true))
            return NULL;
        }
      else
        {
          ao_ref_init (&lhs_ref, lhs);
          lhs_ref_ok = true;
        }
    }

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* We can't deduce anything useful from clobbers.  */
  if (gimple_clobber_p (def_stmt))
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
          == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
          && maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          return vn_reference_lookup_or_insert_constant_for_pieces
                   (vuse, vr->set, vr->type, vr->operands, val);
        }
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          return vn_reference_lookup_or_insert_constant_for_pieces
                   (vuse, vr->set, vr->type, vr->operands, val);
        }
    }

  /* 3) Assignment from a constant.  We can use fold's native
     encode/interpret routines to extract the assigned bits.  */
  else if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
           && ref->size == maxsize
           && maxsize % BITS_PER_UNIT == 0
           && offset % BITS_PER_UNIT == 0
           && is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
          && maxsize2 == size2
          && size2 % BITS_PER_UNIT == 0
          && offset2 % BITS_PER_UNIT == 0
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          /* We support up to 512-bit values (for V8DFmode).  */
          unsigned char buffer[64];
          int len;

          len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
                                    buffer, sizeof (buffer));
          if (len > 0)
            {
              tree val = native_interpret_expr (vr->type,
                                                buffer
                                                + ((offset - offset2)
                                                   / BITS_PER_UNIT),
                                                ref->size / BITS_PER_UNIT);
              if (val)
                return vn_reference_lookup_or_insert_constant_for_pieces
                         (vuse, vr->set, vr->type, vr->operands, val);
            }
        }
    }

  /* 4) Assignment from an SSA name whose definition we may be able
     to access pieces from.  */
  else if (ref->size == maxsize
           && is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
    {
      tree rhs1 = gimple_assign_rhs1 (def_stmt);
      gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt2)
          && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
              || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
          && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
        {
          tree base2;
          HOST_WIDE_INT offset2, size2, maxsize2, off;
          base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                           &offset2, &size2, &maxsize2);
          off = offset - offset2;
          if (maxsize2 != -1
              && maxsize2 == size2
              && operand_equal_p (base, base2, 0)
              && offset2 <= offset
              && offset2 + size2 >= offset + maxsize)
            {
              tree val = NULL_TREE;
              HOST_WIDE_INT elsz
                = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
              if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
                {
                  if (off == 0)
                    val = gimple_assign_rhs1 (def_stmt2);
                  else if (off == elsz)
                    val = gimple_assign_rhs2 (def_stmt2);
                }
              else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
                       && off % elsz == 0)
                {
                  tree ctor = gimple_assign_rhs1 (def_stmt2);
                  unsigned i = off / elsz;
                  if (i < CONSTRUCTOR_NELTS (ctor))
                    {
                      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
                      if (compare_tree_int (elt->index, i) == 0)
                        val = elt->value;
                    }
                }
              if (val)
                return vn_reference_lookup_or_insert_constant_for_pieces
                         (vuse, vr->set, vr->type, vr->operands, val);
            }
        }
    }

  /* 5) For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      VEC (vn_reference_op_s, heap) *rhs = NULL;
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
        return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      if (maxsize2 == -1
          || (base != base2 && !operand_equal_p (base, base2, 0))
          || offset2 > offset
          || offset2 + size2 < offset + maxsize)
        return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
         contains valueized operands for the lhs.  */
      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
      j = VEC_length (vn_reference_op_s, lhs_ops) - 1;
      while (j >= 0 && i >= 0
             && vn_reference_op_eq (VEC_index (vn_reference_op_s,
                                               vr->operands, i),
                                    VEC_index (vn_reference_op_s, lhs_ops, j)))
        {
          i--;
          j--;
        }

      /* ??? The innermost op should always be a MEM_REF and we already
         checked that the assignment to the lhs kills vr.  Thus for
         aggregate copies using char[] types the vn_reference_op_eq
         may fail when comparing types for compatibility.  But we really
         don't care here - further lookups with the rewritten operands
         will simply fail if we messed up types too badly.  */
      if (j == 0 && i >= 0
          && VEC_index (vn_reference_op_s, lhs_ops, 0)->opcode == MEM_REF
          && VEC_index (vn_reference_op_s, lhs_ops, 0)->off != -1
          && (VEC_index (vn_reference_op_s, lhs_ops, 0)->off
              == VEC_index (vn_reference_op_s, vr->operands, i)->off))
        i--, j--;

      /* i now points to the first additional op.
         ??? LHS may not be completely contained in VR, one or more
         VIEW_CONVERT_EXPRs could be in its way.  We could at least
         try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
        return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
          > VEC_length (vn_reference_op_s, vr->operands))
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
                         i + 1 + VEC_length (vn_reference_op_s, rhs));
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands,
                      i + 1 + VEC_length (vn_reference_op_s, rhs));
      FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
        VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
      VEC_free (vn_reference_op_s, heap, rhs);
      vr->operands = valueize_refs (vr->operands);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* 6) For memcpy copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && is_gimple_reg_type (vr->type)
           /* ??? Handle BCOPY as well.  */
           && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
               || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
               || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
           && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
               || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
           && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
               || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
           && host_integerp (gimple_call_arg (def_stmt, 2), 1))
    {
      tree lhs, rhs;
      ao_ref r;
      HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
      vn_reference_op_s op;
      HOST_WIDE_INT at;


      /* Only handle non-variable, addressable refs.  */
      if (ref->size != maxsize
          || offset % BITS_PER_UNIT != 0
          || ref->size % BITS_PER_UNIT != 0)
        return (void *)-1;

      /* Extract a pointer base and an offset for the destination.  */
      lhs = gimple_call_arg (def_stmt, 0);
      lhs_offset = 0;
      if (TREE_CODE (lhs) == SSA_NAME)
        lhs = SSA_VAL (lhs);
      if (TREE_CODE (lhs) == ADDR_EXPR)
        {
          tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
                                                    &lhs_offset);
          if (!tem)
            return (void *)-1;
          if (TREE_CODE (tem) == MEM_REF
              && host_integerp (TREE_OPERAND (tem, 1), 1))
            {
              lhs = TREE_OPERAND (tem, 0);
              lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
            }
          else if (DECL_P (tem))
            lhs = build_fold_addr_expr (tem);
          else
            return (void *)-1;
        }
      if (TREE_CODE (lhs) != SSA_NAME
          && TREE_CODE (lhs) != ADDR_EXPR)
        return (void *)-1;

      /* Extract a pointer base and an offset for the source.  */
      rhs = gimple_call_arg (def_stmt, 1);
      rhs_offset = 0;
      if (TREE_CODE (rhs) == SSA_NAME)
        rhs = SSA_VAL (rhs);
      if (TREE_CODE (rhs) == ADDR_EXPR)
        {
          tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
                                                    &rhs_offset);
          if (!tem)
            return (void *)-1;
          if (TREE_CODE (tem) == MEM_REF
              && host_integerp (TREE_OPERAND (tem, 1), 1))
            {
              rhs = TREE_OPERAND (tem, 0);
              rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
            }
          else if (DECL_P (tem))
            rhs = build_fold_addr_expr (tem);
          else
            return (void *)-1;
        }
      if (TREE_CODE (rhs) != SSA_NAME
          && TREE_CODE (rhs) != ADDR_EXPR)
        return (void *)-1;

      copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));

      /* The bases of the destination and the references have to agree.  */
      if ((TREE_CODE (base) != MEM_REF
           && !DECL_P (base))
          || (TREE_CODE (base) == MEM_REF
              && (TREE_OPERAND (base, 0) != lhs
                  || !host_integerp (TREE_OPERAND (base, 1), 1)))
          || (DECL_P (base)
              && (TREE_CODE (lhs) != ADDR_EXPR
                  || TREE_OPERAND (lhs, 0) != base)))
        return (void *)-1;

      /* And the access has to be contained within the memcpy destination.  */
      at = offset / BITS_PER_UNIT;
      if (TREE_CODE (base) == MEM_REF)
        at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
      if (lhs_offset > at
          || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
        return (void *)-1;

      /* Make room for 2 operands in the new reference.  */
      if (VEC_length (vn_reference_op_s, vr->operands) < 2)
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands, 2);
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands, 2);

      /* The looked-through reference is a simple MEM_REF.  */
      memset (&op, 0, sizeof (op));
      op.type = vr->type;
      op.opcode = MEM_REF;
      op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
      op.off = at - lhs_offset + rhs_offset;
      VEC_replace (vn_reference_op_s, vr->operands, 0, &op);
      op.type = TREE_TYPE (rhs);
      op.opcode = TREE_CODE (rhs);
      op.op0 = rhs;
      op.off = -1;
      VEC_replace (vn_reference_op_s, vr->operands, 1, &op);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
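/* Illustrative summary (sketch): after "a = b;" a walked lookup of
   a.x is translated by case 5 into a lookup of b.x, and after
   memcpy (&a, &b, n) a load fully covered by the copy is translated
   by case 6 into a load from the corresponding bytes of b; in both
   cases the walk then continues from the copy's source.  */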
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
                 VEC_length (vn_reference_op_s, operands));
  memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
          VEC_address (vn_reference_op_s, operands),
          sizeof (vn_reference_op_s)
          * VEC_length (vn_reference_op_s, operands));
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      vn_walk_kind = kind;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
        *vnresult =
          (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}
1868 /* Lookup OP in the current hash table, and return the resulting value
1869 number if it exists in the hash table. Return NULL_TREE if it does
1870 not exist in the hash table or if the result field of the structure
1871 was NULL.. VNRESULT will be filled in with the vn_reference_t
1872 stored in the hashtable if one exists. */
1874 tree
1875 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
1876 vn_reference_t *vnresult)
1878 VEC (vn_reference_op_s, heap) *operands;
1879 struct vn_reference_s vr1;
1880 tree cst;
1881 bool valuezied_anything;
1883 if (vnresult)
1884 *vnresult = NULL;
1886 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1887 vr1.operands = operands
1888 = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
1889 vr1.type = TREE_TYPE (op);
1890 vr1.set = get_alias_set (op);
1891 vr1.hashcode = vn_reference_compute_hash (&vr1);
1892 if ((cst = fully_constant_vn_reference_p (&vr1)))
1893 return cst;
1895 if (kind != VN_NOWALK
1896 && vr1.vuse)
1898 vn_reference_t wvnresult;
1899 ao_ref r;
1900 /* Make sure to use a valueized reference if we valueized anything.
1901 Otherwise preserve the full reference for advanced TBAA. */
1902 if (!valueized_anything
1903 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
1904 vr1.operands))
1905 ao_ref_init (&r, op);
1906 vn_walk_kind = kind;
1907 wvnresult =
1908 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1909 vn_reference_lookup_2,
1910 vn_reference_lookup_3, &vr1);
1911 if (vr1.operands != operands)
1912 VEC_free (vn_reference_op_s, heap, vr1.operands);
1913 if (wvnresult)
1915 if (vnresult)
1916 *vnresult = wvnresult;
1917 return wvnresult->result;
1920 return NULL_TREE;
1923 return vn_reference_lookup_1 (&vr1, vnresult);
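/* Example of what the walking lookup above catches (illustrative):

     *p_1 = x_2;
     *q_3 = y_4;    <- provably does not alias *p_1
     z_5 = *p_1;

   The plain hash lookup for the load fails because its VUSE differs
   from the VDEF the store was recorded under, but
   walk_non_aliased_vuses skips the non-aliasing store and the lookup
   succeeds at the earlier store, value numbering z_5 to x_2.  */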
1927 /* Insert OP into the current hash table with a value number of
1928 RESULT, and return the resulting reference structure we created. */
1930 vn_reference_t
1931 vn_reference_insert (tree op, tree result, tree vuse)
1933 void **slot;
1934 vn_reference_t vr1;
1936 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1937 if (TREE_CODE (result) == SSA_NAME)
1938 vr1->value_id = VN_INFO (result)->value_id;
1939 else
1940 vr1->value_id = get_or_alloc_constant_value_id (result);
1941 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1942 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
1943 vr1->type = TREE_TYPE (op);
1944 vr1->set = get_alias_set (op);
1945 vr1->hashcode = vn_reference_compute_hash (vr1);
1946 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
1948 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1949 INSERT);
1951 /* Because we look up stores using vuses, and value number failures
1952 using the vdefs (see visit_reference_op_store for how and why),
1953 it's possible that on failure we may try to insert an already
1954 inserted store. This is not wrong; there is no SSA name for a
1955 store that we could use as a differentiator anyway. Thus, unlike
1956 the other lookup functions, you cannot gcc_assert (!*slot)
1957 here. */
1959 /* But free the old slot in case of a collision. */
1960 if (*slot)
1961 free_reference (*slot);
1963 *slot = vr1;
1964 return vr1;
1967 /* Insert a reference by its pieces into the current hash table with
1968 a value number of RESULT. Return the resulting reference
1969 structure we created. */
1971 vn_reference_t
1972 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
1973 VEC (vn_reference_op_s, heap) *operands,
1974 tree result, unsigned int value_id)
1977 void **slot;
1978 vn_reference_t vr1;
1980 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1981 vr1->value_id = value_id;
1982 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1983 vr1->operands = valueize_refs (operands);
1984 vr1->type = type;
1985 vr1->set = set;
1986 vr1->hashcode = vn_reference_compute_hash (vr1);
1987 if (result && TREE_CODE (result) == SSA_NAME)
1988 result = SSA_VAL (result);
1989 vr1->result = result;
1991 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1992 INSERT);
1994 /* At this point we should have all the things inserted that we have
1995 seen before, and we should never try inserting something that
1996 already exists. */
1997 gcc_assert (!*slot);
1998 if (*slot)
1999 free_reference (*slot);
2001 *slot = vr1;
2002 return vr1;
2005 /* Compute and return the hash value for nary operation VNO1. */
2007 hashval_t
2008 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2010 hashval_t hash;
2011 unsigned i;
2013 for (i = 0; i < vno1->length; ++i)
2014 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2015 vno1->op[i] = SSA_VAL (vno1->op[i]);
2017 if (vno1->length == 2
2018 && commutative_tree_code (vno1->opcode)
2019 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2021 tree temp = vno1->op[0];
2022 vno1->op[0] = vno1->op[1];
2023 vno1->op[1] = temp;
2026 hash = iterative_hash_hashval_t (vno1->opcode, 0);
2027 for (i = 0; i < vno1->length; ++i)
2028 hash = iterative_hash_expr (vno1->op[i], hash);
2030 return hash;
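/* Illustration: operands are valueized and commutative operations
   canonicalized before hashing, so the statements

     x_1 = a_2 + b_3;
     y_4 = b_3 + a_2;

   produce identical vn_nary_op_s contents, hash identically, and the
   second lookup therefore finds the entry made for the first.  */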
2033 /* Return the computed hashcode for nary operation P1. */
2035 static hashval_t
2036 vn_nary_op_hash (const void *p1)
2038 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2039 return vno1->hashcode;
2042 /* Compare nary operations P1 and P2 and return true if they are
2043 equivalent. */
2045 int
2046 vn_nary_op_eq (const void *p1, const void *p2)
2048 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2049 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
2050 unsigned i;
2052 if (vno1->hashcode != vno2->hashcode)
2053 return false;
2055 if (vno1->length != vno2->length)
2056 return false;
2058 if (vno1->opcode != vno2->opcode
2059 || !types_compatible_p (vno1->type, vno2->type))
2060 return false;
2062 for (i = 0; i < vno1->length; ++i)
2063 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2064 return false;
2066 return true;
2069 /* Initialize VNO from the pieces provided. */
2071 static void
2072 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2073 enum tree_code code, tree type, tree *ops)
2075 vno->opcode = code;
2076 vno->length = length;
2077 vno->type = type;
2078 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2081 /* Initialize VNO from OP. */
2083 static void
2084 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2086 unsigned i;
2088 vno->opcode = TREE_CODE (op);
2089 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2090 vno->type = TREE_TYPE (op);
2091 for (i = 0; i < vno->length; ++i)
2092 vno->op[i] = TREE_OPERAND (op, i);
2095 /* Return the number of operands for a vn_nary ops structure from STMT. */
2097 static unsigned int
2098 vn_nary_length_from_stmt (gimple stmt)
2100 switch (gimple_assign_rhs_code (stmt))
2102 case REALPART_EXPR:
2103 case IMAGPART_EXPR:
2104 case VIEW_CONVERT_EXPR:
2105 return 1;
2107 case CONSTRUCTOR:
2108 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2110 default:
2111 return gimple_num_ops (stmt) - 1;
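/* Illustrative lengths (hypothetical statements):

     x_1 = REALPART_EXPR <c_2>;    -> 1
     v_3 = {a_4, b_5};             -> CONSTRUCTOR_NELTS, here 2
     y_6 = a_4 + b_5;              -> gimple_num_ops - 1, here 2  */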
2115 /* Initialize VNO from STMT. */
2117 static void
2118 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2120 unsigned i;
2122 vno->opcode = gimple_assign_rhs_code (stmt);
2123 vno->type = gimple_expr_type (stmt);
2124 switch (vno->opcode)
2126 case REALPART_EXPR:
2127 case IMAGPART_EXPR:
2128 case VIEW_CONVERT_EXPR:
2129 vno->length = 1;
2130 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2131 break;
2133 case CONSTRUCTOR:
2134 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2135 for (i = 0; i < vno->length; ++i)
2136 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2137 break;
2139 default:
2140 vno->length = gimple_num_ops (stmt) - 1;
2141 for (i = 0; i < vno->length; ++i)
2142 vno->op[i] = gimple_op (stmt, i + 1);
2146 /* Compute the hashcode for VNO and look for it in the hash table;
2147 return the resulting value number if it exists in the hash table.
2148 Return NULL_TREE if it does not exist in the hash table or if the
2149 result field of the operation is NULL. VNRESULT will contain the
2150 vn_nary_op_t from the hashtable if it exists. */
2152 static tree
2153 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2155 void **slot;
2157 if (vnresult)
2158 *vnresult = NULL;
2160 vno->hashcode = vn_nary_op_compute_hash (vno);
2161 slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
2162 NO_INSERT);
2163 if (!slot && current_info == optimistic_info)
2164 slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
2165 NO_INSERT);
2166 if (!slot)
2167 return NULL_TREE;
2168 if (vnresult)
2169 *vnresult = (vn_nary_op_t)*slot;
2170 return ((vn_nary_op_t)*slot)->result;
2173 /* Lookup an n-ary operation by its pieces and return the resulting value
2174 number if it exists in the hash table. Return NULL_TREE if it does
2175 not exist in the hash table or if the result field of the operation
2176 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2177 if it exists. */
2179 tree
2180 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2181 tree type, tree *ops, vn_nary_op_t *vnresult)
2183 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2184 sizeof_vn_nary_op (length));
2185 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2186 return vn_nary_op_lookup_1 (vno1, vnresult);
2189 /* Lookup OP in the current hash table, and return the resulting value
2190 number if it exists in the hash table. Return NULL_TREE if it does
2191 not exist in the hash table or if the result field of the operation
2192 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2193 if it exists. */
2195 tree
2196 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2198 vn_nary_op_t vno1
2199 = XALLOCAVAR (struct vn_nary_op_s,
2200 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2201 init_vn_nary_op_from_op (vno1, op);
2202 return vn_nary_op_lookup_1 (vno1, vnresult);
2205 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2206 value number if it exists in the hash table. Return NULL_TREE if
2207 it does not exist in the hash table. VNRESULT will contain the
2208 vn_nary_op_t from the hashtable if it exists. */
2210 tree
2211 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2213 vn_nary_op_t vno1
2214 = XALLOCAVAR (struct vn_nary_op_s,
2215 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2216 init_vn_nary_op_from_stmt (vno1, stmt);
2217 return vn_nary_op_lookup_1 (vno1, vnresult);
2220 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2222 static vn_nary_op_t
2223 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2225 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2228 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2229 obstack. */
2231 static vn_nary_op_t
2232 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2234 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2235 &current_info->nary_obstack);
2237 vno1->value_id = value_id;
2238 vno1->length = length;
2239 vno1->result = result;
2241 return vno1;
2244 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2245 VNO->HASHCODE first. */
2247 static vn_nary_op_t
2248 vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash)
2250 void **slot;
2252 if (compute_hash)
2253 vno->hashcode = vn_nary_op_compute_hash (vno);
2255 slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT);
2256 gcc_assert (!*slot);
2258 *slot = vno;
2259 return vno;
2262 /* Insert an n-ary operation into the current hash table using its
2263 pieces. Return the vn_nary_op_t structure we created and put in
2264 the hashtable. */
2266 vn_nary_op_t
2267 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2268 tree type, tree *ops,
2269 tree result, unsigned int value_id)
2271 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2272 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2273 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2276 /* Insert OP into the current hash table with a value number of
2277 RESULT. Return the vn_nary_op_t structure we created and put in
2278 the hashtable. */
2280 vn_nary_op_t
2281 vn_nary_op_insert (tree op, tree result)
2283 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2284 vn_nary_op_t vno1;
2286 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2287 init_vn_nary_op_from_op (vno1, op);
2288 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2291 /* Insert the rhs of STMT into the current hash table with a value number of
2292 RESULT. */
2294 vn_nary_op_t
2295 vn_nary_op_insert_stmt (gimple stmt, tree result)
2297 vn_nary_op_t vno1
2298 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2299 result, VN_INFO (result)->value_id);
2300 init_vn_nary_op_from_stmt (vno1, stmt);
2301 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2304 /* Compute a hashcode for PHI operation VP1 and return it. */
2306 static inline hashval_t
2307 vn_phi_compute_hash (vn_phi_t vp1)
2309 hashval_t result;
2310 int i;
2311 tree phi1op;
2312 tree type;
2314 result = vp1->block->index;
2316 /* If all PHI arguments are constants we need to distinguish
2317 the PHI node via its type. */
2318 type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
2319 result += (INTEGRAL_TYPE_P (type)
2320 + (INTEGRAL_TYPE_P (type)
2321 ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
2323 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2325 if (phi1op == VN_TOP)
2326 continue;
2327 result = iterative_hash_expr (phi1op, result);
2330 return result;
2333 /* Return the computed hashcode for phi operation P1. */
2335 static hashval_t
2336 vn_phi_hash (const void *p1)
2338 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2339 return vp1->hashcode;
2342 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2344 static int
2345 vn_phi_eq (const void *p1, const void *p2)
2347 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2348 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
2350 if (vp1->hashcode != vp2->hashcode)
2351 return false;
2353 if (vp1->block == vp2->block)
2355 int i;
2356 tree phi1op;
2358 /* If the PHI nodes do not have compatible types
2359 they are not the same. */
2360 if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
2361 TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
2362 return false;
2364 /* Any phi in the same block will have its arguments in the
2365 same edge order, because of how we store phi nodes. */
2366 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2368 tree phi2op = VEC_index (tree, vp2->phiargs, i);
2369 if (phi1op == VN_TOP || phi2op == VN_TOP)
2370 continue;
2371 if (!expressions_equal_p (phi1op, phi2op))
2372 return false;
2374 return true;
2376 return false;
2379 static VEC(tree, heap) *shared_lookup_phiargs;
2381 /* Lookup PHI in the current hash table, and return the resulting
2382 value number if it exists in the hash table. Return NULL_TREE if
2383 it does not exist in the hash table. */
2385 static tree
2386 vn_phi_lookup (gimple phi)
2388 void **slot;
2389 struct vn_phi_s vp1;
2390 unsigned i;
2392 VEC_truncate (tree, shared_lookup_phiargs, 0);
2394 /* Canonicalize the SSA_NAME's to their value number. */
2395 for (i = 0; i < gimple_phi_num_args (phi); i++)
2397 tree def = PHI_ARG_DEF (phi, i);
2398 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2399 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
2401 vp1.phiargs = shared_lookup_phiargs;
2402 vp1.block = gimple_bb (phi);
2403 vp1.hashcode = vn_phi_compute_hash (&vp1);
2404 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
2405 NO_INSERT);
2406 if (!slot && current_info == optimistic_info)
2407 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
2408 NO_INSERT);
2409 if (!slot)
2410 return NULL_TREE;
2411 return ((vn_phi_t)*slot)->result;
2414 /* Insert PHI into the current hash table with a value number of
2415 RESULT. */
2417 static vn_phi_t
2418 vn_phi_insert (gimple phi, tree result)
2420 void **slot;
2421 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2422 unsigned i;
2423 VEC (tree, heap) *args = NULL;
2425 /* Canonicalize the SSA_NAME's to their value number. */
2426 for (i = 0; i < gimple_phi_num_args (phi); i++)
2428 tree def = PHI_ARG_DEF (phi, i);
2429 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2430 VEC_safe_push (tree, heap, args, def);
2432 vp1->value_id = VN_INFO (result)->value_id;
2433 vp1->phiargs = args;
2434 vp1->block = gimple_bb (phi);
2435 vp1->result = result;
2436 vp1->hashcode = vn_phi_compute_hash (vp1);
2438 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
2439 INSERT);
2441 /* Because we iterate over phi operations more than once, it's
2442 possible the slot might already exist here, hence no assert. */
2443 *slot = vp1;
2444 return vp1;
2448 /* Print the set of components in the strongly connected component SCC to OUT. */
2450 static void
2451 print_scc (FILE *out, VEC (tree, heap) *scc)
2453 tree var;
2454 unsigned int i;
2456 fprintf (out, "SCC consists of: ");
2457 FOR_EACH_VEC_ELT (tree, scc, i, var)
2459 print_generic_expr (out, var, 0);
2460 fprintf (out, " ");
2462 fprintf (out, "\n");
2465 /* Set the value number of FROM to TO, return true if it has changed
2466 as a result. */
2468 static inline bool
2469 set_ssa_val_to (tree from, tree to)
2471 tree currval = SSA_VAL (from);
2473 if (from != to)
2475 if (currval == from)
2477 if (dump_file && (dump_flags & TDF_DETAILS))
2479 fprintf (dump_file, "Not changing value number of ");
2480 print_generic_expr (dump_file, from, 0);
2481 fprintf (dump_file, " from VARYING to ");
2482 print_generic_expr (dump_file, to, 0);
2483 fprintf (dump_file, "\n");
2485 return false;
2487 else if (TREE_CODE (to) == SSA_NAME
2488 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2489 to = from;
2492 /* The only thing we allow as value numbers are VN_TOP, ssa_names
2493 and invariants. So assert that here. */
2494 gcc_assert (to != NULL_TREE
2495 && (to == VN_TOP
2496 || TREE_CODE (to) == SSA_NAME
2497 || is_gimple_min_invariant (to)));
2499 if (dump_file && (dump_flags & TDF_DETAILS))
2501 fprintf (dump_file, "Setting value number of ");
2502 print_generic_expr (dump_file, from, 0);
2503 fprintf (dump_file, " to ");
2504 print_generic_expr (dump_file, to, 0);
2507 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
2509 VN_INFO (from)->valnum = to;
2510 if (dump_file && (dump_flags & TDF_DETAILS))
2511 fprintf (dump_file, " (changed)\n");
2512 return true;
2514 if (dump_file && (dump_flags & TDF_DETAILS))
2515 fprintf (dump_file, "\n");
2516 return false;
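/* Lattice sketch: a value number only ever moves downwards,

     VN_TOP  ->  constant or other SSA name  ->  the name itself,

   and the early return above keeps a name that already hit bottom
   (SSA_VAL (from) == from, i.e. VARYING) from being lifted again
   during optimistic iteration.  */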
2519 /* Set all definitions in STMT to value number to themselves.
2520 Return true if a value number changed. */
2522 static bool
2523 defs_to_varying (gimple stmt)
2525 bool changed = false;
2526 ssa_op_iter iter;
2527 def_operand_p defp;
2529 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2531 tree def = DEF_FROM_PTR (defp);
2533 VN_INFO (def)->use_processed = true;
2534 changed |= set_ssa_val_to (def, def);
2536 return changed;
2539 static bool expr_has_constants (tree expr);
2540 static tree valueize_expr (tree expr);
2542 /* Visit a copy between LHS and RHS, return true if the value number
2543 changed. */
2545 static bool
2546 visit_copy (tree lhs, tree rhs)
2548 /* Follow chains of copies to their destination. */
2549 while (TREE_CODE (rhs) == SSA_NAME
2550 && SSA_VAL (rhs) != rhs)
2551 rhs = SSA_VAL (rhs);
2553 /* The copy may have a more interesting constant-filled expression
2554 (we don't, since we know our RHS is just an SSA name). */
2555 if (TREE_CODE (rhs) == SSA_NAME)
2557 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2558 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2561 return set_ssa_val_to (lhs, rhs);
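/* Example (illustrative): with b_2 already value numbered to a_1,
   visiting the copy

     c_3 = b_2;

   follows the chain above and value numbers c_3 directly to a_1,
   never to the intermediate b_2.  */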
2564 /* Visit a nary operator RHS, value number it, and return true if the
2565 value number of LHS has changed as a result. */
2567 static bool
2568 visit_nary_op (tree lhs, gimple stmt)
2570 bool changed = false;
2571 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2573 if (result)
2574 changed = set_ssa_val_to (lhs, result);
2575 else
2577 changed = set_ssa_val_to (lhs, lhs);
2578 vn_nary_op_insert_stmt (stmt, lhs);
2581 return changed;
2584 /* Visit a call STMT storing into LHS. Return true if the value number
2585 of the LHS has changed as a result. */
2587 static bool
2588 visit_reference_op_call (tree lhs, gimple stmt)
2590 bool changed = false;
2591 struct vn_reference_s vr1;
2592 tree result;
2593 tree vuse = gimple_vuse (stmt);
2595 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2596 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2597 vr1.type = gimple_expr_type (stmt);
2598 vr1.set = 0;
2599 vr1.hashcode = vn_reference_compute_hash (&vr1);
2600 result = vn_reference_lookup_1 (&vr1, NULL);
2601 if (result)
2603 changed = set_ssa_val_to (lhs, result);
2604 if (TREE_CODE (result) == SSA_NAME
2605 && VN_INFO (result)->has_constants)
2606 VN_INFO (lhs)->has_constants = true;
2608 else
2610 void **slot;
2611 vn_reference_t vr2;
2612 changed = set_ssa_val_to (lhs, lhs);
2613 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2614 vr2->vuse = vr1.vuse;
2615 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2616 vr2->type = vr1.type;
2617 vr2->set = vr1.set;
2618 vr2->hashcode = vr1.hashcode;
2619 vr2->result = lhs;
2620 slot = htab_find_slot_with_hash (current_info->references,
2621 vr2, vr2->hashcode, INSERT);
2622 if (*slot)
2623 free_reference (*slot);
2624 *slot = vr2;
2627 return changed;
2630 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2631 and return true if the value number of the LHS has changed as a result. */
2633 static bool
2634 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2636 bool changed = false;
2637 tree last_vuse;
2638 tree result;
2640 last_vuse = gimple_vuse (stmt);
2641 last_vuse_ptr = &last_vuse;
2642 result = vn_reference_lookup (op, gimple_vuse (stmt),
2643 default_vn_walk_kind, NULL);
2644 last_vuse_ptr = NULL;
2646 /* If we have a VCE, try looking up its operand as it might be stored in
2647 a different type. */
2648 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2649 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2650 default_vn_walk_kind, NULL);
2652 /* We handle type-punning through unions by value-numbering based
2653 on offset and size of the access. Be prepared to handle a
2654 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
2655 if (result
2656 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2658 /* We will be setting the value number of lhs to the value number
2659 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2660 So first simplify and lookup this expression to see if it
2661 is already available. */
2662 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2663 if ((CONVERT_EXPR_P (val)
2664 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2665 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2667 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2668 if ((CONVERT_EXPR_P (tem)
2669 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2670 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2671 TREE_TYPE (val), tem)))
2672 val = tem;
2674 result = val;
2675 if (!is_gimple_min_invariant (val)
2676 && TREE_CODE (val) != SSA_NAME)
2677 result = vn_nary_op_lookup (val, NULL);
2678 /* If the expression is not yet available, value-number lhs to
2679 a new SSA_NAME we create. */
2680 if (!result)
2682 result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
2683 /* Initialize value-number information properly. */
2684 VN_INFO_GET (result)->valnum = result;
2685 VN_INFO (result)->value_id = get_next_value_id ();
2686 VN_INFO (result)->expr = val;
2687 VN_INFO (result)->has_constants = expr_has_constants (val);
2688 VN_INFO (result)->needs_insertion = true;
2689 /* As all "inserted" statements are singleton SCCs, insert
2690 into the valid table. This is strictly needed to
2691 avoid re-generating new value SSA_NAMEs for the same
2692 expression during SCC iteration over and over (the
2693 optimistic table gets cleared after each iteration).
2694 We do not need to insert into the optimistic table, as
2695 lookups there will fall back to the valid table. */
2696 if (current_info == optimistic_info)
2698 current_info = valid_info;
2699 vn_nary_op_insert (val, result);
2700 current_info = optimistic_info;
2702 else
2703 vn_nary_op_insert (val, result);
2704 if (dump_file && (dump_flags & TDF_DETAILS))
2706 fprintf (dump_file, "Inserting name ");
2707 print_generic_expr (dump_file, result, 0);
2708 fprintf (dump_file, " for expression ");
2709 print_generic_expr (dump_file, val, 0);
2710 fprintf (dump_file, "\n");
2715 if (result)
2717 changed = set_ssa_val_to (lhs, result);
2718 if (TREE_CODE (result) == SSA_NAME
2719 && VN_INFO (result)->has_constants)
2721 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2722 VN_INFO (lhs)->has_constants = true;
2725 else
2727 changed = set_ssa_val_to (lhs, lhs);
2728 vn_reference_insert (op, lhs, last_vuse);
2731 return changed;
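/* Type-punning example for the VIEW_CONVERT_EXPR handling above
   (illustrative):

     union { int i; float f; } u;
     u.f = f_1;
     i_2 = u.i;

   The load of u.i matches the store to u.f by offset and size, and
   the type mismatch is repaired by value numbering i_2 to
   VIEW_CONVERT_EXPR <int> (f_1), creating a new SSA name for that
   expression if none is available yet.  */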
2735 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2736 and return true if the value number of the LHS has changed as a result. */
2738 static bool
2739 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2741 bool changed = false;
2742 tree result;
2743 bool resultsame = false;
2745 /* First we want to lookup using the *vuses* from the store and see
2746 whether the last store to this location with the same address
2747 had the same value.
2749 The vuses represent the memory state before the store. If the
2750 memory state, address, and value of the store are the same as the
2751 last store to this location, then this store will produce the
2752 same memory state as that store.
2754 In this case the vdef versions for this store are value numbered to those
2755 vuse versions, since they represent the same memory state after
2756 this store.
2758 Otherwise, the vdefs for the store are used when inserting into
2759 the table, since the store generates a new memory state. */
2761 result = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_NOWALK, NULL);
2763 if (result)
2765 if (TREE_CODE (result) == SSA_NAME)
2766 result = SSA_VAL (result);
2767 if (TREE_CODE (op) == SSA_NAME)
2768 op = SSA_VAL (op);
2769 resultsame = expressions_equal_p (result, op);
2772 if (!result || !resultsame)
2774 tree vdef;
2776 if (dump_file && (dump_flags & TDF_DETAILS))
2778 fprintf (dump_file, "No store match\n");
2779 fprintf (dump_file, "Value numbering store ");
2780 print_generic_expr (dump_file, lhs, 0);
2781 fprintf (dump_file, " to ");
2782 print_generic_expr (dump_file, op, 0);
2783 fprintf (dump_file, "\n");
2785 /* Have to set value numbers before insert, since insert is
2786 going to valueize the references in-place. */
2787 if ((vdef = gimple_vdef (stmt)))
2789 VN_INFO (vdef)->use_processed = true;
2790 changed |= set_ssa_val_to (vdef, vdef);
2793 /* Do not insert structure copies into the tables. */
2794 if (is_gimple_min_invariant (op)
2795 || is_gimple_reg (op))
2796 vn_reference_insert (lhs, op, vdef);
2798 else
2800 /* We had a match, so value number the vdef to have the value
2801 number of the vuse it came from. */
2802 tree def, use;
2804 if (dump_file && (dump_flags & TDF_DETAILS))
2805 fprintf (dump_file, "Store matched earlier value,"
2806 "value numbering store vdefs to matching vuses.\n");
2808 def = gimple_vdef (stmt);
2809 use = gimple_vuse (stmt);
2811 VN_INFO (def)->use_processed = true;
2812 changed |= set_ssa_val_to (def, SSA_VAL (use));
2815 return changed;
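/* Example (illustrative): in

     # .MEM_3 = VDEF <.MEM_2>
     a = x_1;
     # .MEM_5 = VDEF <.MEM_3>
     a = x_1;

   the second store finds the first via the VUSE lookup and is
   recognized as redundant, so .MEM_5 is value numbered to
   SSA_VAL (.MEM_3) rather than to itself.  */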
2818 /* Visit and value number PHI, return true if the value number
2819 changed. */
2821 static bool
2822 visit_phi (gimple phi)
2824 bool changed = false;
2825 tree result;
2826 tree sameval = VN_TOP;
2827 bool allsame = true;
2828 unsigned i;
2830 /* TODO: We could check for this in init_scc_vn, and replace this
2831 with a gcc_assert. */
2832 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2833 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2835 /* See if all non-TOP arguments have the same value. TOP is
2836 equivalent to everything, so we can ignore it. */
2837 for (i = 0; i < gimple_phi_num_args (phi); i++)
2839 tree def = PHI_ARG_DEF (phi, i);
2841 if (TREE_CODE (def) == SSA_NAME)
2842 def = SSA_VAL (def);
2843 if (def == VN_TOP)
2844 continue;
2845 if (sameval == VN_TOP)
2847 sameval = def;
2849 else
2851 if (!expressions_equal_p (def, sameval))
2853 allsame = false;
2854 break;
2859 /* If all arguments value numbered to the same value, the phi
2860 node has that value. */
2861 if (allsame)
2863 if (is_gimple_min_invariant (sameval))
2865 VN_INFO (PHI_RESULT (phi))->has_constants = true;
2866 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2868 else
2870 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2871 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2874 if (TREE_CODE (sameval) == SSA_NAME)
2875 return visit_copy (PHI_RESULT (phi), sameval);
2877 return set_ssa_val_to (PHI_RESULT (phi), sameval);
2880 /* Otherwise, see if it is equivalent to a phi node in this block. */
2881 result = vn_phi_lookup (phi);
2882 if (result)
2884 if (TREE_CODE (result) == SSA_NAME)
2885 changed = visit_copy (PHI_RESULT (phi), result);
2886 else
2887 changed = set_ssa_val_to (PHI_RESULT (phi), result);
2889 else
2891 vn_phi_insert (phi, PHI_RESULT (phi));
2892 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2893 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2894 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2897 return changed;
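/* Examples (illustrative): for

     x_1 = PHI <a_2 (3), a_2 (4)>

   all arguments agree, so x_1 becomes a copy of a_2.  For

     y_5 = PHI <b_6 (3), c_7 (4)>
     z_8 = PHI <b_6 (3), c_7 (4)>

   in the same block, the second PHI is found by vn_phi_lookup and
   z_8 is value numbered to y_5.  */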
2900 /* Return true if EXPR contains constants. */
2902 static bool
2903 expr_has_constants (tree expr)
2905 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2907 case tcc_unary:
2908 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2910 case tcc_binary:
2911 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2912 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2913 /* Constants inside reference ops are rarely interesting, but
2914 it can take a lot of looking to find them. */
2915 case tcc_reference:
2916 case tcc_declaration:
2917 return false;
2918 default:
2919 return is_gimple_min_invariant (expr);
2921 return false;
2924 /* Return true if STMT contains constants. */
2926 static bool
2927 stmt_has_constants (gimple stmt)
2929 if (gimple_code (stmt) != GIMPLE_ASSIGN)
2930 return false;
2932 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2934 case GIMPLE_UNARY_RHS:
2935 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2937 case GIMPLE_BINARY_RHS:
2938 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2939 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
2940 case GIMPLE_TERNARY_RHS:
2941 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2942 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
2943 || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
2944 case GIMPLE_SINGLE_RHS:
2945 /* Constants inside reference ops are rarely interesting, but
2946 it can take a lot of looking to find them. */
2947 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2948 default:
2949 gcc_unreachable ();
2951 return false;
2954 /* Replace SSA_NAMES in expr with their value numbers, and return the
2955 result.
2956 This is performed in place. */
2958 static tree
2959 valueize_expr (tree expr)
2961 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2963 case tcc_binary:
2964 TREE_OPERAND (expr, 1) = vn_valueize (TREE_OPERAND (expr, 1));
2965 /* Fallthru. */
2966 case tcc_unary:
2967 TREE_OPERAND (expr, 0) = vn_valueize (TREE_OPERAND (expr, 0));
2968 break;
2969 default:;
2971 return expr;
2974 /* Simplify the binary expression RHS, and return the result if
2975 simplified. */
2977 static tree
2978 simplify_binary_expression (gimple stmt)
2980 tree result = NULL_TREE;
2981 tree op0 = gimple_assign_rhs1 (stmt);
2982 tree op1 = gimple_assign_rhs2 (stmt);
2983 enum tree_code code = gimple_assign_rhs_code (stmt);
2985 /* This will not catch every single case we could combine, but will
2986 catch those with constants. The goal here is to simultaneously
2987 combine constants between expressions, but avoid infinite
2988 expansion of expressions during simplification. */
2989 if (TREE_CODE (op0) == SSA_NAME)
2991 if (VN_INFO (op0)->has_constants
2992 || TREE_CODE_CLASS (code) == tcc_comparison
2993 || code == COMPLEX_EXPR)
2994 op0 = valueize_expr (vn_get_expr_for (op0));
2995 else
2996 op0 = vn_valueize (op0);
2999 if (TREE_CODE (op1) == SSA_NAME)
3001 if (VN_INFO (op1)->has_constants
3002 || code == COMPLEX_EXPR)
3003 op1 = valueize_expr (vn_get_expr_for (op1));
3004 else
3005 op1 = vn_valueize (op1);
3008 /* Pointer plus constant can be represented as invariant address.
3009 Do so to allow further propagation, see also tree forwprop. */
3010 if (code == POINTER_PLUS_EXPR
3011 && host_integerp (op1, 1)
3012 && TREE_CODE (op0) == ADDR_EXPR
3013 && is_gimple_min_invariant (op0))
3014 return build_invariant_address (TREE_TYPE (op0),
3015 TREE_OPERAND (op0, 0),
3016 TREE_INT_CST_LOW (op1));
3018 /* Avoid folding if nothing changed. */
3019 if (op0 == gimple_assign_rhs1 (stmt)
3020 && op1 == gimple_assign_rhs2 (stmt))
3021 return NULL_TREE;
3023 fold_defer_overflow_warnings ();
3025 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3026 if (result)
3027 STRIP_USELESS_TYPE_CONVERSION (result);
3029 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3030 stmt, 0);
3032 /* Make sure result is not a complex expression consisting
3033 of operators of operators (i.e. (a + b) + (a + c)).
3034 Otherwise, we will end up with unbounded expressions if
3035 fold does anything at all. */
3036 if (result && valid_gimple_rhs_p (result))
3037 return result;
3039 return NULL_TREE;
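/* Example of the POINTER_PLUS_EXPR case above (illustrative): for

     p_1 = &a;
     q_2 = p_1 + 4;

   OP0 valueizes to &a and the RHS becomes an invariant address,
   roughly &MEM[(void *)&a + 4B], which later lookups can match
   without knowing p_1.  */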
3042 /* Simplify the unary expression RHS, and return the result if
3043 simplified. */
3045 static tree
3046 simplify_unary_expression (gimple stmt)
3048 tree result = NULL_TREE;
3049 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3050 enum tree_code code = gimple_assign_rhs_code (stmt);
3052 /* We handle some tcc_reference codes here that are all
3053 GIMPLE_ASSIGN_SINGLE codes. */
3054 if (code == REALPART_EXPR
3055 || code == IMAGPART_EXPR
3056 || code == VIEW_CONVERT_EXPR
3057 || code == BIT_FIELD_REF)
3058 op0 = TREE_OPERAND (op0, 0);
3060 if (TREE_CODE (op0) != SSA_NAME)
3061 return NULL_TREE;
3063 orig_op0 = op0;
3064 if (VN_INFO (op0)->has_constants)
3065 op0 = valueize_expr (vn_get_expr_for (op0));
3066 else if (CONVERT_EXPR_CODE_P (code)
3067 || code == REALPART_EXPR
3068 || code == IMAGPART_EXPR
3069 || code == VIEW_CONVERT_EXPR
3070 || code == BIT_FIELD_REF)
3072 /* We want to do tree-combining on conversion-like expressions.
3073 Make sure we feed only SSA_NAMEs or constants to fold though. */
3074 tree tem = valueize_expr (vn_get_expr_for (op0));
3075 if (UNARY_CLASS_P (tem)
3076 || BINARY_CLASS_P (tem)
3077 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3078 || TREE_CODE (tem) == SSA_NAME
3079 || TREE_CODE (tem) == CONSTRUCTOR
3080 || is_gimple_min_invariant (tem))
3081 op0 = tem;
3084 /* Avoid folding if nothing changed, but remember the expression. */
3085 if (op0 == orig_op0)
3086 return NULL_TREE;
3088 if (code == BIT_FIELD_REF)
3090 tree rhs = gimple_assign_rhs1 (stmt);
3091 result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3092 op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3094 else
3095 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3096 if (result)
3098 STRIP_USELESS_TYPE_CONVERSION (result);
3099 if (valid_gimple_rhs_p (result))
3100 return result;
3103 return NULL_TREE;
3106 /* Try to simplify RHS using equivalences and constant folding. */
3108 static tree
3109 try_to_simplify (gimple stmt)
3111 enum tree_code code = gimple_assign_rhs_code (stmt);
3112 tree tem;
3114 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
3115 in this case; there is no point in doing extra work. */
3116 if (code == SSA_NAME)
3117 return NULL_TREE;
3119 /* First try constant folding based on our current lattice. */
3120 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize);
3121 if (tem
3122 && (TREE_CODE (tem) == SSA_NAME
3123 || is_gimple_min_invariant (tem)))
3124 return tem;
3126 /* If that didn't work try combining multiple statements. */
3127 switch (TREE_CODE_CLASS (code))
3129 case tcc_reference:
3130 /* Fallthrough for some unary codes that can operate on registers. */
3131 if (!(code == REALPART_EXPR
3132 || code == IMAGPART_EXPR
3133 || code == VIEW_CONVERT_EXPR
3134 || code == BIT_FIELD_REF))
3135 break;
3136 /* We could do a little more with unary ops, if they expand
3137 into binary ops, but it's debatable whether it is worth it. */
3138 case tcc_unary:
3139 return simplify_unary_expression (stmt);
3141 case tcc_comparison:
3142 case tcc_binary:
3143 return simplify_binary_expression (stmt);
3145 default:
3146 break;
3149 return NULL_TREE;
3152 /* Visit and value number USE, return true if the value number
3153 changed. */
3155 static bool
3156 visit_use (tree use)
3158 bool changed = false;
3159 gimple stmt = SSA_NAME_DEF_STMT (use);
3161 VN_INFO (use)->use_processed = true;
3163 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3164 if (dump_file && (dump_flags & TDF_DETAILS)
3165 && !SSA_NAME_IS_DEFAULT_DEF (use))
3167 fprintf (dump_file, "Value numbering ");
3168 print_generic_expr (dump_file, use, 0);
3169 fprintf (dump_file, " stmt = ");
3170 print_gimple_stmt (dump_file, stmt, 0, 0);
3173 /* Handle uninitialized uses. */
3174 if (SSA_NAME_IS_DEFAULT_DEF (use))
3175 changed = set_ssa_val_to (use, use);
3176 else
3178 if (gimple_code (stmt) == GIMPLE_PHI)
3179 changed = visit_phi (stmt);
3180 else if (!gimple_has_lhs (stmt)
3181 || gimple_has_volatile_ops (stmt)
3182 || stmt_could_throw_p (stmt))
3183 changed = defs_to_varying (stmt);
3184 else if (is_gimple_assign (stmt))
3186 enum tree_code code = gimple_assign_rhs_code (stmt);
3187 tree lhs = gimple_assign_lhs (stmt);
3188 tree rhs1 = gimple_assign_rhs1 (stmt);
3189 tree simplified;
3191 /* Shortcut for copies. Simplifying copies is pointless,
3192 since we copy the expression and value they represent. */
3193 if (code == SSA_NAME
3194 && TREE_CODE (lhs) == SSA_NAME)
3196 changed = visit_copy (lhs, rhs1);
3197 goto done;
3199 simplified = try_to_simplify (stmt);
3200 if (simplified)
3202 if (dump_file && (dump_flags & TDF_DETAILS))
3204 fprintf (dump_file, "RHS ");
3205 print_gimple_expr (dump_file, stmt, 0, 0);
3206 fprintf (dump_file, " simplified to ");
3207 print_generic_expr (dump_file, simplified, 0);
3208 if (TREE_CODE (lhs) == SSA_NAME)
3209 fprintf (dump_file, " has constants %d\n",
3210 expr_has_constants (simplified));
3211 else
3212 fprintf (dump_file, "\n");
3215 /* Setting value numbers to constants will occasionally
3216 screw up phi congruence because constants are not
3217 uniquely associated with a single ssa name that can be
3218 looked up. */
3219 if (simplified
3220 && is_gimple_min_invariant (simplified)
3221 && TREE_CODE (lhs) == SSA_NAME)
3223 VN_INFO (lhs)->expr = simplified;
3224 VN_INFO (lhs)->has_constants = true;
3225 changed = set_ssa_val_to (lhs, simplified);
3226 goto done;
3228 else if (simplified
3229 && TREE_CODE (simplified) == SSA_NAME
3230 && TREE_CODE (lhs) == SSA_NAME)
3232 changed = visit_copy (lhs, simplified);
3233 goto done;
3235 else if (simplified)
3237 if (TREE_CODE (lhs) == SSA_NAME)
3239 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3240 /* We have to unshare the expression or else
3241 valueizing may change the IL stream. */
3242 VN_INFO (lhs)->expr = unshare_expr (simplified);
3245 else if (stmt_has_constants (stmt)
3246 && TREE_CODE (lhs) == SSA_NAME)
3247 VN_INFO (lhs)->has_constants = true;
3248 else if (TREE_CODE (lhs) == SSA_NAME)
3250 /* We reset expr and constantness here because we may
3251 have been value numbering optimistically, and
3252 iterating. They may become non-constant in this case,
3253 even if they were optimistically constant. */
3255 VN_INFO (lhs)->has_constants = false;
3256 VN_INFO (lhs)->expr = NULL_TREE;
3259 if ((TREE_CODE (lhs) == SSA_NAME
3260 /* We can substitute SSA_NAMEs that are live over
3261 abnormal edges with their constant value. */
3262 && !(gimple_assign_copy_p (stmt)
3263 && is_gimple_min_invariant (rhs1))
3264 && !(simplified
3265 && is_gimple_min_invariant (simplified))
3266 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3267 /* Stores or copies from SSA_NAMEs that are live over
3268 abnormal edges are a problem. */
3269 || (code == SSA_NAME
3270 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3271 changed = defs_to_varying (stmt);
3272 else if (REFERENCE_CLASS_P (lhs)
3273 || DECL_P (lhs))
3274 changed = visit_reference_op_store (lhs, rhs1, stmt);
3275 else if (TREE_CODE (lhs) == SSA_NAME)
3277 if ((gimple_assign_copy_p (stmt)
3278 && is_gimple_min_invariant (rhs1))
3279 || (simplified
3280 && is_gimple_min_invariant (simplified)))
3282 VN_INFO (lhs)->has_constants = true;
3283 if (simplified)
3284 changed = set_ssa_val_to (lhs, simplified);
3285 else
3286 changed = set_ssa_val_to (lhs, rhs1);
3288 else
3290 switch (get_gimple_rhs_class (code))
3292 case GIMPLE_UNARY_RHS:
3293 case GIMPLE_BINARY_RHS:
3294 case GIMPLE_TERNARY_RHS:
3295 changed = visit_nary_op (lhs, stmt);
3296 break;
3297 case GIMPLE_SINGLE_RHS:
3298 switch (TREE_CODE_CLASS (code))
3300 case tcc_reference:
3301 /* VOP-less references can go through the unary case. */
3302 if ((code == REALPART_EXPR
3303 || code == IMAGPART_EXPR
3304 || code == VIEW_CONVERT_EXPR
3305 || code == BIT_FIELD_REF)
3306 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
3308 changed = visit_nary_op (lhs, stmt);
3309 break;
3311 /* Fallthrough. */
3312 case tcc_declaration:
3313 changed = visit_reference_op_load (lhs, rhs1, stmt);
3314 break;
3315 default:
3316 if (code == ADDR_EXPR)
3318 changed = visit_nary_op (lhs, stmt);
3319 break;
3321 else if (code == CONSTRUCTOR)
3323 changed = visit_nary_op (lhs, stmt);
3324 break;
3326 changed = defs_to_varying (stmt);
3328 break;
3329 default:
3330 changed = defs_to_varying (stmt);
3331 break;
3335 else
3336 changed = defs_to_varying (stmt);
3338 else if (is_gimple_call (stmt))
3340 tree lhs = gimple_call_lhs (stmt);
3342 /* ??? We could try to simplify calls. */
3344 if (stmt_has_constants (stmt)
3345 && TREE_CODE (lhs) == SSA_NAME)
3346 VN_INFO (lhs)->has_constants = true;
3347 else if (TREE_CODE (lhs) == SSA_NAME)
3349 /* We reset expr and constantness here because we may
3350 have been value numbering optimistically, and
3351 iterating. They may become non-constant in this case,
3352 even if they were optimistically constant. */
3353 VN_INFO (lhs)->has_constants = false;
3354 VN_INFO (lhs)->expr = NULL_TREE;
3357 if (TREE_CODE (lhs) == SSA_NAME
3358 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3359 changed = defs_to_varying (stmt);
3360 /* ??? We should handle stores from calls. */
3361 else if (TREE_CODE (lhs) == SSA_NAME)
3363 if (!gimple_call_internal_p (stmt)
3364 && gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
3365 changed = visit_reference_op_call (lhs, stmt);
3366 else
3367 changed = defs_to_varying (stmt);
3369 else
3370 changed = defs_to_varying (stmt);
3373 done:
3374 return changed;
3377 /* Compare two operands by reverse postorder index. */
3379 static int
3380 compare_ops (const void *pa, const void *pb)
3382 const tree opa = *((const tree *)pa);
3383 const tree opb = *((const tree *)pb);
3384 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3385 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3386 basic_block bba;
3387 basic_block bbb;
3389 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3390 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3391 else if (gimple_nop_p (opstmta))
3392 return -1;
3393 else if (gimple_nop_p (opstmtb))
3394 return 1;
3396 bba = gimple_bb (opstmta);
3397 bbb = gimple_bb (opstmtb);
3399 if (!bba && !bbb)
3400 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3401 else if (!bba)
3402 return -1;
3403 else if (!bbb)
3404 return 1;
3406 if (bba == bbb)
3408 if (gimple_code (opstmta) == GIMPLE_PHI
3409 && gimple_code (opstmtb) == GIMPLE_PHI)
3410 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3411 else if (gimple_code (opstmta) == GIMPLE_PHI)
3412 return -1;
3413 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3414 return 1;
3415 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3416 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3417 else
3418 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3420 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3423 /* Sort an array containing members of a strongly connected component
3424 SCC so that the members are ordered by RPO number.
3425 This means that when the sort is complete, iterating through the
3426 array will give you the members in RPO order. */
3428 static void
3429 sort_scc (VEC (tree, heap) *scc)
3431 VEC_qsort (tree, scc, compare_ops);
3436 /* Insert the no-longer-used nary operation ONARY into the hash tables INFO. */
3436 static void
3437 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3439 size_t size = sizeof_vn_nary_op (onary->length);
3440 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3441 &info->nary_obstack);
3442 memcpy (nary, onary, size);
3443 vn_nary_op_insert_into (nary, info->nary, false);
3448 /* Insert the no-longer-used phi OPHI into the hash tables INFO. */
3448 static void
3449 copy_phi (vn_phi_t ophi, vn_tables_t info)
3451 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3452 void **slot;
3453 memcpy (phi, ophi, sizeof (*phi));
3454 ophi->phiargs = NULL;
3455 slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
3456 gcc_assert (!*slot);
3457 *slot = phi;
3462 /* Insert the no-longer-used reference OREF into the hash tables INFO. */
3462 static void
3463 copy_reference (vn_reference_t oref, vn_tables_t info)
3465 vn_reference_t ref;
3466 void **slot;
3467 ref = (vn_reference_t) pool_alloc (info->references_pool);
3468 memcpy (ref, oref, sizeof (*ref));
3469 oref->operands = NULL;
3470 slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
3471 INSERT);
3472 if (*slot)
3473 free_reference (*slot);
3474 *slot = ref;
3477 /* Process a strongly connected component in the SSA graph. */
3479 static void
3480 process_scc (VEC (tree, heap) *scc)
3482 tree var;
3483 unsigned int i;
3484 unsigned int iterations = 0;
3485 bool changed = true;
3486 htab_iterator hi;
3487 vn_nary_op_t nary;
3488 vn_phi_t phi;
3489 vn_reference_t ref;
3491 /* If the SCC has a single member, just visit it. */
3492 if (VEC_length (tree, scc) == 1)
3494 tree use = VEC_index (tree, scc, 0);
3495 if (VN_INFO (use)->use_processed)
3496 return;
3497 /* We need to make sure it doesn't form a cycle itself, which can
3498 happen for self-referential PHI nodes. In that case we would
3499 end up inserting an expression with VN_TOP operands into the
3500 valid table which makes us derive bogus equivalences later.
3501 The cheapest way to check this is to assume it for all PHI nodes. */
3502 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3503 /* Fallthru to iteration. */ ;
3504 else
3506 visit_use (use);
3507 return;
3511 /* Iterate over the SCC with the optimistic table until it stops
3512 changing. */
3513 current_info = optimistic_info;
3514 while (changed)
3516 changed = false;
3517 iterations++;
3518 if (dump_file && (dump_flags & TDF_DETAILS))
3519 fprintf (dump_file, "Starting iteration %d\n", iterations);
3520 /* As we are value-numbering optimistically we have to
3521 clear the expression tables and the simplified expressions
3522 in each iteration until we converge. */
3523 htab_empty (optimistic_info->nary);
3524 htab_empty (optimistic_info->phis);
3525 htab_empty (optimistic_info->references);
3526 obstack_free (&optimistic_info->nary_obstack, NULL);
3527 gcc_obstack_init (&optimistic_info->nary_obstack);
3528 empty_alloc_pool (optimistic_info->phis_pool);
3529 empty_alloc_pool (optimistic_info->references_pool);
3530 FOR_EACH_VEC_ELT (tree, scc, i, var)
3531 VN_INFO (var)->expr = NULL_TREE;
3532 FOR_EACH_VEC_ELT (tree, scc, i, var)
3533 changed |= visit_use (var);
3536 statistics_histogram_event (cfun, "SCC iterations", iterations);
3538 /* Finally, copy the contents of the no longer used optimistic
3539 table to the valid table. */
3540 FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
3541 copy_nary (nary, valid_info);
3542 FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
3543 copy_phi (phi, valid_info);
3544 FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
3545 copy_reference (ref, valid_info);
3547 current_info = valid_info;
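/* Why optimistic iteration pays off (illustrative): for the cycle

     i_1 = PHI <0 (preheader), i_2 (latch)>
     i_2 = i_1 + 0;

   the first iteration optimistically sets i_1 to 0 (the only
   non-VN_TOP argument), folds i_2 to 0 as well, and the next
   iteration confirms the fixed point, so both names get value
   number 0.  A non-iterating pessimistic scheme would have to
   leave both VARYING.  */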
3550 DEF_VEC_O(ssa_op_iter);
3551 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
3553 /* Pop the components of the found SCC for NAME off the SCC stack
3554 and process them. Returns true if all went well, false if
3555 we ran into resource limits. */
3557 static bool
3558 extract_and_process_scc_for_name (tree name)
3560 VEC (tree, heap) *scc = NULL;
3561 tree x;
3563 /* Found an SCC, pop the components off the SCC stack and
3564 process them. */
3567 x = VEC_pop (tree, sccstack);
3569 VN_INFO (x)->on_sccstack = false;
3570 VEC_safe_push (tree, heap, scc, x);
3571 } while (x != name);
3573 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
3574 if (VEC_length (tree, scc)
3575 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3577 if (dump_file)
3578 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3579 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
3580 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3581 return false;
3584 if (VEC_length (tree, scc) > 1)
3585 sort_scc (scc);
3587 if (dump_file && (dump_flags & TDF_DETAILS))
3588 print_scc (dump_file, scc);
3590 process_scc (scc);
3592 VEC_free (tree, heap, scc);
3594 return true;
3597 /* Depth first search on NAME to discover and process SCC's in the SSA
3598 graph.
3599 Execution of this algorithm relies on the fact that the SCC's are
3600 popped off the stack in topological order.
3601 Returns true if successful, false if we stopped processing SCC's due
3602 to resource constraints. */
3604 static bool
3605 DFS (tree name)
3607 VEC(ssa_op_iter, heap) *itervec = NULL;
3608 VEC(tree, heap) *namevec = NULL;
3609 use_operand_p usep = NULL;
3610 gimple defstmt;
3611 tree use;
3612 ssa_op_iter iter;
3614 start_over:
3615 /* SCC info */
3616 VN_INFO (name)->dfsnum = next_dfs_num++;
3617 VN_INFO (name)->visited = true;
3618 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3620 VEC_safe_push (tree, heap, sccstack, name);
3621 VN_INFO (name)->on_sccstack = true;
3622 defstmt = SSA_NAME_DEF_STMT (name);
3624 /* Recursively DFS on our operands, looking for SCC's. */
3625 if (!gimple_nop_p (defstmt))
3627 /* Push a new iterator. */
3628 if (gimple_code (defstmt) == GIMPLE_PHI)
3629 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3630 else
3631 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3633 else
3634 clear_and_done_ssa_iter (&iter);
3636 while (1)
3638 /* If we are done processing uses of a name, go up the stack
3639 of iterators and process SCCs as we found them. */
3640 if (op_iter_done (&iter))
3642 /* See if we found an SCC. */
3643 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3644 if (!extract_and_process_scc_for_name (name))
3646 VEC_free (tree, heap, namevec);
3647 VEC_free (ssa_op_iter, heap, itervec);
3648 return false;
3651 /* Check if we are done. */
3652 if (VEC_empty (tree, namevec))
3654 VEC_free (tree, heap, namevec);
3655 VEC_free (ssa_op_iter, heap, itervec);
3656 return true;
3659 /* Restore the last use walker and continue walking there. */
3660 use = name;
3661 name = VEC_pop (tree, namevec);
3662 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
3663 sizeof (ssa_op_iter));
3664 VEC_pop (ssa_op_iter, itervec);
3665 goto continue_walking;
3668 use = USE_FROM_PTR (usep);
3670 /* Since we handle phi nodes, we will sometimes get
3671 invariants in the use expression. */
3672 if (TREE_CODE (use) == SSA_NAME)
3674 if (! (VN_INFO (use)->visited))
3676 /* Recurse by pushing the current use walking state on
3677 the stack and starting over. */
3678 VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
3679 VEC_safe_push(tree, heap, namevec, name);
3680 name = use;
3681 goto start_over;
3683 continue_walking:
3684 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3685 VN_INFO (use)->low);
3687 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3688 && VN_INFO (use)->on_sccstack)
3690 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3691 VN_INFO (name)->low);
3695 usep = op_iter_next_use (&iter);
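/* The explicit NAMEVEC/ITERVEC stacks above replace recursion so that
   deep SSA graphs cannot overflow the call stack; LOW and DFSNUM
   implement Tarjan's SCC condition, an SCC root being a name with
   low == dfsnum.  */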
3699 /* Allocate a value number table. */
3701 static void
3702 allocate_vn_table (vn_tables_t table)
3704 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3705 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3706 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3707 free_reference);
3709 gcc_obstack_init (&table->nary_obstack);
3710 table->phis_pool = create_alloc_pool ("VN phis",
3711 sizeof (struct vn_phi_s),
3712 30);
3713 table->references_pool = create_alloc_pool ("VN references",
3714 sizeof (struct vn_reference_s),
3715 30);
3718 /* Free a value number table. */
3720 static void
3721 free_vn_table (vn_tables_t table)
3723 htab_delete (table->phis);
3724 htab_delete (table->nary);
3725 htab_delete (table->references);
3726 obstack_free (&table->nary_obstack, NULL);
3727 free_alloc_pool (table->phis_pool);
3728 free_alloc_pool (table->references_pool);
3731 static void
3732 init_scc_vn (void)
3734 size_t i;
3735 int j;
3736 int *rpo_numbers_temp;
3738 calculate_dominance_info (CDI_DOMINATORS);
3739 sccstack = NULL;
3740 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3741 free);
3743 constant_value_ids = BITMAP_ALLOC (NULL);
3745 next_dfs_num = 1;
3746 next_value_id = 1;
3748 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
3749 /* VEC_alloc doesn't actually grow it to the right size; it just
3750 preallocates the space to do so. */
3751 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
3752 gcc_obstack_init (&vn_ssa_aux_obstack);
3754 shared_lookup_phiargs = NULL;
3755 shared_lookup_references = NULL;
3756 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3757 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3758 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3760 /* RPO_NUMBERS_TEMP is an array of the RPO ordering: entry i holds
3761 the index of the i'th block in RPO order. We want to map block
3762 indices to RPO numbers, so we need to invert this array. */
3763 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3764 rpo_numbers[rpo_numbers_temp[j]] = j;
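/* For instance (illustrative), if the RPO order visits bb5, bb3, bb4,
   rpo_numbers_temp is {5, 3, 4} and the loop above yields
   rpo_numbers[5] = 0, rpo_numbers[3] = 1 and rpo_numbers[4] = 2.  */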
3766 XDELETE (rpo_numbers_temp);
3768 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3770 /* Create the VN_INFO structures, and initialize value numbers to
3771 TOP. */
3772 for (i = 0; i < num_ssa_names; i++)
3774 tree name = ssa_name (i);
3775 if (name)
3777 VN_INFO_GET (name)->valnum = VN_TOP;
3778 VN_INFO (name)->expr = NULL_TREE;
3779 VN_INFO (name)->value_id = 0;
3783 renumber_gimple_stmt_uids ();
3785 /* Create the valid and optimistic value numbering tables. */
3786 valid_info = XCNEW (struct vn_tables_s);
3787 allocate_vn_table (valid_info);
3788 optimistic_info = XCNEW (struct vn_tables_s);
3789 allocate_vn_table (optimistic_info);
3792 void
3793 free_scc_vn (void)
3795 size_t i;
3797 htab_delete (constant_to_value_id);
3798 BITMAP_FREE (constant_value_ids);
3799 VEC_free (tree, heap, shared_lookup_phiargs);
3800 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
3801 XDELETEVEC (rpo_numbers);
3803 for (i = 0; i < num_ssa_names; i++)
3805 tree name = ssa_name (i);
3806 if (name
3807 && VN_INFO (name)->needs_insertion)
3808 release_ssa_name (name);
3810 obstack_free (&vn_ssa_aux_obstack, NULL);
3811 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
3813 VEC_free (tree, heap, sccstack);
3814 free_vn_table (valid_info);
3815 XDELETE (valid_info);
3816 free_vn_table (optimistic_info);
3817 XDELETE (optimistic_info);
3820 /* Set *ID if we computed something useful in RESULT. */
3822 static void
3823 set_value_id_for_result (tree result, unsigned int *id)
3825 if (result)
3827 if (TREE_CODE (result) == SSA_NAME)
3828 *id = VN_INFO (result)->value_id;
3829 else if (is_gimple_min_invariant (result))
3830 *id = get_or_alloc_constant_value_id (result);
3834 /* Set the value ids in the valid hash tables. */
3836 static void
3837 set_hashtable_value_ids (void)
3839 htab_iterator hi;
3840 vn_nary_op_t vno;
3841 vn_reference_t vr;
3842 vn_phi_t vp;
3844 /* Now set the value ids of the things we had put in the hash
3845 table. */
3847 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
3848 vno, vn_nary_op_t, hi)
3849 set_value_id_for_result (vno->result, &vno->value_id);
3851 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
3852 vp, vn_phi_t, hi)
3853 set_value_id_for_result (vp->result, &vp->value_id);
3855 FOR_EACH_HTAB_ELEMENT (valid_info->references,
3856 vr, vn_reference_t, hi)
3857 set_value_id_for_result (vr->result, &vr->value_id);
/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies how
   the alias oracle is used to walk memory references during the VN
   process.  */

bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  size_t i;
  tree param;
  bool changed = true;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();
  current_info = valid_info;
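  /* Nothing is known about the incoming values of parameters, so their
     default definitions are value-numbered to themselves.  */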
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    {
      if (gimple_default_def (cfun, param) != NULL)
	{
	  tree def = gimple_default_def (cfun, param);
	  VN_INFO (def)->valnum = def;
	}
    }
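  /* Walk the SSA graph from every name that is still unvisited and has
     uses.  DFS performs the SCC discovery and value numbering; it
     returns false when it hits a resource limit (e.g. an SCC larger
     than the sccvn-max-scc-size param), in which case we give up on
     value numbering entirely.  */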
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->visited == false
	  && !has_zero_uses (name))
	if (!DFS (name))
	  {
	    free_scc_vn ();
	    return false;
	  }
    }
  /* Initialize the value ids.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (info->valnum == name
	  || info->valnum == VN_TOP)
	info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }
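  /* A name whose value is another SSA name inherits that name's value
     id.  Because the table is walked in name order while value chains
     can point in either direction, iterate until no id changes.  */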
  /* Propagate until they stop changing.  */
  while (changed)
    {
      changed = false;
      for (i = 1; i < num_ssa_names; ++i)
	{
	  tree name = ssa_name (i);
	  vn_ssa_aux_t info;
	  if (!name)
	    continue;
	  info = VN_INFO (name);
	  if (TREE_CODE (info->valnum) == SSA_NAME
	      && info->valnum != name
	      && info->value_id != VN_INFO (info->valnum)->value_id)
	    {
	      changed = true;
	      info->value_id = VN_INFO (info->valnum)->value_id;
	    }
	}
    }

  set_hashtable_value_ids ();
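  /* With TDF_DETAILS, dump every name that received a value number
     other than itself.  */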
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);
	  if (name
	      && VN_INFO (name)->visited
	      && SSA_VAL (name) != name)
	    {
	      print_generic_expr (dump_file, name, 0);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, SSA_VAL (name), 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  return true;
}
/* Return a value id larger than any value id handed out so far;
   callers use this to size tables indexed by value id.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}
/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;
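  /* For unary, binary and comparison operations, whether the operation
     itself may trap depends on its type: floating-point operations may
     trap on (signaling) NaNs, depending on -ftrapping-math and
     -fsignaling-nans, and integral operations may trap on overflow
     when TYPE_OVERFLOW_TRAPS (-ftrapv) is in effect.  */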
  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (type)
	       && TYPE_OVERFLOW_TRAPS (type))
	honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
				       honor_trapv,
				       honor_nans, honor_snans, rhs2,
				       &handled);
  if (handled
      && ret)
    return true;
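  /* Even if the operation itself cannot trap, one of its operands
     still may, for example an embedded memory reference.  */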
  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}