/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "basic-block.h"
28 #include "tree-pretty-print.h"
29 #include "gimple-pretty-print.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
32 #include "gimple.h"
33 #include "tree-dump.h"
34 #include "timevar.h"
35 #include "fibheap.h"
36 #include "hashtab.h"
37 #include "tree-iterator.h"
38 #include "alloc-pool.h"
39 #include "tree-pass.h"
40 #include "flags.h"
41 #include "bitmap.h"
42 #include "langhooks.h"
43 #include "cfgloop.h"
44 #include "params.h"
45 #include "tree-ssa-propagate.h"
46 #include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
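
/* A hypothetical example (not from the original sources) of how the
   optimistic SCC iteration converges on a loop-carried cycle such as

       i_1 = PHI <0 (entry), i_2 (latch)>
       i_2 = i_1 + 0;

   The names i_1 and i_2 form one SCC.  On the first optimistic pass
   the latch argument of the PHI is still VN_TOP and is ignored, so
   i_1 gets value number 0, and i_2 = 0 + 0 simplifies to 0 as well.
   The next pass recomputes i_1 from PHI <0, 0>, again yielding 0;
   nothing changed, so the iteration stops and the stabilized numbers
   are recorded as proven facts in the valid tables.  */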
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;


DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
                                SSA_NAME_VERSION (name));
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), value);
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}
/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is a default-definition or a PHI result
     use it directly.  */
  if (gimple_nop_p (def_stmt)
      || gimple_code (def_stmt) == GIMPLE_PHI)
    return vn->valnum;

  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
    {
    case tcc_reference:
      if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
           || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
           || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
          && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
        expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
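
/* A hypothetical example of the above: if the value number of x_1 is
   y_2, and y_2 is defined by "y_2 = z_3 + 1", then
   vn_get_expr_for (x_1) builds and caches the expression z_3 + 1,
   which the simplification code can then fold against other
   expressions.  */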
/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT; if none exists, create a new one.
   Either way, return the value id.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, INSERT);
  if (*slot)
    return ((vn_constant_t)*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = (void *) vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return vro1->opcode == vro2->opcode
    && types_compatible_p (vro1->type, vro2->type)
    && expressions_equal_p (vro1->op0, vro2->op0)
    && expressions_equal_p (vro1->op1, vro2->op1)
    && expressions_equal_p (vro1->op2, vro2->op2);
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}
/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    {
      if (vro->opcode == MEM_REF)
        deref = true;
      else if (vro->opcode != ADDR_EXPR)
        deref = false;
      if (vro->off != -1)
        {
          if (off == -1)
            off = 0;
          off += vro->off;
        }
      else
        {
          if (off != -1
              && off != 0)
            result = iterative_hash_hashval_t (off, result);
          off = -1;
          if (deref
              && vro->opcode == ADDR_EXPR)
            {
              if (vro->op0)
                {
                  tree op = TREE_OPERAND (vro->op0, 0);
                  result = iterative_hash_hashval_t (TREE_CODE (op), result);
                  result = iterative_hash_expr (op, result);
                }
            }
          else
            result = vn_reference_op_compute_hash (vro, result);
        }
    }
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  unsigned i, j;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
        return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
           && (TYPE_PRECISION (vr1->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
           && (TYPE_PRECISION (vr2->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
        {
          if (vro1->opcode == MEM_REF)
            deref1 = true;
          if (vro1->off == -1)
            break;
          off1 += vro1->off;
        }
      for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
        {
          if (vro2->opcode == MEM_REF)
            deref2 = true;
          if (vro2->off == -1)
            break;
          off2 += vro2->off;
        }
      if (off1 != off2)
        return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
        {
          memset (&tem1, 0, sizeof (tem1));
          tem1.op0 = TREE_OPERAND (vro1->op0, 0);
          tem1.type = TREE_TYPE (tem1.op0);
          tem1.opcode = TREE_CODE (tem1.op0);
          vro1 = &tem1;
        }
      if (deref2 && vro2->opcode == ADDR_EXPR)
        {
          memset (&tem2, 0, sizeof (tem2));
          tem2.op0 = TREE_OPERAND (vro2->op0, 0);
          tem2.type = TREE_TYPE (tem2.op0);
          tem2.opcode = TREE_CODE (tem2.op0);
          vro2 = &tem2;
        }
      if (!vn_reference_op_eq (vro1, vro2))
        return false;
      ++j;
      ++i;
    }
  while (VEC_length (vn_reference_op_s, vr1->operands) != i
         || VEC_length (vn_reference_op_s, vr2->operands) != j);

  return true;
}
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;
      tree base;

      base = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
      if (!base)
        base = null_pointer_node;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (base);
      temp.op0 = base;
      temp.op1 = TMR_ORIGINAL (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
        {
        case MISALIGNED_INDIRECT_REF:
          temp.op0 = TREE_OPERAND (ref, 1);
          break;
        case MEM_REF:
          /* The base address gets its own vn_reference_op_s structure.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          if (host_integerp (TREE_OPERAND (ref, 1), 0))
            temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          {
            tree this_offset = component_ref_field_offset (ref);
            if (this_offset
                && TREE_CODE (this_offset) == INTEGER_CST)
              {
                tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
                if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
                  {
                    double_int off
                      = double_int_add (tree_to_double_int (this_offset),
                                        double_int_sdiv
                                          (tree_to_double_int (bit_offset),
                                           uhwi_to_double_int (BITS_PER_UNIT),
                                           TRUNC_DIV_EXPR));
                    if (double_int_fits_in_shwi_p (off))
                      temp.off = off.low;
                  }
              }
          }
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
          if (TREE_CODE (temp.op0) == INTEGER_CST
              && TREE_CODE (temp.op1) == INTEGER_CST
              && TREE_CODE (temp.op2) == INTEGER_CST)
            {
              double_int off = tree_to_double_int (temp.op0);
              off = double_int_add (off,
                                    double_int_neg
                                      (tree_to_double_int (temp.op1)));
              off = double_int_mul (off, tree_to_double_int (temp.op2));
              if (double_int_fits_in_shwi_p (off))
                temp.off = off.low;
            }
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case VAR_DECL:
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (IE they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration.  */
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          temp.off = 0;
          break;
        case IMAGPART_EXPR:
          /* This is only interesting for its constant offset.  */
          temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
          break;
        default:
          gcc_unreachable ();
        }
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
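
/* An illustrative (hypothetical) decomposition: for a load from
   a.b[2], with A a VAR_DECL, the loop above pushes, outermost
   reference first and base object last,

       { ARRAY_REF  (op0 = 2, op1 = low bound, op2 = element size),
         COMPONENT_REF (op0 = FIELD_DECL for b),
         VAR_DECL  (op0 = a) }

   with each operand's OFF field caching the constant byte offset it
   contributes whenever that offset is known at compile time.  */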
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
        size = -1;
      else
        size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case ADDR_EXPR:
          /* Apart from ADDR_EXPR arguments to MEM_REF.  */
          if (base != NULL_TREE
              && TREE_CODE (base) == MEM_REF
              && op->op0
              && DECL_P (TREE_OPERAND (op->op0, 0)))
            {
              vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
              base = TREE_OPERAND (op->op0, 0);
              if (pop->off == -1)
                {
                  max_size = -1;
                  offset = 0;
                }
              else
                offset += pop->off * BITS_PER_UNIT;
              op0_p = NULL;
              break;
            }
          /* Fallthru.  */
        case CALL_EXPR:
          return false;

        /* Record the base objects.  */
        case MISALIGNED_INDIRECT_REF:
          *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case MEM_REF:
          base_alias_set = get_deref_alias_set (op->op0);
          *op0_p = build2 (MEM_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          op0_p = NULL;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += tree_low_cst (op->op1, 0);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */

            if (op->op1
                || !host_integerp (DECL_FIELD_OFFSET (field), 1))
              max_size = -1;
            else
              {
                offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
                           * BITS_PER_UNIT);
                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (!host_integerp (op->op0, 0)
              || !host_integerp (op->op1, 0)
              || !host_integerp (op->op2, 0))
            max_size = -1;
          else
            {
              HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
              hindex -= TREE_INT_CST_LOW (op->op1);
              hindex *= TREE_INT_CST_LOW (op->op2);
              hindex *= BITS_PER_UNIT;
              offset += hindex;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);

  return true;
}
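
/* Sketch of the intended use of the above (assumed; it mirrors the
   lookup paths later in this file):

       ao_ref r;
       if (ao_ref_init_from_vn_reference (&r, set, type, operands))
         ... hand R to the alias oracle, e.g. refs_may_alias_p_1 ...  */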
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  temp.off = -1;
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}
958 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
959 *I_P to point to the last element of the replacement. */
960 void
961 vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
962 unsigned int *i_p)
964 unsigned int i = *i_p;
965 vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
966 vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
967 tree addr_base;
968 HOST_WIDE_INT addr_offset;
970 /* The only thing we have to do is from &OBJ.foo.bar add the offset
971 from .foo.bar to the preceeding MEM_REF offset and replace the
972 address with &OBJ. */
973 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
974 &addr_offset);
975 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
976 if (addr_base != op->op0)
978 double_int off = tree_to_double_int (mem_op->op0);
979 off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
980 off = double_int_add (off, shwi_to_double_int (addr_offset));
981 mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
982 op->op0 = build_fold_addr_expr (addr_base);
983 if (host_integerp (mem_op->op0, 0))
984 mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
985 else
986 mem_op->off = -1;
990 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
991 *I_P to point to the last element of the replacement. */
992 static void
993 vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
994 unsigned int *i_p)
996 unsigned int i = *i_p;
997 vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
998 vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
999 gimple def_stmt;
1000 enum tree_code code;
1001 double_int off;
1003 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1004 if (!is_gimple_assign (def_stmt))
1005 return;
1007 code = gimple_assign_rhs_code (def_stmt);
1008 if (code != ADDR_EXPR
1009 && code != POINTER_PLUS_EXPR)
1010 return;
1012 off = tree_to_double_int (mem_op->op0);
1013 off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
1015 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1016 from .foo.bar to the preceeding MEM_REF offset and replace the
1017 address with &OBJ. */
1018 if (code == ADDR_EXPR)
1020 tree addr, addr_base;
1021 HOST_WIDE_INT addr_offset;
1023 addr = gimple_assign_rhs1 (def_stmt);
1024 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1025 &addr_offset);
1026 if (!addr_base
1027 || TREE_CODE (addr_base) != MEM_REF)
1028 return;
1030 off = double_int_add (off, shwi_to_double_int (addr_offset));
1031 off = double_int_add (off, mem_ref_offset (addr_base));
1032 op->op0 = TREE_OPERAND (addr_base, 0);
1034 else
1036 tree ptr, ptroff;
1037 ptr = gimple_assign_rhs1 (def_stmt);
1038 ptroff = gimple_assign_rhs2 (def_stmt);
1039 if (TREE_CODE (ptr) != SSA_NAME
1040 || TREE_CODE (ptroff) != INTEGER_CST)
1041 return;
1043 off = double_int_add (off, tree_to_double_int (ptroff));
1044 op->op0 = ptr;
1047 mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
1048 if (host_integerp (mem_op->op0, 0))
1049 mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
1050 else
1051 mem_op->off = -1;
1052 if (TREE_CODE (op->op0) == SSA_NAME)
1053 op->op0 = SSA_VAL (op->op0);
1054 if (TREE_CODE (op->op0) != SSA_NAME)
1055 op->opcode = TREE_CODE (op->op0);
1057 /* And recurse. */
1058 if (TREE_CODE (op->op0) == SSA_NAME)
1059 vn_reference_maybe_forwprop_address (ops, i_p);
1060 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1061 vn_reference_fold_indirect (ops, i_p);
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  VEC (vn_reference_op_s, heap) *operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = VEC_index (vn_reference_op_s, operands, 0);
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && VEC_length (vn_reference_op_s, operands) >= 2
      && VEC_length (vn_reference_op_s, operands) <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (VEC_length (vn_reference_op_s, operands) > 2)
        arg1 = VEC_index (vn_reference_op_s, operands, 2);
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
          || (arg0->opcode == ADDR_EXPR
              && is_gimple_min_invariant (arg0->op0)))
        anyconst = true;
      if (arg1
          && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
              || (arg1->opcode == ADDR_EXPR
                  && is_gimple_min_invariant (arg1->op0))))
        anyconst = true;
      if (anyconst)
        {
          tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
                                         arg1 ? 2 : 1,
                                         arg0->op0,
                                         arg1 ? arg1->op0 : NULL);
          if (folded
              && TREE_CODE (folded) == NOP_EXPR)
            folded = TREE_OPERAND (folded, 0);
          if (folded
              && is_gimple_min_invariant (folded))
            return folded;
        }
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
           && TREE_CODE (op->op0) == INTEGER_CST
           && integer_zerop (op->op1)
           && VEC_length (vn_reference_op_s, operands) == 2)
    {
      vn_reference_op_t arg0;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (arg0->opcode == STRING_CST
          && (TYPE_MODE (op->type)
              == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
          && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
          && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
          && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
        return build_int_cst_type (op->type,
                                   (TREE_STRING_POINTER (arg0->op0)
                                    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  vn_reference_op_t vro;
  unsigned int i;

  for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          vro->op0 = SSA_VAL (vro->op0);
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        vro->op1 = SSA_VAL (vro->op1);
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        vro->op2 = SSA_VAL (vro->op2);
      /* If it transforms from an SSA_NAME to an address, fold with
         a preceding indirect reference.  */
      if (i > 0
          && vro->op0
          && TREE_CODE (vro->op0) == ADDR_EXPR
          && VEC_index (vn_reference_op_s,
                        orig, i - 1)->opcode == MEM_REF)
        vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
               && vro->opcode == SSA_NAME
               && VEC_index (vn_reference_op_s,
                             orig, i - 1)->opcode == MEM_REF)
        vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
         one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
               && vro->off == -1
               && TREE_CODE (vro->op0) == INTEGER_CST
               && TREE_CODE (vro->op1) == INTEGER_CST
               && TREE_CODE (vro->op2) == INTEGER_CST)
        {
          double_int off = tree_to_double_int (vro->op0);
          off = double_int_add (off,
                                double_int_neg
                                  (tree_to_double_int (vro->op1)));
          off = double_int_mul (off, tree_to_double_int (vro->op2));
          if (double_int_fits_in_shwi_p (off))
            vro->off = off.low;
        }
    }

  return orig;
}
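
/* A hypothetical example of the effect of valueization: if p_1 has
   the value number &a.b, then the operand pair

       { MEM_REF (op0 = 0), SSA_NAME p_1 }

   first becomes { MEM_REF (0), ADDR_EXPR &a.b }, which
   vn_reference_fold_indirect then rewrites to the pair
   { MEM_REF (offset of b), ADDR_EXPR &a }.  A load through p_1 thus
   hashes and compares equal to a direct load from a.b.  */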
static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}
static tree *last_vuse_ptr;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree fndecl;
  tree base;
  HOST_WIDE_INT offset, maxsize;

  /* First try to disambiguate after value-replacing in the definitions LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      ao_ref ref1;
      VEC (vn_reference_op_s, heap) *operands = NULL;
      bool res = true;
      copy_reference_ops_from_ref (lhs, &operands);
      operands = valueize_refs (operands);
      if (ao_ref_init_from_vn_reference (&ref1, get_alias_set (lhs),
                                         TREE_TYPE (lhs), operands))
        res = refs_may_alias_p_1 (ref, &ref1, true);
      VEC_free (vn_reference_op_s, heap, operands);
      if (!res)
        return NULL;
    }

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && is_gimple_call (def_stmt)
      && (fndecl = gimple_call_fndecl (def_stmt))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
          == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = fold_convert (vr->type, integer_zero_node);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = fold_convert (vr->type, integer_zero_node);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      VEC (vn_reference_op_s, heap) *lhs = NULL, *rhs = NULL;
      vn_reference_op_t vro;
      ao_ref r;

      /* See if the assignment kills REF.  */
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (!operand_equal_p (base, base2, 0)
          || offset2 > offset
          || offset2 + size2 < offset + maxsize)
        return (void *)-1;

      /* Find the common base of ref and the lhs.  */
      copy_reference_ops_from_ref (gimple_assign_lhs (def_stmt), &lhs);
      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
      j = VEC_length (vn_reference_op_s, lhs) - 1;
      while (j >= 0 && i >= 0
             && vn_reference_op_eq (VEC_index (vn_reference_op_s,
                                               vr->operands, i),
                                    VEC_index (vn_reference_op_s, lhs, j)))
        {
          i--;
          j--;
        }

      VEC_free (vn_reference_op_s, heap, lhs);
      /* i now points to the first additional op.
         ??? LHS may not be completely contained in VR, one or more
         VIEW_CONVERT_EXPRs could be in its way.  We could at least
         try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
        return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
          > VEC_length (vn_reference_op_s, vr->operands))
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
                         i + 1 + VEC_length (vn_reference_op_s, rhs));
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands,
                      i + 1 + VEC_length (vn_reference_op_s, rhs));
      for (j = 0; VEC_iterate (vn_reference_op_s, rhs, j, vro); ++j)
        VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
      VEC_free (vn_reference_op_s, heap, rhs);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
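
/* For illustration, a hypothetical testcase for case 1) above:

       memset (&a, 0, sizeof (a));
       x_1 = a.i;

   The walk from the load's vuse reaches the memset call, the covering
   test succeeds, and x_1 is value-numbered to the constant 0 even
   though the store is never expressed as a gimple assignment.  */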
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            vn_reference_t *vnresult, bool maywalk)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
                 VEC_length (vn_reference_op_s, operands));
  memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
          VEC_address (vn_reference_op_s, operands),
          sizeof (vn_reference_op_s)
          * VEC_length (vn_reference_op_s, operands));
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && maywalk
      && vr1.vuse)
    {
      ao_ref r;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
        *vnresult =
          (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, tree vuse, bool maywalk,
                     vn_reference_t *vnresult)
{
  VEC (vn_reference_op_s, heap) *operands;
  struct vn_reference_s vr1;
  tree cst;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
  vr1.type = TREE_TYPE (op);
  vr1.set = get_alias_set (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  if (maywalk
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      ao_ref_init (&r, op);
      wvnresult =
        (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                vn_reference_lookup_2,
                                                vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
      if (wvnresult)
        {
          if (vnresult)
            *vnresult = wvnresult;
          return wvnresult->result;
        }

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}
/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            tree result, unsigned int value_id)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Compute and return the hash value for nary operation VNO1.  */

hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  hash = iterative_hash_hashval_t (vno1->opcode, 0);
  for (i = 0; i < vno1->length; ++i)
    hash = iterative_hash_expr (vno1->op[i], hash);

  return hash;
}
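
/* For example (hypothetical): a_1 + b_2 and b_2 + a_1 hash
   identically, because for commutative opcodes the two operands are
   swapped into a canonical order above before being hashed, so the
   equality test below only ever sees one operand order.  */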
/* Return the computed hashcode for nary operation P1.  */

static hashval_t
vn_nary_op_hash (const void *p1)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

int
vn_nary_op_eq (const void *p1, const void *p2)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}
/* Lookup a n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0, tree op1, tree op2,
                          tree op3, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = code;
  vno1.length = length;
  vno1.type = type;
  vno1.op[0] = op0;
  vno1.op[1] = op1;
  vno1.op[2] = op2;
  vno1.op[3] = op3;
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = TREE_CODE (op);
  vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno1.type = TREE_TYPE (op);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = TREE_OPERAND (op, i);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = gimple_assign_rhs_code (stmt);
  vno1.length = gimple_num_ops (stmt) - 1;
  vno1.type = gimple_expr_type (stmt);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = gimple_op (stmt, i + 1);
  if (vno1.opcode == REALPART_EXPR
      || vno1.opcode == IMAGPART_EXPR
      || vno1.opcode == VIEW_CONVERT_EXPR)
    vno1.op[0] = TREE_OPERAND (vno1.op[0], 0);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Insert a n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0,
                          tree op1, tree op2, tree op3,
                          tree result,
                          unsigned int value_id)
{
  void **slot;
  vn_nary_op_t vno1;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = value_id;
  vno1->opcode = code;
  vno1->length = length;
  vno1->type = type;
  if (length >= 1)
    vno1->op[0] = op0;
  if (length >= 2)
    vno1->op[1] = op1;
  if (length >= 3)
    vno1->op[2] = op2;
  if (length >= 4)
    vno1->op[3] = op3;
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = TREE_CODE (op);
  vno1->length = length;
  vno1->type = TREE_TYPE (op);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = TREE_OPERAND (op, i);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  unsigned length = gimple_num_ops (stmt) - 1;
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = gimple_assign_rhs_code (stmt);
  vno1->length = length;
  vno1->type = gimple_expr_type (stmt);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = gimple_op (stmt, i + 1);
  if (vno1->opcode == REALPART_EXPR
      || vno1->opcode == IMAGPART_EXPR
      || vno1->opcode == VIEW_CONVERT_EXPR)
    vno1->op[0] = TREE_OPERAND (vno1->op[0], 0);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  hashval_t result;
  int i;
  tree phi1op;
  tree type;

  result = vp1->block->index;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
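  /* E.g. int_x = PHI <1, 2> and unsigned_y = PHI <1, 2> must not be
     value numbered the same, so mix the precision and signedness of
     integral types into the hash.  */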
  type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
  result += (INTEGRAL_TYPE_P (type)
             + (INTEGRAL_TYPE_P (type)
                ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));

  for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
    {
      if (phi1op == VN_TOP)
        continue;
      result = iterative_hash_expr (phi1op, result);
    }

  return result;
}

/* Return the computed hashcode for phi operation P1.  */

static hashval_t
vn_phi_hash (const void *p1)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const void *p1, const void *p2)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  const_vn_phi_t const vp2 = (const_vn_phi_t) p2;

  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* If the PHI nodes do not have compatible types
         they are not the same.  */
      if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
                               TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
        return false;

      /* Any phi in the same block will have its arguments in the
         same edge order, because of how we store phi nodes.  */
      for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
        {
          tree phi2op = VEC_index (tree, vp2->phiargs, i);
          if (phi1op == VN_TOP || phi2op == VN_TOP)
            continue;
          if (!expressions_equal_p (phi1op, phi2op))
            return false;
        }
      return true;
    }
  return false;
}

static VEC(tree, heap) *shared_lookup_phiargs;

/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple phi)
{
  void **slot;
  struct vn_phi_s vp1;
  unsigned i;

  VEC_truncate (tree, shared_lookup_phiargs, 0);

  /* Canonicalize the SSA_NAMEs to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
    }
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = gimple_bb (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return ((vn_phi_t)*slot)->result;
}

/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple phi, tree result)
{
  void **slot;
  vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
  unsigned i;
  VEC (tree, heap) *args = NULL;

  /* Canonicalize the SSA_NAMEs to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, args, def);
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->phiargs = args;
  vp1->block = gimple_bb (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
                                   INSERT);

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
  *slot = vp1;
  return vp1;
}

/* Print set of components in strongly connected component SCC to OUT.  */

static void
print_scc (FILE *out, VEC (tree, heap) *scc)
{
  tree var;
  unsigned int i;

  fprintf (out, "SCC consists of: ");
  for (i = 0; VEC_iterate (tree, scc, i, var); i++)
    {
      print_generic_expr (out, var, 0);
      fprintf (out, " ");
    }
  fprintf (out, "\n");
}

/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  tree currval;
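
  /* SSA names that occur in abnormal PHIs must not be copy propagated,
     so do not record such a name as the value of FROM; keep FROM
     as its own value instead.  */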
  if (from != to
      && TREE_CODE (to) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
    to = from;

  /* The only things we allow as value numbers are VN_TOP, SSA_NAMEs
     and invariants.  So assert that here.  */
  gcc_assert (to != NULL_TREE
              && (to == VN_TOP
                  || TREE_CODE (to) == SSA_NAME
                  || is_gimple_min_invariant (to)));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);
    }

  currval = SSA_VAL (from);

  if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
    {
      VN_INFO (from)->valnum = to;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " (changed)\n");
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}

/* Value number all definitions in STMT to themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}

static bool expr_has_constants (tree expr);
static tree valueize_expr (tree expr);

/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Follow chains of copies to their destination.  */
  while (TREE_CODE (rhs) == SSA_NAME
         && SSA_VAL (rhs) != rhs)
    rhs = SSA_VAL (rhs);

  /* The copy may have a more interesting constant-filled expression
     (we don't, since the RHS of this copy is just an SSA name).  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
      VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
    }

  return set_ssa_val_to (lhs, rhs);
}

/* Visit a unary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_unary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}

/* Visit a binary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_binary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}

/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gimple stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  tree result;
  tree vuse = gimple_vuse (stmt);

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = valueize_shared_reference_ops_from_call (stmt);
  vr1.type = gimple_expr_type (stmt);
  vr1.set = 0;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, NULL);
  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
          && VN_INFO (result)->has_constants)
        VN_INFO (lhs)->has_constants = true;
    }
  else
    {
      void **slot;
      vn_reference_t vr2;
      changed = set_ssa_val_to (lhs, lhs);
      vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
      vr2->vuse = vr1.vuse;
      vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
      vr2->type = vr1.type;
      vr2->set = vr1.set;
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      slot = htab_find_slot_with_hash (current_info->references,
                                       vr2, vr2->hashcode, INSERT);
      if (*slot)
        free_reference (*slot);
      *slot = vr2;
    }

  return changed;
}

/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree last_vuse;
  tree result;

  last_vuse = gimple_vuse (stmt);
  last_vuse_ptr = &last_vuse;
  result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);
  last_vuse_ptr = NULL;

  /* If we have a VCE, try looking up its operand as it might be stored in
     a different type.  */
  if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
    result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
                                  true, NULL);

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
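  /* For example, after
       union { int i; float f; } u;
       u.i = j_1;
       x_2 = u.f;
     the load from u.f finds the stored value j_1, but with type int,
     so x_2 is value numbered as VIEW_CONVERT_EXPR <float> (j_1).  */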
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
         of VIEW_CONVERT_EXPR <TREE_TYPE (op)> (result).
         So first simplify and look up this expression to see if it
         is already available.  */
      tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      if ((CONVERT_EXPR_P (val)
           || TREE_CODE (val) == VIEW_CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
        {
          tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
          if ((CONVERT_EXPR_P (tem)
               || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
              && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
                                                    TREE_TYPE (val), tem)))
            val = tem;
        }
      result = val;
      if (!is_gimple_min_invariant (val)
          && TREE_CODE (val) != SSA_NAME)
        result = vn_nary_op_lookup (val, NULL);
      /* If the expression is not yet available, value-number lhs to
         a new SSA_NAME we create.  */
      if (!result)
        {
          result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
          /* Initialize value-number information properly.  */
          VN_INFO_GET (result)->valnum = result;
          VN_INFO (result)->value_id = get_next_value_id ();
          VN_INFO (result)->expr = val;
          VN_INFO (result)->has_constants = expr_has_constants (val);
          VN_INFO (result)->needs_insertion = true;
          /* As all "inserted" statements are singleton SCCs, insert
             into the valid table.  This is strictly needed to
             avoid re-generating new value SSA_NAMEs for the same
             expression during SCC iteration over and over (the
             optimistic table gets cleared after each iteration).
             We do not need to insert into the optimistic table, as
             lookups there will fall back to the valid table.  */
          if (current_info == optimistic_info)
            {
              current_info = valid_info;
              vn_nary_op_insert (val, result);
              current_info = optimistic_info;
            }
          else
            vn_nary_op_insert (val, result);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Inserting name ");
              print_generic_expr (dump_file, result, 0);
              fprintf (dump_file, " for expression ");
              print_generic_expr (dump_file, val, 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
          && VN_INFO (result)->has_constants)
        {
          VN_INFO (lhs)->expr = VN_INFO (result)->expr;
          VN_INFO (lhs)->has_constants = true;
        }
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, last_vuse);
    }

  return changed;
}

/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree result;
  bool resultsame = false;

  /* First we want to lookup using the *vuses* from the store and see
     whether the last store to this location with the same address
     had the same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store are the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */
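  /* For instance, in
       *p_1 = x_2;
       ...
       *p_1 = x_2;   <-- this store
     if no intervening store clobbers *p_1, the second store changes
     nothing, so its vdef gets the value number of its vuse.  */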

  result = vn_reference_lookup (lhs, gimple_vuse (stmt), false, NULL);

  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
        result = SSA_VAL (result);
      if (TREE_CODE (op) == SSA_NAME)
        op = SSA_VAL (op);
      resultsame = expressions_equal_p (result, op);
    }

  if (!result || !resultsame)
    {
      tree vdef;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "No store match\n");
          fprintf (dump_file, "Value numbering store ");
          print_generic_expr (dump_file, lhs, 0);
          fprintf (dump_file, " to ");
          print_generic_expr (dump_file, op, 0);
          fprintf (dump_file, "\n");
        }
      /* Have to set value numbers before insert, since insert is
         going to valueize the references in-place.  */
      if ((vdef = gimple_vdef (stmt)))
        {
          VN_INFO (vdef)->use_processed = true;
          changed |= set_ssa_val_to (vdef, vdef);
        }

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
          || is_gimple_reg (op))
        vn_reference_insert (lhs, op, vdef);
    }
  else
    {
      /* We had a match, so value number the vdef to have the value
         number of the vuse it came from.  */
      tree def, use;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Store matched earlier value, "
                 "value numbering store vdefs to matching vuses.\n");

      def = gimple_vdef (stmt);
      use = gimple_vuse (stmt);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, SSA_VAL (use));
    }

  return changed;
}

/* Visit and value number PHI, return true if the value number
   changed.  */

static bool
visit_phi (gimple phi)
{
  bool changed = false;
  tree result;
  tree sameval = VN_TOP;
  bool allsame = true;
  unsigned i;

  /* TODO: We could check for this in init_sccvn, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
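  /* E.g. for x_3 = PHI <x_1, VN_TOP>, where the VN_TOP argument comes
     from a not-yet-visited back edge, the only known value is x_1, so
     the PHI is optimistically given the value number of x_1.  */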
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);

      if (TREE_CODE (def) == SSA_NAME)
        def = SSA_VAL (def);
      if (def == VN_TOP)
        continue;
      if (sameval == VN_TOP)
        {
          sameval = def;
        }
      else
        {
          if (!expressions_equal_p (def, sameval))
            {
              allsame = false;
              break;
            }
        }
    }

  /* If all arguments value numbered to the same value, the phi node
     has that value.  */
  if (allsame)
    {
      if (is_gimple_min_invariant (sameval))
        {
          VN_INFO (PHI_RESULT (phi))->has_constants = true;
          VN_INFO (PHI_RESULT (phi))->expr = sameval;
        }
      else
        {
          VN_INFO (PHI_RESULT (phi))->has_constants = false;
          VN_INFO (PHI_RESULT (phi))->expr = sameval;
        }

      if (TREE_CODE (sameval) == SSA_NAME)
        return visit_copy (PHI_RESULT (phi), sameval);

      return set_ssa_val_to (PHI_RESULT (phi), sameval);
    }

  /* Otherwise, see if it is equivalent to a phi node in this block.  */
  result = vn_phi_lookup (phi);
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
        changed = visit_copy (PHI_RESULT (phi), result);
      else
        changed = set_ssa_val_to (PHI_RESULT (phi), result);
    }
  else
    {
      vn_phi_insert (phi, PHI_RESULT (phi));
      VN_INFO (PHI_RESULT (phi))->has_constants = false;
      VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
      changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
    }

  return changed;
}

/* Return true if EXPR contains constants.  */

static bool
expr_has_constants (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0));

    case tcc_binary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
        || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
      /* Constants inside reference ops are rarely interesting, but
         it can take a lot of looking to find them.  */
    case tcc_reference:
    case tcc_declaration:
      return false;
    default:
      return is_gimple_min_invariant (expr);
    }
  return false;
}

/* Return true if STMT contains constants.  */

static bool
stmt_has_constants (gimple stmt)
{
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
    {
    case GIMPLE_UNARY_RHS:
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));

    case GIMPLE_BINARY_RHS:
      return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
              || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
    case GIMPLE_TERNARY_RHS:
      return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
              || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
              || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
    case GIMPLE_SINGLE_RHS:
      /* Constants inside reference ops are rarely interesting, but
         it can take a lot of looking to find them.  */
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
    default:
      gcc_unreachable ();
    }
  return false;
}

/* Replace SSA_NAMEs in EXPR with their value numbers, and return the
   result.
   This is performed in place.  */

static tree
valueize_expr (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
          && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
        TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      break;
    case tcc_binary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
          && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
        TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
          && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
        TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
      break;
    default:
      break;
    }
  return expr;
}

/* Simplify the binary expression RHS, and return the result if
   simplified.  */

static tree
simplify_binary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree op0 = gimple_assign_rhs1 (stmt);
  tree op1 = gimple_assign_rhs2 (stmt);

  /* This will not catch every single case we could combine, but will
     catch those with constants.  The goal here is to simultaneously
     combine constants between expressions, but avoid infinite
     expansion of expressions during simplification.  */
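  /* E.g. if b_2 + 1 is recorded as the expression for a_1, then for
     x_3 = a_1 + 2 we feed fold (b_2 + 1) + 2 and get back b_2 + 3.  */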
  if (TREE_CODE (op0) == SSA_NAME)
    {
      if (VN_INFO (op0)->has_constants
          || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
        op0 = valueize_expr (vn_get_expr_for (op0));
      else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
        op0 = SSA_VAL (op0);
    }

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (VN_INFO (op1)->has_constants)
        op1 = valueize_expr (vn_get_expr_for (op1));
      else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
        op1 = SSA_VAL (op1);
    }

  /* Avoid folding if nothing changed.  */
  if (op0 == gimple_assign_rhs1 (stmt)
      && op1 == gimple_assign_rhs2 (stmt))
    return NULL_TREE;

  fold_defer_overflow_warnings ();

  result = fold_binary (gimple_assign_rhs_code (stmt),
                        gimple_expr_type (stmt), op0, op1);
  if (result)
    STRIP_USELESS_TYPE_CONVERSION (result);

  fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
                                  stmt, 0);

  /* Make sure result is not a complex expression consisting
     of operators of operators (i.e. (a + b) + (a + c)).
     Otherwise, we will end up with unbounded expressions if
     fold does anything at all.  */
  if (result && valid_gimple_rhs_p (result))
    return result;

  return NULL_TREE;
}

/* Simplify the unary expression RHS, and return the result if
   simplified.  */

static tree
simplify_unary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree orig_op0, op0 = gimple_assign_rhs1 (stmt);

  /* We handle some tcc_reference codes here that are all
     GIMPLE_ASSIGN_SINGLE codes.  */
  if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
      || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
      || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
    op0 = TREE_OPERAND (op0, 0);

  if (TREE_CODE (op0) != SSA_NAME)
    return NULL_TREE;

  orig_op0 = op0;
  if (VN_INFO (op0)->has_constants)
    op0 = valueize_expr (vn_get_expr_for (op0));
  else if (gimple_assign_cast_p (stmt)
           || gimple_assign_rhs_code (stmt) == REALPART_EXPR
           || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
           || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
    {
      /* We want to do tree-combining on conversion-like expressions.
         Make sure we feed only SSA_NAMEs or constants to fold though.  */
      tree tem = valueize_expr (vn_get_expr_for (op0));
      if (UNARY_CLASS_P (tem)
          || BINARY_CLASS_P (tem)
          || TREE_CODE (tem) == VIEW_CONVERT_EXPR
          || TREE_CODE (tem) == SSA_NAME
          || is_gimple_min_invariant (tem))
        op0 = tem;
    }

  /* Avoid folding if nothing changed, but remember the expression.  */
  if (op0 == orig_op0)
    return NULL_TREE;

  result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
                                       gimple_expr_type (stmt), op0);
  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (valid_gimple_rhs_p (result))
        return result;
    }

  return NULL_TREE;
}

/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (gimple stmt)
{
  tree tem;

  /* For stores we can end up simplifying an SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (gimple_assign_copy_p (stmt)
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    return NULL_TREE;

  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
    {
    case tcc_declaration:
      tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
      if (tem)
        return tem;
      break;

    case tcc_reference:
      /* Do not do full-blown reference lookup here, but simplify
         reads from constant aggregates.  */
      tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
      if (tem)
        return tem;

      /* Fallthrough for some codes that can operate on registers.  */
      if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
            || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
            || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
        break;
      /* We could do a little more with unary ops, if they expand
         into binary ops, but it's debatable whether it is worth it.  */
    case tcc_unary:
      return simplify_unary_expression (stmt);
      break;
    case tcc_comparison:
    case tcc_binary:
      return simplify_binary_expression (stmt);
      break;
    default:
      break;
    }

  return NULL_TREE;
}

/* Visit and value number USE, return true if the value number
   changed.  */

static bool
visit_use (tree use)
{
  bool changed = false;
  gimple stmt = SSA_NAME_DEF_STMT (use);

  VN_INFO (use)->use_processed = true;

  gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
  if (dump_file && (dump_flags & TDF_DETAILS)
      && !SSA_NAME_IS_DEFAULT_DEF (use))
    {
      fprintf (dump_file, "Value numbering ");
      print_generic_expr (dump_file, use, 0);
      fprintf (dump_file, " stmt = ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Handle uninitialized uses.  */
  if (SSA_NAME_IS_DEFAULT_DEF (use))
    changed = set_ssa_val_to (use, use);
  else
    {
      if (gimple_code (stmt) == GIMPLE_PHI)
        changed = visit_phi (stmt);
      else if (!gimple_has_lhs (stmt)
               || gimple_has_volatile_ops (stmt)
               || stmt_could_throw_p (stmt))
        changed = defs_to_varying (stmt);
      else if (is_gimple_assign (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          tree simplified;

          /* Shortcut for copies.  Simplifying copies is pointless,
             since we copy the expression and value they represent.  */
          if (gimple_assign_copy_p (stmt)
              && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
              && TREE_CODE (lhs) == SSA_NAME)
            {
              changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
              goto done;
            }
          simplified = try_to_simplify (stmt);
          if (simplified)
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "RHS ");
                  print_gimple_expr (dump_file, stmt, 0, 0);
                  fprintf (dump_file, " simplified to ");
                  print_generic_expr (dump_file, simplified, 0);
                  if (TREE_CODE (lhs) == SSA_NAME)
                    fprintf (dump_file, " has constants %d\n",
                             expr_has_constants (simplified));
                  else
                    fprintf (dump_file, "\n");
                }
            }
          /* Setting value numbers to constants will occasionally
             screw up phi congruence because constants are not
             uniquely associated with a single ssa name that can be
             looked up.  */
          if (simplified
              && is_gimple_min_invariant (simplified)
              && TREE_CODE (lhs) == SSA_NAME)
            {
              VN_INFO (lhs)->expr = simplified;
              VN_INFO (lhs)->has_constants = true;
              changed = set_ssa_val_to (lhs, simplified);
              goto done;
            }
          else if (simplified
                   && TREE_CODE (simplified) == SSA_NAME
                   && TREE_CODE (lhs) == SSA_NAME)
            {
              changed = visit_copy (lhs, simplified);
              goto done;
            }
          else if (simplified)
            {
              if (TREE_CODE (lhs) == SSA_NAME)
                {
                  VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
                  /* We have to unshare the expression or else
                     valueizing may change the IL stream.  */
                  VN_INFO (lhs)->expr = unshare_expr (simplified);
                }
            }
          else if (stmt_has_constants (stmt)
                   && TREE_CODE (lhs) == SSA_NAME)
            VN_INFO (lhs)->has_constants = true;
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              /* We reset expr and constantness here because we may
                 have been value numbering optimistically, and
                 iterating.  They may become non-constant in this case,
                 even if they were optimistically constant.  */

              VN_INFO (lhs)->has_constants = false;
              VN_INFO (lhs)->expr = NULL_TREE;
            }

          if ((TREE_CODE (lhs) == SSA_NAME
               /* We can substitute SSA_NAMEs that are live over
                  abnormal edges with their constant value.  */
               && !(gimple_assign_copy_p (stmt)
                    && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
               && !(simplified
                    && is_gimple_min_invariant (simplified))
               && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
              /* Stores or copies from SSA_NAMEs that are live over
                 abnormal edges are a problem.  */
              || (gimple_assign_single_p (stmt)
                  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
                  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
            changed = defs_to_varying (stmt);
          else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
            {
              changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
            }
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              if ((gimple_assign_copy_p (stmt)
                   && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
                  || (simplified
                      && is_gimple_min_invariant (simplified)))
                {
                  VN_INFO (lhs)->has_constants = true;
                  if (simplified)
                    changed = set_ssa_val_to (lhs, simplified);
                  else
                    changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
                }
              else
                {
                  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
                    {
                    case GIMPLE_UNARY_RHS:
                      changed = visit_unary_op (lhs, stmt);
                      break;
                    case GIMPLE_BINARY_RHS:
                      changed = visit_binary_op (lhs, stmt);
                      break;
                    case GIMPLE_SINGLE_RHS:
                      switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
                        {
                        case tcc_reference:
                          /* VOP-less references can go through unary case.  */
                          if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
                               || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
                               || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
                              && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
                            {
                              changed = visit_unary_op (lhs, stmt);
                              break;
                            }
                          /* Fallthrough.  */
                        case tcc_declaration:
                          changed = visit_reference_op_load
                              (lhs, gimple_assign_rhs1 (stmt), stmt);
                          break;
                        case tcc_expression:
                          if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
                            {
                              changed = visit_unary_op (lhs, stmt);
                              break;
                            }
                          /* Fallthrough.  */
                        default:
                          changed = defs_to_varying (stmt);
                        }
                      break;
                    default:
                      changed = defs_to_varying (stmt);
                      break;
                    }
                }
            }
          else
            changed = defs_to_varying (stmt);
        }
      else if (is_gimple_call (stmt))
        {
          tree lhs = gimple_call_lhs (stmt);

          /* ??? We could try to simplify calls.  */

          if (stmt_has_constants (stmt)
              && TREE_CODE (lhs) == SSA_NAME)
            VN_INFO (lhs)->has_constants = true;
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              /* We reset expr and constantness here because we may
                 have been value numbering optimistically, and
                 iterating.  They may become non-constant in this case,
                 even if they were optimistically constant.  */
              VN_INFO (lhs)->has_constants = false;
              VN_INFO (lhs)->expr = NULL_TREE;
            }

          if (TREE_CODE (lhs) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
            changed = defs_to_varying (stmt);
          /* ??? We should handle stores from calls.  */
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
                changed = visit_reference_op_call (lhs, stmt);
              else
                changed = defs_to_varying (stmt);
            }
          else
            changed = defs_to_varying (stmt);
        }
      else
        changed = defs_to_varying (stmt);
    }
 done:
  return changed;
}

/* Compare two operands by reverse postorder index.  */
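/* Definitions without a defining statement sort first, then blocks in
   RPO order; within a block PHIs come before ordinary statements,
   which are ordered by their UIDs, with the SSA name version as the
   final tie breaker.  */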

static int
compare_ops (const void *pa, const void *pb)
{
  const tree opa = *((const tree *)pa);
  const tree opb = *((const tree *)pb);
  gimple opstmta = SSA_NAME_DEF_STMT (opa);
  gimple opstmtb = SSA_NAME_DEF_STMT (opb);
  basic_block bba;
  basic_block bbb;

  if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (gimple_nop_p (opstmta))
    return -1;
  else if (gimple_nop_p (opstmtb))
    return 1;

  bba = gimple_bb (opstmta);
  bbb = gimple_bb (opstmtb);

  if (!bba && !bbb)
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (!bba)
    return -1;
  else if (!bbb)
    return 1;

  if (bba == bbb)
    {
      if (gimple_code (opstmta) == GIMPLE_PHI
          && gimple_code (opstmtb) == GIMPLE_PHI)
        return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
      else if (gimple_code (opstmta) == GIMPLE_PHI)
        return -1;
      else if (gimple_code (opstmtb) == GIMPLE_PHI)
        return 1;
      else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
        return gimple_uid (opstmta) - gimple_uid (opstmtb);
      else
        return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
    }
  return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
}

/* Sort an array containing members of a strongly connected component
   SCC so that the members are ordered by RPO number.
   This means that when the sort is complete, iterating through the
   array will give you the members in RPO order.  */

static void
sort_scc (VEC (tree, heap) *scc)
{
  qsort (VEC_address (tree, scc),
         VEC_length (tree, scc),
         sizeof (tree),
         compare_ops);
}

/* Insert the no longer used nary operation ONARY into the hash table INFO.  */

static void
copy_nary (vn_nary_op_t onary, vn_tables_t info)
{
  size_t size = (sizeof (struct vn_nary_op_s)
                 - sizeof (tree) * (4 - onary->length));
  vn_nary_op_t nary = (vn_nary_op_t) obstack_alloc (&info->nary_obstack, size);
  void **slot;
  memcpy (nary, onary, size);
  slot = htab_find_slot_with_hash (info->nary, nary, nary->hashcode, INSERT);
  gcc_assert (!*slot);
  *slot = nary;
}

/* Insert the no longer used phi OPHI into the hash table INFO.  */

static void
copy_phi (vn_phi_t ophi, vn_tables_t info)
{
  vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
  void **slot;
  memcpy (phi, ophi, sizeof (*phi));
  ophi->phiargs = NULL;
  slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
  gcc_assert (!*slot);
  *slot = phi;
}

/* Insert the no longer used reference OREF into the hash table INFO.  */

static void
copy_reference (vn_reference_t oref, vn_tables_t info)
{
  vn_reference_t ref;
  void **slot;
  ref = (vn_reference_t) pool_alloc (info->references_pool);
  memcpy (ref, oref, sizeof (*ref));
  oref->operands = NULL;
  slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
                                   INSERT);
  if (*slot)
    free_reference (*slot);
  *slot = ref;
}

/* Process a strongly connected component in the SSA graph.  */

static void
process_scc (VEC (tree, heap) *scc)
{
  tree var;
  unsigned int i;
  unsigned int iterations = 0;
  bool changed = true;
  htab_iterator hi;
  vn_nary_op_t nary;
  vn_phi_t phi;
  vn_reference_t ref;

  /* If the SCC has a single member, just visit it.  */
  if (VEC_length (tree, scc) == 1)
    {
      tree use = VEC_index (tree, scc, 0);
      if (!VN_INFO (use)->use_processed)
        visit_use (use);
      return;
    }

  /* Iterate over the SCC with the optimistic table until it stops
     changing.  */
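  /* Optimistic value numbering assumes values are equal until proven
     otherwise.  Since an assumption may be retracted in a later
     iteration, the optimistic tables are rebuilt from scratch on every
     iteration below.  */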
  current_info = optimistic_info;
  while (changed)
    {
      changed = false;
      iterations++;
      /* As we are value-numbering optimistically we have to
         clear the expression tables and the simplified expressions
         in each iteration until we converge.  */
      htab_empty (optimistic_info->nary);
      htab_empty (optimistic_info->phis);
      htab_empty (optimistic_info->references);
      obstack_free (&optimistic_info->nary_obstack, NULL);
      gcc_obstack_init (&optimistic_info->nary_obstack);
      empty_alloc_pool (optimistic_info->phis_pool);
      empty_alloc_pool (optimistic_info->references_pool);
      for (i = 0; VEC_iterate (tree, scc, i, var); i++)
        VN_INFO (var)->expr = NULL_TREE;
      for (i = 0; VEC_iterate (tree, scc, i, var); i++)
        changed |= visit_use (var);
    }

  statistics_histogram_event (cfun, "SCC iterations", iterations);

  /* Finally, copy the contents of the no longer used optimistic
     table to the valid table.  */
  FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
    copy_nary (nary, valid_info);
  FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
    copy_phi (phi, valid_info);
  FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
    copy_reference (ref, valid_info);

  current_info = valid_info;
}

DEF_VEC_O(ssa_op_iter);
DEF_VEC_ALLOC_O(ssa_op_iter,heap);

/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  Returns true if all went well, false if
   we run into resource limits.  */

static bool
extract_and_process_scc_for_name (tree name)
{
  VEC (tree, heap) *scc = NULL;
  tree x;

  /* Found an SCC, pop the components off the SCC stack and
     process them.  */
  do
    {
      x = VEC_pop (tree, sccstack);

      VN_INFO (x)->on_sccstack = false;
      VEC_safe_push (tree, heap, scc, x);
    } while (x != name);

  /* Bail out of SCCVN in case an SCC turns out to be incredibly large.  */
  if (VEC_length (tree, scc)
      > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
    {
      if (dump_file)
        fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
                 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
                 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
      return false;
    }

  if (VEC_length (tree, scc) > 1)
    sort_scc (scc);

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  process_scc (scc);

  VEC_free (tree, heap, scc);

  return true;
}

/* Depth first search on NAME to discover and process SCCs in the SSA
   graph.
   Execution of this algorithm relies on the fact that the SCCs are
   popped off the stack in topological order.
   Returns true if successful, false if we stopped processing SCCs due
   to resource constraints.  */
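/* This is Tarjan's SCC algorithm, implemented iteratively: instead of
   recursing, the current walk state is pushed onto the NAMEVEC and
   ITERVEC stacks below, so arbitrarily deep use-def chains cannot
   overflow the host stack.  */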

static bool
DFS (tree name)
{
  VEC(ssa_op_iter, heap) *itervec = NULL;
  VEC(tree, heap) *namevec = NULL;
  use_operand_p usep = NULL;
  gimple defstmt;
  tree use;
  ssa_op_iter iter;

 start_over:
  /* SCC info */
  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  VEC_safe_push (tree, heap, sccstack, name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCCs.  */
  if (!gimple_nop_p (defstmt))
    {
      /* Push a new iterator.  */
      if (gimple_code (defstmt) == GIMPLE_PHI)
        usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
      else
        usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    }
  else
    clear_and_done_ssa_iter (&iter);

  while (1)
    {
      /* If we are done processing uses of a name, go up the stack
         of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
        {
          /* See if we found an SCC.  */
          if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
            if (!extract_and_process_scc_for_name (name))
              {
                VEC_free (tree, heap, namevec);
                VEC_free (ssa_op_iter, heap, itervec);
                return false;
              }

          /* Check if we are done.  */
          if (VEC_empty (tree, namevec))
            {
              VEC_free (tree, heap, namevec);
              VEC_free (ssa_op_iter, heap, itervec);
              return true;
            }

          /* Restore the last use walker and continue walking there.  */
          use = name;
          name = VEC_pop (tree, namevec);
          memcpy (&iter, VEC_last (ssa_op_iter, itervec),
                  sizeof (ssa_op_iter));
          VEC_pop (ssa_op_iter, itervec);
          goto continue_walking;
        }

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
         invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
        {
          if (! (VN_INFO (use)->visited))
            {
              /* Recurse by pushing the current use walking state on
                 the stack and starting over.  */
              VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
              VEC_safe_push(tree, heap, namevec, name);
              name = use;
              goto start_over;

 continue_walking:
              VN_INFO (name)->low = MIN (VN_INFO (name)->low,
                                         VN_INFO (use)->low);
            }
          if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
              && VN_INFO (use)->on_sccstack)
            {
              VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
                                         VN_INFO (name)->low);
            }
        }

      usep = op_iter_next_use (&iter);
    }
}

/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table)
{
  table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
  table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
  table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
                                   free_reference);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = create_alloc_pool ("VN phis",
                                        sizeof (struct vn_phi_s),
                                        30);
  table->references_pool = create_alloc_pool ("VN references",
                                              sizeof (struct vn_reference_s),
                                              30);
}

/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  htab_delete (table->phis);
  htab_delete (table->nary);
  htab_delete (table->references);
  obstack_free (&table->nary_obstack, NULL);
  free_alloc_pool (table->phis_pool);
  free_alloc_pool (table->references_pool);
}
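
/* Initialize the SCCVN infrastructure: dominance info, the SSA name
   aux table, RPO numbers for the basic blocks, VN_TOP and the valid
   and optimistic hash tables.  */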

static void
init_scc_vn (void)
{
  size_t i;
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  sccstack = NULL;
  constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
                                      free);

  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size, it just
     preallocates the space to do so.  */
  VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                         num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs = NULL;
  shared_lookup_references = NULL;
  rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
     the i'th block in RPO order is bb.  We want to map bb's to RPO
     numbers, so we need to rearrange this array.  */
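  /* E.g. if rpo_numbers_temp is {2, 5, 3}, block 2 gets RPO number 0,
     block 5 gets 1 and block 3 gets 2 after the inversion below.  */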
  for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;

  XDELETE (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
        {
          VN_INFO_GET (name)->valnum = VN_TOP;
          VN_INFO (name)->expr = NULL_TREE;
          VN_INFO (name)->value_id = 0;
        }
    }

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}
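
/* Free the SCCVN data structures allocated by init_scc_vn, releasing
   any SSA names that were created for inserted expressions.  */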

void
free_scc_vn (void)
{
  size_t i;

  htab_delete (constant_to_value_id);
  BITMAP_FREE (constant_value_ids);
  VEC_free (tree, heap, shared_lookup_phiargs);
  VEC_free (vn_reference_op_s, heap, shared_lookup_references);
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->needs_insertion)
        release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);

  VEC_free (tree, heap, sccstack);
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}

/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  htab_iterator hi;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HTAB_ELEMENT (valid_info->nary,
                         vno, vn_nary_op_t, hi)
    {
      if (vno->result)
        {
          if (TREE_CODE (vno->result) == SSA_NAME)
            vno->value_id = VN_INFO (vno->result)->value_id;
          else if (is_gimple_min_invariant (vno->result))
            vno->value_id = get_or_alloc_constant_value_id (vno->result);
        }
    }

  FOR_EACH_HTAB_ELEMENT (valid_info->phis,
                         vp, vn_phi_t, hi)
    {
      if (vp->result)
        {
          if (TREE_CODE (vp->result) == SSA_NAME)
            vp->value_id = VN_INFO (vp->result)->value_id;
          else if (is_gimple_min_invariant (vp->result))
            vp->value_id = get_or_alloc_constant_value_id (vp->result);
        }
    }

  FOR_EACH_HTAB_ELEMENT (valid_info->references,
                         vr, vn_reference_t, hi)
    {
      if (vr->result)
        {
          if (TREE_CODE (vr->result) == SSA_NAME)
            vr->value_id = VN_INFO (vr->result)->value_id;
          else if (is_gimple_min_invariant (vr->result))
            vr->value_id = get_or_alloc_constant_value_id (vr->result);
        }
    }
}

/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  */

bool
run_scc_vn (void)
{
  size_t i;
  tree param;
  bool changed = true;

  init_scc_vn ();
  current_info = valid_info;

  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    {
      if (gimple_default_def (cfun, param) != NULL)
        {
          tree def = gimple_default_def (cfun, param);
          VN_INFO (def)->valnum = def;
        }
    }

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->visited == false
          && !has_zero_uses (name))
        if (!DFS (name))
          {
            free_scc_vn ();
            return false;
          }
    }

  /* Initialize the value ids.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
        continue;
      info = VN_INFO (name);
      if (info->valnum == name
          || info->valnum == VN_TOP)
        info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
        info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate value ids until they stop changing.  */
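  /* A name's value may itself be an SSA name whose value id was only
     assigned later, so one pass is not necessarily enough.  */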
  while (changed)
    {
      changed = false;
      for (i = 1; i < num_ssa_names; ++i)
        {
          tree name = ssa_name (i);
          vn_ssa_aux_t info;
          if (!name)
            continue;
          info = VN_INFO (name);
          if (TREE_CODE (info->valnum) == SSA_NAME
              && info->valnum != name
              && info->value_id != VN_INFO (info->valnum)->value_id)
            {
              changed = true;
              info->value_id = VN_INFO (info->valnum)->value_id;
            }
        }
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          if (name
              && VN_INFO (name)->visited
              && SSA_VAL (name) != name)
            {
              print_generic_expr (dump_file, name, 0);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, SSA_VAL (name), 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  return true;
}

/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}

/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}

/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (type)
               && TYPE_OVERFLOW_TRAPS (type))
        honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
                                       honor_trapv,
                                       honor_nans, honor_snans, rhs2,
                                       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}