/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "basic-block.h"
28 #include "tree-pretty-print.h"
29 #include "gimple-pretty-print.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
32 #include "gimple.h"
33 #include "tree-dump.h"
34 #include "timevar.h"
35 #include "fibheap.h"
36 #include "hashtab.h"
37 #include "tree-iterator.h"
38 #include "alloc-pool.h"
39 #include "tree-pass.h"
40 #include "flags.h"
41 #include "bitmap.h"
42 #include "langhooks.h"
43 #include "cfgloop.h"
44 #include "params.h"
45 #include "tree-ssa-propagate.h"
46 #include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight line code,
   it is equivalent to a regular hash based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming
   from *outside of that SCC*, so we do not need to do anything special
   to ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
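
/* A purely illustrative sketch (this example is not from the original
   comment): consider value numbering the hypothetical SSA cycle

     # i_1 = PHI <0(entry), i_2(latch)>
     # j_3 = PHI <0(entry), j_4(latch)>
     i_2 = i_1 + 1;
     j_4 = j_3 + 1;

   The DFS walk pops the SCC {i_1, i_2, j_3, j_4} off the stack and
   iterates it with the optimistic table: assuming i_1 == j_3 makes
   i_2 == j_4, which in turn keeps i_1 == j_3 on the next iteration.
   The assumption stabilizes, so both chains end up with the same
   value numbers.  */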
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;
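
/* For illustration only (a sketch of the driver, not the driver
   itself), a multi-member SCC is processed roughly as

     current_info = optimistic_info;
     do
       changed = visit every member of the SCC;
     while (changed);
     current_info = valid_info;
     visit every member once more;

   so lookups during the iteration may rely on optimistic assumptions,
   while valid_info only ever receives information that survived the
   iteration.  */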
/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;


DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
                                SSA_NAME_VERSION (name));
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), value);
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}
/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is a default-definition or a PHI result
     use it directly.  */
  if (gimple_nop_p (def_stmt)
      || gimple_code (def_stmt) == GIMPLE_PHI)
    return vn->valnum;

  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
    {
    case tcc_reference:
      if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
           || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
           || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
          && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
        expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
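
/* Purely illustrative (a made-up example): if x_1 has value number v_2
   and v_2 is defined by "v_2 = -y_3" (a tcc_unary RHS), the expression
   -y_3 is built, cached in VN_INFO (v_2)->expr and returned, so callers
   can simplify uses of x_1 against it.  */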
/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, INSERT);
  if (*slot)
    return ((vn_constant_t)*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = (void *) vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return vro1->opcode == vro2->opcode
    && types_compatible_p (vro1->type, vro2->type)
    && expressions_equal_p (vro1->op0, vro2->op0)
    && expressions_equal_p (vro1->op1, vro2->op1)
    && expressions_equal_p (vro1->op2, vro2->op2);
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}
/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    {
      if (vro->opcode == MEM_REF)
        deref = true;
      else if (vro->opcode != ADDR_EXPR)
        deref = false;
      if (vro->off != -1)
        {
          if (off == -1)
            off = 0;
          off += vro->off;
        }
      else
        {
          if (off != -1
              && off != 0)
            result = iterative_hash_hashval_t (off, result);
          off = -1;
          if (deref
              && vro->opcode == ADDR_EXPR)
            {
              if (vro->op0)
                {
                  tree op = TREE_OPERAND (vro->op0, 0);
                  result = iterative_hash_hashval_t (TREE_CODE (op), result);
                  result = iterative_hash_expr (op, result);
                }
            }
          else
            result = vn_reference_op_compute_hash (vro, result);
        }
    }
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  unsigned i, j;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
        {
          if (vro1->opcode == MEM_REF)
            deref1 = true;
          if (vro1->off == -1)
            break;
          off1 += vro1->off;
        }
      for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
        {
          if (vro2->opcode == MEM_REF)
            deref2 = true;
          if (vro2->off == -1)
            break;
          off2 += vro2->off;
        }
      if (off1 != off2)
        return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
        {
          memset (&tem1, 0, sizeof (tem1));
          tem1.op0 = TREE_OPERAND (vro1->op0, 0);
          tem1.type = TREE_TYPE (tem1.op0);
          tem1.opcode = TREE_CODE (tem1.op0);
          vro1 = &tem1;
        }
      if (deref2 && vro2->opcode == ADDR_EXPR)
        {
          memset (&tem2, 0, sizeof (tem2));
          tem2.op0 = TREE_OPERAND (vro2->op0, 0);
          tem2.type = TREE_TYPE (tem2.op0);
          tem2.opcode = TREE_CODE (tem2.op0);
          vro2 = &tem2;
        }
      if (!vn_reference_op_eq (vro1, vro2))
        return false;
      ++j;
      ++i;
    }
  while (VEC_length (vn_reference_op_s, vr1->operands) != i
         || VEC_length (vn_reference_op_s, vr2->operands) != j);

  return true;
}
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;
      tree base;

      base = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
      if (!base)
        base = build_int_cst (ptr_type_node, 0);

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (base);
      temp.op0 = base;
      temp.op1 = TMR_ORIGINAL (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
        {
        case MISALIGNED_INDIRECT_REF:
          temp.op0 = TREE_OPERAND (ref, 1);
          break;
        case MEM_REF:
          /* The base address gets its own vn_reference_op_s structure.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          if (host_integerp (TREE_OPERAND (ref, 1), 0))
            temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          {
            tree this_offset = component_ref_field_offset (ref);
            if (this_offset
                && TREE_CODE (this_offset) == INTEGER_CST)
              {
                tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
                if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
                  {
                    double_int off
                      = double_int_add (tree_to_double_int (this_offset),
                                        double_int_sdiv
                                          (tree_to_double_int (bit_offset),
                                           uhwi_to_double_int (BITS_PER_UNIT),
                                           TRUNC_DIV_EXPR));
                    if (double_int_fits_in_shwi_p (off))
                      temp.off = off.low;
                  }
              }
          }
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
          if (TREE_CODE (temp.op0) == INTEGER_CST
              && TREE_CODE (temp.op1) == INTEGER_CST
              && TREE_CODE (temp.op2) == INTEGER_CST)
            {
              double_int off = tree_to_double_int (temp.op0);
              off = double_int_add (off,
                                    double_int_neg
                                      (tree_to_double_int (temp.op1)));
              off = double_int_mul (off, tree_to_double_int (temp.op2));
              if (double_int_fits_in_shwi_p (off))
                temp.off = off.low;
            }
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case VAR_DECL:
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (i.e. they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration.  */
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          temp.off = 0;
          break;
        case IMAGPART_EXPR:
          /* This is only interesting for its constant offset.  */
          temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
          break;
        default:
          gcc_unreachable ();
        }
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
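
/* An illustrative decomposition (example made up for exposition): the
   reference s.a[i_1] is pushed outermost-first as roughly

     { opcode = ARRAY_REF, op0 = i_1, op1 = low bound, op2 = elt size }
     { opcode = COMPONENT_REF, op0 = FIELD_DECL for a }
     { opcode = VAR_DECL, op0 = s }

   with temp.off filled in whenever the relevant operands are
   INTEGER_CSTs.  */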
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */
bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
        size = -1;
      else
        size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case ADDR_EXPR:
          /* Apart from ADDR_EXPR arguments to MEM_REF.  */
          if (base != NULL_TREE
              && TREE_CODE (base) == MEM_REF
              && op->op0
              && DECL_P (TREE_OPERAND (op->op0, 0)))
            {
              vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
              base = TREE_OPERAND (op->op0, 0);
              if (pop->off == -1)
                {
                  max_size = -1;
                  offset = 0;
                }
              else
                offset += pop->off * BITS_PER_UNIT;
              op0_p = NULL;
              break;
            }
          /* Fallthru.  */
        case CALL_EXPR:
          return false;

        /* Record the base objects.  */
        case MISALIGNED_INDIRECT_REF:
          *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case MEM_REF:
          base_alias_set = get_deref_alias_set (op->op0);
          *op0_p = build2 (MEM_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          op0_p = NULL;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += tree_low_cst (op->op1, 0);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */

            if (op->op1
                || !host_integerp (DECL_FIELD_OFFSET (field), 1))
              max_size = -1;
            else
              {
                offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
                           * BITS_PER_UNIT);
                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (!host_integerp (op->op0, 0)
              || !host_integerp (op->op1, 0)
              || !host_integerp (op->op2, 0))
            max_size = -1;
          else
            {
              HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
              hindex -= TREE_INT_CST_LOW (op->op1);
              hindex *= TREE_INT_CST_LOW (op->op2);
              hindex *= BITS_PER_UNIT;
              offset += hindex;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);

  return true;
}
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  temp.off = -1;
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
                            unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  tree addr_base;
  HOST_WIDE_INT addr_offset;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
                                             &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != op->op0)
    {
      double_int off = tree_to_double_int (mem_op->op0);
      off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
      off = double_int_add (off, shwi_to_double_int (addr_offset));
      mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (host_integerp (mem_op->op0, 0))
        mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
      else
        mem_op->off = -1;
    }
}
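
/* Hypothetical example of the fold: if the ops describe MEM[ptr_1 + 4]
   and ptr_1 has been valueized to &s.f, where s.f lives at unit offset
   8 within s, then op->op0 becomes &s and the preceding MEM_REF offset
   becomes 12, yielding MEM[&s + 12].  */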
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  VEC (vn_reference_op_s, heap) *operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = VEC_index (vn_reference_op_s, operands, 0);
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && VEC_length (vn_reference_op_s, operands) >= 2
      && VEC_length (vn_reference_op_s, operands) <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (VEC_length (vn_reference_op_s, operands) > 2)
        arg1 = VEC_index (vn_reference_op_s, operands, 2);
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
          || (arg0->opcode == ADDR_EXPR
              && is_gimple_min_invariant (arg0->op0)))
        anyconst = true;
      if (arg1
          && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
              || (arg1->opcode == ADDR_EXPR
                  && is_gimple_min_invariant (arg1->op0))))
        anyconst = true;
      if (anyconst)
        {
          tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
                                         arg1 ? 2 : 1,
                                         arg0->op0,
                                         arg1 ? arg1->op0 : NULL);
          if (folded
              && TREE_CODE (folded) == NOP_EXPR)
            folded = TREE_OPERAND (folded, 0);
          if (folded
              && is_gimple_min_invariant (folded))
            return folded;
        }
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
           && TREE_CODE (op->op0) == INTEGER_CST
           && integer_zerop (op->op1)
           && VEC_length (vn_reference_op_s, operands) == 2)
    {
      vn_reference_op_t arg0;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (arg0->opcode == STRING_CST
          && (TYPE_MODE (op->type)
              == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
          && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
          && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
          && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
        return build_int_cst_type (op->type,
                                   (TREE_STRING_POINTER (arg0->op0)
                                    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  vn_reference_op_t vro;
  unsigned int i;

  for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          vro->op0 = SSA_VAL (vro->op0);
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        vro->op1 = SSA_VAL (vro->op1);
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        vro->op2 = SSA_VAL (vro->op2);
      /* If it transforms from an SSA_NAME to an address, fold with
         a preceding indirect reference.  */
      if (i > 0
          && vro->op0
          && TREE_CODE (vro->op0) == ADDR_EXPR
          && VEC_index (vn_reference_op_s,
                        orig, i - 1)->opcode == MEM_REF)
        vn_reference_fold_indirect (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
         one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
               && vro->off == -1
               && TREE_CODE (vro->op0) == INTEGER_CST
               && TREE_CODE (vro->op1) == INTEGER_CST
               && TREE_CODE (vro->op2) == INTEGER_CST)
        {
          double_int off = tree_to_double_int (vro->op0);
          off = double_int_add (off,
                                double_int_neg
                                  (tree_to_double_int (vro->op1)));
          off = double_int_mul (off, tree_to_double_int (vro->op2));
          if (double_int_fits_in_shwi_p (off))
            vro->off = off.low;
        }
    }

  return orig;
}
static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

static tree *last_vuse_ptr;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree fndecl;
  tree base;
  HOST_WIDE_INT offset, maxsize;

  /* First try to disambiguate after value-replacing in the definition's LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      ao_ref ref1;
      VEC (vn_reference_op_s, heap) *operands = NULL;
      bool res = true;
      copy_reference_ops_from_ref (lhs, &operands);
      operands = valueize_refs (operands);
      if (ao_ref_init_from_vn_reference (&ref1, get_alias_set (lhs),
                                         TREE_TYPE (lhs), operands))
        res = refs_may_alias_p_1 (ref, &ref1, true);
      VEC_free (vn_reference_op_s, heap, operands);
      if (!res)
        return NULL;
    }

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && is_gimple_call (def_stmt)
      && (fndecl = gimple_call_fndecl (def_stmt))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
          == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = fold_convert (vr->type, integer_zero_node);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = fold_convert (vr->type, integer_zero_node);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      VEC (vn_reference_op_s, heap) *lhs = NULL, *rhs = NULL;
      vn_reference_op_t vro;
      ao_ref r;

      /* See if the assignment kills REF.  */
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (!operand_equal_p (base, base2, 0)
          || offset2 > offset
          || offset2 + size2 < offset + maxsize)
        return (void *)-1;

      /* Find the common base of ref and the lhs.  */
      copy_reference_ops_from_ref (gimple_assign_lhs (def_stmt), &lhs);
      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
      j = VEC_length (vn_reference_op_s, lhs) - 1;
      while (j >= 0 && i >= 0
             && vn_reference_op_eq (VEC_index (vn_reference_op_s,
                                               vr->operands, i),
                                    VEC_index (vn_reference_op_s, lhs, j)))
        {
          i--;
          j--;
        }

      VEC_free (vn_reference_op_s, heap, lhs);
      /* i now points to the first additional op.
         ??? LHS may not be completely contained in VR, one or more
         VIEW_CONVERT_EXPRs could be in its way.  We could at least
         try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
        return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
          > VEC_length (vn_reference_op_s, vr->operands))
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
                         i + 1 + VEC_length (vn_reference_op_s, rhs));
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands,
                      i + 1 + VEC_length (vn_reference_op_s, rhs));
      for (j = 0; VEC_iterate (vn_reference_op_s, rhs, j, vro); ++j)
        VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
      VEC_free (vn_reference_op_s, heap, rhs);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
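
/* Two illustrative cases (examples made up for exposition):

     memset (&a, 0, sizeof (a));   ...   x_1 = a.f;

   is handled by case 1 above, so the load of a.f value numbers to 0;
   and

     a = b;   ...   x_1 = a.f;

   is handled by the aggregate-copy case, which rewrites the lookup
   into b.f and continues the walk from the copy's VUSE.  */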
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            vn_reference_t *vnresult, bool maywalk)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
                 VEC_length (vn_reference_op_s, operands));
  memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
          VEC_address (vn_reference_op_s, operands),
          sizeof (vn_reference_op_s)
          * VEC_length (vn_reference_op_s, operands));
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && maywalk
      && vr1.vuse)
    {
      ao_ref r;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
        *vnresult =
          (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, tree vuse, bool maywalk,
                     vn_reference_t *vnresult)
{
  VEC (vn_reference_op_s, heap) *operands;
  struct vn_reference_s vr1;
  tree cst;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
  vr1.type = TREE_TYPE (op);
  vr1.set = get_alias_set (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  if (maywalk
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      ao_ref_init (&r, op);
      wvnresult =
        (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                vn_reference_lookup_2,
                                                vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
      if (wvnresult)
        {
          if (vnresult)
            *vnresult = wvnresult;
          return wvnresult->result;
        }

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}
/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            tree result, unsigned int value_id)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Compute and return the hash value for nary operation VNO1.  */

hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  hash = iterative_hash_hashval_t (vno1->opcode, 0);
  for (i = 0; i < vno1->length; ++i)
    hash = iterative_hash_expr (vno1->op[i], hash);

  return hash;
}

/* Return the computed hashcode for nary operation P1.  */

static hashval_t
vn_nary_op_hash (const void *p1)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  return vno1->hashcode;
}
/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

int
vn_nary_op_eq (const void *p1, const void *p2)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}
/* Lookup an n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0, tree op1, tree op2,
                          tree op3, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = code;
  vno1.length = length;
  vno1.type = type;
  vno1.op[0] = op0;
  vno1.op[1] = op1;
  vno1.op[2] = op2;
  vno1.op[3] = op3;
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = TREE_CODE (op);
  vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno1.type = TREE_TYPE (op);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = TREE_OPERAND (op, i);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = gimple_assign_rhs_code (stmt);
  vno1.length = gimple_num_ops (stmt) - 1;
  vno1.type = gimple_expr_type (stmt);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = gimple_op (stmt, i + 1);
  if (vno1.opcode == REALPART_EXPR
      || vno1.opcode == IMAGPART_EXPR
      || vno1.opcode == VIEW_CONVERT_EXPR)
    vno1.op[0] = TREE_OPERAND (vno1.op[0], 0);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Insert an n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0,
                          tree op1, tree op2, tree op3,
                          tree result,
                          unsigned int value_id)
{
  void **slot;
  vn_nary_op_t vno1;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = value_id;
  vno1->opcode = code;
  vno1->length = length;
  vno1->type = type;
  if (length >= 1)
    vno1->op[0] = op0;
  if (length >= 2)
    vno1->op[1] = op1;
  if (length >= 3)
    vno1->op[2] = op2;
  if (length >= 4)
    vno1->op[3] = op3;
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = TREE_CODE (op);
  vno1->length = length;
  vno1->type = TREE_TYPE (op);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = TREE_OPERAND (op, i);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  unsigned length = gimple_num_ops (stmt) - 1;
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = gimple_assign_rhs_code (stmt);
  vno1->length = length;
  vno1->type = gimple_expr_type (stmt);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = gimple_op (stmt, i + 1);
  if (vno1->opcode == REALPART_EXPR
      || vno1->opcode == IMAGPART_EXPR
      || vno1->opcode == VIEW_CONVERT_EXPR)
    vno1->op[0] = TREE_OPERAND (vno1->op[0], 0);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  hashval_t result;
  int i;
  tree phi1op;
  tree type;

  result = vp1->block->index;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
  result += (INTEGRAL_TYPE_P (type)
             + (INTEGRAL_TYPE_P (type)
                ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));

  for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
    {
      if (phi1op == VN_TOP)
        continue;
      result = iterative_hash_expr (phi1op, result);
    }

  return result;
}

/* Return the computed hashcode for phi operation P1.  */

static hashval_t
vn_phi_hash (const void *p1)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  return vp1->hashcode;
}
/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const void *p1, const void *p2)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  const_vn_phi_t const vp2 = (const_vn_phi_t) p2;

  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* If the PHI nodes do not have compatible types
         they are not the same.  */
      if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
                               TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
        return false;

      /* Any phi in the same block will have its arguments in the
         same edge order, because of how we store phi nodes.  */
      for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
        {
          tree phi2op = VEC_index (tree, vp2->phiargs, i);
          if (phi1op == VN_TOP || phi2op == VN_TOP)
            continue;
          if (!expressions_equal_p (phi1op, phi2op))
            return false;
        }
      return true;
    }
  return false;
}
1899 static VEC(tree, heap) *shared_lookup_phiargs;
1901 /* Lookup PHI in the current hash table, and return the resulting
1902 value number if it exists in the hash table. Return NULL_TREE if
1903 it does not exist in the hash table. */
1905 static tree
1906 vn_phi_lookup (gimple phi)
1908 void **slot;
1909 struct vn_phi_s vp1;
1910 unsigned i;
1912 VEC_truncate (tree, shared_lookup_phiargs, 0);
1914 /* Canonicalize the SSA_NAMEs to their value numbers. */
1915 for (i = 0; i < gimple_phi_num_args (phi); i++)
1917 tree def = PHI_ARG_DEF (phi, i);
1918 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
1919 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
1921 vp1.phiargs = shared_lookup_phiargs;
1922 vp1.block = gimple_bb (phi);
1923 vp1.hashcode = vn_phi_compute_hash (&vp1);
1924 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
1925 NO_INSERT);
1926 if (!slot && current_info == optimistic_info)
1927 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
1928 NO_INSERT);
1929 if (!slot)
1930 return NULL_TREE;
1931 return ((vn_phi_t)*slot)->result;
1934 /* Insert PHI into the current hash table with a value number of
1935 RESULT. */
1937 static vn_phi_t
1938 vn_phi_insert (gimple phi, tree result)
1940 void **slot;
1941 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
1942 unsigned i;
1943 VEC (tree, heap) *args = NULL;
1945 /* Canonicalize the SSA_NAMEs to their value numbers. */
1946 for (i = 0; i < gimple_phi_num_args (phi); i++)
1948 tree def = PHI_ARG_DEF (phi, i);
1949 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
1950 VEC_safe_push (tree, heap, args, def);
1952 vp1->value_id = VN_INFO (result)->value_id;
1953 vp1->phiargs = args;
1954 vp1->block = gimple_bb (phi);
1955 vp1->result = result;
1956 vp1->hashcode = vn_phi_compute_hash (vp1);
1958 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
1959 INSERT);
1961 /* Because we iterate over phi operations more than once, it's
1962 possible the slot might already exist here, hence no assert. */
1963 *slot = vp1;
1964 return vp1;
1968 /* Print the components of the strongly connected component SCC to OUT. */
1970 static void
1971 print_scc (FILE *out, VEC (tree, heap) *scc)
1973 tree var;
1974 unsigned int i;
1976 fprintf (out, "SCC consists of: ");
1977 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1979 print_generic_expr (out, var, 0);
1980 fprintf (out, " ");
1982 fprintf (out, "\n");
1985 /* Set the value number of FROM to TO, return true if it has changed
1986 as a result. */
1988 static inline bool
1989 set_ssa_val_to (tree from, tree to)
1991 tree currval;
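/* Do not record a value that is an SSA name occurring in an
   abnormal PHI; such names must not be substituted, so value FROM
   to itself instead.  */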
1993 if (from != to
1994 && TREE_CODE (to) == SSA_NAME
1995 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
1996 to = from;
1998 /* The only things we allow as value numbers are VN_TOP, SSA names
1999 and invariants, so assert that here. */
2000 gcc_assert (to != NULL_TREE
2001 && (to == VN_TOP
2002 || TREE_CODE (to) == SSA_NAME
2003 || is_gimple_min_invariant (to)));
2005 if (dump_file && (dump_flags & TDF_DETAILS))
2007 fprintf (dump_file, "Setting value number of ");
2008 print_generic_expr (dump_file, from, 0);
2009 fprintf (dump_file, " to ");
2010 print_generic_expr (dump_file, to, 0);
2013 currval = SSA_VAL (from);
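/* Changedness is what drives the surrounding SCC iteration to a
   fixpoint; only report a change when the new value really differs
   from the current one.  */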
2015 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
2017 VN_INFO (from)->valnum = to;
2018 if (dump_file && (dump_flags & TDF_DETAILS))
2019 fprintf (dump_file, " (changed)\n");
2020 return true;
2022 if (dump_file && (dump_flags & TDF_DETAILS))
2023 fprintf (dump_file, "\n");
2024 return false;
2027 /* Value number all definitions in STMT to themselves.
2028 Return true if a value number changed. */
2030 static bool
2031 defs_to_varying (gimple stmt)
2033 bool changed = false;
2034 ssa_op_iter iter;
2035 def_operand_p defp;
2037 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2039 tree def = DEF_FROM_PTR (defp);
2041 VN_INFO (def)->use_processed = true;
2042 changed |= set_ssa_val_to (def, def);
2044 return changed;
2047 static bool expr_has_constants (tree expr);
2048 static tree valueize_expr (tree expr);
2050 /* Visit a copy between LHS and RHS, return true if the value number
2051 changed. */
2053 static bool
2054 visit_copy (tree lhs, tree rhs)
2056 /* Follow chains of copies to their destination. */
2057 while (TREE_CODE (rhs) == SSA_NAME
2058 && SSA_VAL (rhs) != rhs)
2059 rhs = SSA_VAL (rhs);
2061 /* The copy may have a more interesting constant-filled expression
2062 (we don't, since we know our RHS is just an SSA name). */
2063 if (TREE_CODE (rhs) == SSA_NAME)
2065 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2066 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2069 return set_ssa_val_to (lhs, rhs);
2072 /* Visit a unary operator RHS, value number it, and return true if the
2073 value number of LHS has changed as a result. */
2075 static bool
2076 visit_unary_op (tree lhs, gimple stmt)
2078 bool changed = false;
2079 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2081 if (result)
2083 changed = set_ssa_val_to (lhs, result);
2085 else
2087 changed = set_ssa_val_to (lhs, lhs);
2088 vn_nary_op_insert_stmt (stmt, lhs);
2091 return changed;
2094 /* Visit a binary operator RHS, value number it, and return true if the
2095 value number of LHS has changed as a result. */
2097 static bool
2098 visit_binary_op (tree lhs, gimple stmt)
2100 bool changed = false;
2101 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2103 if (result)
2105 changed = set_ssa_val_to (lhs, result);
2107 else
2109 changed = set_ssa_val_to (lhs, lhs);
2110 vn_nary_op_insert_stmt (stmt, lhs);
2113 return changed;
2116 /* Visit a call STMT storing into LHS. Return true if the value number
2117 of the LHS has changed as a result. */
2119 static bool
2120 visit_reference_op_call (tree lhs, gimple stmt)
2122 bool changed = false;
2123 struct vn_reference_s vr1;
2124 tree result;
2125 tree vuse = gimple_vuse (stmt);
2127 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2128 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2129 vr1.type = gimple_expr_type (stmt);
2130 vr1.set = 0;
2131 vr1.hashcode = vn_reference_compute_hash (&vr1);
2132 result = vn_reference_lookup_1 (&vr1, NULL);
2133 if (result)
2135 changed = set_ssa_val_to (lhs, result);
2136 if (TREE_CODE (result) == SSA_NAME
2137 && VN_INFO (result)->has_constants)
2138 VN_INFO (lhs)->has_constants = true;
2140 else
2142 void **slot;
2143 vn_reference_t vr2;
2144 changed = set_ssa_val_to (lhs, lhs);
2145 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2146 vr2->vuse = vr1.vuse;
2147 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2148 vr2->type = vr1.type;
2149 vr2->set = vr1.set;
2150 vr2->hashcode = vr1.hashcode;
2151 vr2->result = lhs;
2152 slot = htab_find_slot_with_hash (current_info->references,
2153 vr2, vr2->hashcode, INSERT);
2154 if (*slot)
2155 free_reference (*slot);
2156 *slot = vr2;
2159 return changed;
2162 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2163 and return true if the value number of the LHS has changed as a result. */
2165 static bool
2166 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2168 bool changed = false;
2169 tree last_vuse;
2170 tree result;
2172 last_vuse = gimple_vuse (stmt);
2173 last_vuse_ptr = &last_vuse;
2174 result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);
2175 last_vuse_ptr = NULL;
2177 /* If we have a VCE, try looking up its operand as it might be stored in
2178 a different type. */
2179 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2180 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2181 true, NULL);
2183 /* We handle type-punning through unions by value-numbering based
2184 on offset and size of the access. Be prepared to handle a
2185 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
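/* A conceptual example (hypothetical types): after "u.i = x" for
   "union { int i; float f; } u", a load of u.f has the same offset
   and size, so the lookup can return x and we re-express it as
   VIEW_CONVERT_EXPR <float> (x).  */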
2186 if (result
2187 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2189 /* We will be setting the value number of lhs to the value number
2190 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2191 So first simplify and lookup this expression to see if it
2192 is already available. */
2193 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2194 if ((CONVERT_EXPR_P (val)
2195 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2196 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2198 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2199 if ((CONVERT_EXPR_P (tem)
2200 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2201 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2202 TREE_TYPE (val), tem)))
2203 val = tem;
2205 result = val;
2206 if (!is_gimple_min_invariant (val)
2207 && TREE_CODE (val) != SSA_NAME)
2208 result = vn_nary_op_lookup (val, NULL);
2209 /* If the expression is not yet available, value-number lhs to
2210 a new SSA_NAME we create. */
2211 if (!result)
2213 result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
2214 /* Initialize value-number information properly. */
2215 VN_INFO_GET (result)->valnum = result;
2216 VN_INFO (result)->value_id = get_next_value_id ();
2217 VN_INFO (result)->expr = val;
2218 VN_INFO (result)->has_constants = expr_has_constants (val);
2219 VN_INFO (result)->needs_insertion = true;
2220 /* As all "inserted" statements are singleton SCCs, insert
2221 to the valid table. This is strictly needed to
2222 avoid re-generating new value SSA_NAMEs for the same
2223 expression during SCC iteration over and over (the
2224 optimistic table gets cleared after each iteration).
2225 We do not need to insert into the optimistic table, as
2226 lookups there will fall back to the valid table. */
2227 if (current_info == optimistic_info)
2229 current_info = valid_info;
2230 vn_nary_op_insert (val, result);
2231 current_info = optimistic_info;
2233 else
2234 vn_nary_op_insert (val, result);
2235 if (dump_file && (dump_flags & TDF_DETAILS))
2237 fprintf (dump_file, "Inserting name ");
2238 print_generic_expr (dump_file, result, 0);
2239 fprintf (dump_file, " for expression ");
2240 print_generic_expr (dump_file, val, 0);
2241 fprintf (dump_file, "\n");
2246 if (result)
2248 changed = set_ssa_val_to (lhs, result);
2249 if (TREE_CODE (result) == SSA_NAME
2250 && VN_INFO (result)->has_constants)
2252 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2253 VN_INFO (lhs)->has_constants = true;
2256 else
2258 changed = set_ssa_val_to (lhs, lhs);
2259 vn_reference_insert (op, lhs, last_vuse);
2262 return changed;
2266 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2267 and return true if the value number of the LHS has changed as a result. */
2269 static bool
2270 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2272 bool changed = false;
2273 tree result;
2274 bool resultsame = false;
2276 /* First we want to lookup using the *vuses* from the store and see
2277 if the last store to this location with the same address
2278 stored the same value.
2280 The vuses represent the memory state before the store. If the
2281 memory state, address, and value of the store are the same as the
2282 last store to this location, then this store will produce the
2283 same memory state as that store.
2285 In this case the vdef versions for this store are value numbered to those
2286 vuse versions, since they represent the same memory state after
2287 this store.
2289 Otherwise, the vdefs for the store are used when inserting into
2290 the table, since the store generates a new memory state. */
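/* For example, in "*p = x; ... *p = x;" with no intervening clobber
   of *p, the second store looks up the first one's value, matches,
   and its vdef is valued to its vuse, exposing the redundancy.  */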
2292 result = vn_reference_lookup (lhs, gimple_vuse (stmt), false, NULL);
2294 if (result)
2296 if (TREE_CODE (result) == SSA_NAME)
2297 result = SSA_VAL (result);
2298 if (TREE_CODE (op) == SSA_NAME)
2299 op = SSA_VAL (op);
2300 resultsame = expressions_equal_p (result, op);
2303 if (!result || !resultsame)
2305 tree vdef;
2307 if (dump_file && (dump_flags & TDF_DETAILS))
2309 fprintf (dump_file, "No store match\n");
2310 fprintf (dump_file, "Value numbering store ");
2311 print_generic_expr (dump_file, lhs, 0);
2312 fprintf (dump_file, " to ");
2313 print_generic_expr (dump_file, op, 0);
2314 fprintf (dump_file, "\n");
2316 /* Have to set value numbers before insert, since insert is
2317 going to valueize the references in-place. */
2318 if ((vdef = gimple_vdef (stmt)))
2320 VN_INFO (vdef)->use_processed = true;
2321 changed |= set_ssa_val_to (vdef, vdef);
2324 /* Do not insert structure copies into the tables. */
2325 if (is_gimple_min_invariant (op)
2326 || is_gimple_reg (op))
2327 vn_reference_insert (lhs, op, vdef);
2329 else
2331 /* We had a match, so value number the vdef to have the value
2332 number of the vuse it came from. */
2333 tree def, use;
2335 if (dump_file && (dump_flags & TDF_DETAILS))
2336 fprintf (dump_file, "Store matched earlier value, "
2337 "value numbering store vdefs to matching vuses.\n");
2339 def = gimple_vdef (stmt);
2340 use = gimple_vuse (stmt);
2342 VN_INFO (def)->use_processed = true;
2343 changed |= set_ssa_val_to (def, SSA_VAL (use));
2346 return changed;
2349 /* Visit and value number PHI, return true if the value number
2350 changed. */
2352 static bool
2353 visit_phi (gimple phi)
2355 bool changed = false;
2356 tree result;
2357 tree sameval = VN_TOP;
2358 bool allsame = true;
2359 unsigned i;
2361 /* TODO: We could check for this in init_sccvn, and replace this
2362 with a gcc_assert. */
2363 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2364 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2366 /* See if all non-TOP arguments have the same value. TOP is
2367 equivalent to everything, so we can ignore it. */
2368 for (i = 0; i < gimple_phi_num_args (phi); i++)
2370 tree def = PHI_ARG_DEF (phi, i);
2372 if (TREE_CODE (def) == SSA_NAME)
2373 def = SSA_VAL (def);
2374 if (def == VN_TOP)
2375 continue;
2376 if (sameval == VN_TOP)
2378 sameval = def;
2380 else
2382 if (!expressions_equal_p (def, sameval))
2384 allsame = false;
2385 break;
2390 /* If all arguments value numbered to the same value, the phi node
2391 has that value. */
2392 if (allsame)
2394 if (is_gimple_min_invariant (sameval))
2396 VN_INFO (PHI_RESULT (phi))->has_constants = true;
2397 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2399 else
2401 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2402 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2405 if (TREE_CODE (sameval) == SSA_NAME)
2406 return visit_copy (PHI_RESULT (phi), sameval);
2408 return set_ssa_val_to (PHI_RESULT (phi), sameval);
2411 /* Otherwise, see if it is equivalent to a phi node in this block. */
2412 result = vn_phi_lookup (phi);
2413 if (result)
2415 if (TREE_CODE (result) == SSA_NAME)
2416 changed = visit_copy (PHI_RESULT (phi), result);
2417 else
2418 changed = set_ssa_val_to (PHI_RESULT (phi), result);
2420 else
2422 vn_phi_insert (phi, PHI_RESULT (phi));
2423 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2424 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2425 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2428 return changed;
2431 /* Return true if EXPR contains constants. */
2433 static bool
2434 expr_has_constants (tree expr)
2436 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2438 case tcc_unary:
2439 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2441 case tcc_binary:
2442 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2443 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2444 /* Constants inside reference ops are rarely interesting, but
2445 it can take a lot of looking to find them. */
2446 case tcc_reference:
2447 case tcc_declaration:
2448 return false;
2449 default:
2450 return is_gimple_min_invariant (expr);
2452 return false;
2455 /* Return true if STMT contains constants. */
2457 static bool
2458 stmt_has_constants (gimple stmt)
2460 if (gimple_code (stmt) != GIMPLE_ASSIGN)
2461 return false;
2463 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2465 case GIMPLE_UNARY_RHS:
2466 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2468 case GIMPLE_BINARY_RHS:
2469 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2470 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
2471 case GIMPLE_TERNARY_RHS:
2472 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2473 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
2474 || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
2475 case GIMPLE_SINGLE_RHS:
2476 /* Constants inside reference ops are rarely interesting, but
2477 it can take a lot of looking to find them. */
2478 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2479 default:
2480 gcc_unreachable ();
2482 return false;
2485 /* Replace SSA_NAMES in expr with their value numbers, and return the
2486 result.
2487 This is performed in place. */
2489 static tree
2490 valueize_expr (tree expr)
2492 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2494 case tcc_unary:
2495 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2496 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2497 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2498 break;
2499 case tcc_binary:
2500 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2501 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2502 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2503 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
2504 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
2505 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
2506 break;
2507 default:
2508 break;
2510 return expr;
2513 /* Simplify the binary expression RHS, and return the result if
2514 simplified. */
2516 static tree
2517 simplify_binary_expression (gimple stmt)
2519 tree result = NULL_TREE;
2520 tree op0 = gimple_assign_rhs1 (stmt);
2521 tree op1 = gimple_assign_rhs2 (stmt);
2523 /* This will not catch every single case we could combine, but will
2524 catch those with constants. The goal here is to simultaneously
2525 combine constants between expressions, but avoid infinite
2526 expansion of expressions during simplification. */
2527 if (TREE_CODE (op0) == SSA_NAME)
2529 if (VN_INFO (op0)->has_constants
2530 || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
2531 op0 = valueize_expr (vn_get_expr_for (op0));
2532 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
2533 op0 = SSA_VAL (op0);
2536 if (TREE_CODE (op1) == SSA_NAME)
2538 if (VN_INFO (op1)->has_constants)
2539 op1 = valueize_expr (vn_get_expr_for (op1));
2540 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
2541 op1 = SSA_VAL (op1);
2544 /* Avoid folding if nothing changed. */
2545 if (op0 == gimple_assign_rhs1 (stmt)
2546 && op1 == gimple_assign_rhs2 (stmt))
2547 return NULL_TREE;
2549 fold_defer_overflow_warnings ();
2551 result = fold_binary (gimple_assign_rhs_code (stmt),
2552 gimple_expr_type (stmt), op0, op1);
2553 if (result)
2554 STRIP_USELESS_TYPE_CONVERSION (result);
2556 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
2557 stmt, 0);
2559 /* Make sure result is not a complex expression consisting
2560 of operators of operators (i.e. (a + b) + (a + c)).
2561 Otherwise, we will end up with unbounded expressions if
2562 fold does anything at all. */
2563 if (result && valid_gimple_rhs_p (result))
2564 return result;
2566 return NULL_TREE;
2569 /* Simplify the unary expression RHS, and return the result if
2570 simplified. */
2572 static tree
2573 simplify_unary_expression (gimple stmt)
2575 tree result = NULL_TREE;
2576 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
2578 /* We handle some tcc_reference codes here that are all
2579 GIMPLE_ASSIGN_SINGLE codes. */
2580 if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
2581 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2582 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2583 op0 = TREE_OPERAND (op0, 0);
2585 if (TREE_CODE (op0) != SSA_NAME)
2586 return NULL_TREE;
2588 orig_op0 = op0;
2589 if (VN_INFO (op0)->has_constants)
2590 op0 = valueize_expr (vn_get_expr_for (op0));
2591 else if (gimple_assign_cast_p (stmt)
2592 || gimple_assign_rhs_code (stmt) == REALPART_EXPR
2593 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2594 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2596 /* We want to do tree-combining on conversion-like expressions.
2597 Make sure we feed only SSA_NAMEs or constants to fold though. */
2598 tree tem = valueize_expr (vn_get_expr_for (op0));
2599 if (UNARY_CLASS_P (tem)
2600 || BINARY_CLASS_P (tem)
2601 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
2602 || TREE_CODE (tem) == SSA_NAME
2603 || is_gimple_min_invariant (tem))
2604 op0 = tem;
2607 /* Avoid folding if nothing changed, but remember the expression. */
2608 if (op0 == orig_op0)
2609 return NULL_TREE;
2611 result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
2612 gimple_expr_type (stmt), op0);
2613 if (result)
2615 STRIP_USELESS_TYPE_CONVERSION (result);
2616 if (valid_gimple_rhs_p (result))
2617 return result;
2620 return NULL_TREE;
2623 /* Try to simplify RHS using equivalences and constant folding. */
2625 static tree
2626 try_to_simplify (gimple stmt)
2628 tree tem;
2630 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
2631 in this case; there is no point in doing extra work. */
2632 if (gimple_assign_copy_p (stmt)
2633 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2634 return NULL_TREE;
2636 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2638 case tcc_declaration:
2639 tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
2640 if (tem)
2641 return tem;
2642 break;
2644 case tcc_reference:
2645 /* Do not do full-blown reference lookup here, but simplify
2646 reads from constant aggregates. */
2647 tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
2648 if (tem)
2649 return tem;
2651 /* Fallthrough for some codes that can operate on registers. */
2652 if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
2653 || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
2654 || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
2655 break;
2656 /* We could do a little more with unary ops, if they expand
2657 into binary ops, but it's debatable whether it is worth it. */
2658 case tcc_unary:
2659 return simplify_unary_expression (stmt);
2660 break;
2661 case tcc_comparison:
2662 case tcc_binary:
2663 return simplify_binary_expression (stmt);
2664 break;
2665 default:
2666 break;
2669 return NULL_TREE;
2672 /* Visit and value number USE, return true if the value number
2673 changed. */
2675 static bool
2676 visit_use (tree use)
2678 bool changed = false;
2679 gimple stmt = SSA_NAME_DEF_STMT (use);
2681 VN_INFO (use)->use_processed = true;
2683 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
2684 if (dump_file && (dump_flags & TDF_DETAILS)
2685 && !SSA_NAME_IS_DEFAULT_DEF (use))
2687 fprintf (dump_file, "Value numbering ");
2688 print_generic_expr (dump_file, use, 0);
2689 fprintf (dump_file, " stmt = ");
2690 print_gimple_stmt (dump_file, stmt, 0, 0);
2693 /* Handle uninitialized uses. */
2694 if (SSA_NAME_IS_DEFAULT_DEF (use))
2695 changed = set_ssa_val_to (use, use);
2696 else
2698 if (gimple_code (stmt) == GIMPLE_PHI)
2699 changed = visit_phi (stmt);
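/* Statements without an LHS, with volatile operands, or that may
   throw cannot be value-numbered; give all their defs varying
   value numbers.  */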
2700 else if (!gimple_has_lhs (stmt)
2701 || gimple_has_volatile_ops (stmt)
2702 || stmt_could_throw_p (stmt))
2703 changed = defs_to_varying (stmt);
2704 else if (is_gimple_assign (stmt))
2706 tree lhs = gimple_assign_lhs (stmt);
2707 tree simplified;
2709 /* Shortcut for copies. Simplifying copies is pointless,
2710 since we copy the expression and value they represent. */
2711 if (gimple_assign_copy_p (stmt)
2712 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2713 && TREE_CODE (lhs) == SSA_NAME)
2715 changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
2716 goto done;
2718 simplified = try_to_simplify (stmt);
2719 if (simplified)
2721 if (dump_file && (dump_flags & TDF_DETAILS))
2723 fprintf (dump_file, "RHS ");
2724 print_gimple_expr (dump_file, stmt, 0, 0);
2725 fprintf (dump_file, " simplified to ");
2726 print_generic_expr (dump_file, simplified, 0);
2727 if (TREE_CODE (lhs) == SSA_NAME)
2728 fprintf (dump_file, " has constants %d\n",
2729 expr_has_constants (simplified));
2730 else
2731 fprintf (dump_file, "\n");
2734 /* Setting value numbers to constants will occasionally
2735 screw up phi congruence because constants are not
2736 uniquely associated with a single ssa name that can be
2737 looked up. */
2738 if (simplified
2739 && is_gimple_min_invariant (simplified)
2740 && TREE_CODE (lhs) == SSA_NAME)
2742 VN_INFO (lhs)->expr = simplified;
2743 VN_INFO (lhs)->has_constants = true;
2744 changed = set_ssa_val_to (lhs, simplified);
2745 goto done;
2747 else if (simplified
2748 && TREE_CODE (simplified) == SSA_NAME
2749 && TREE_CODE (lhs) == SSA_NAME)
2751 changed = visit_copy (lhs, simplified);
2752 goto done;
2754 else if (simplified)
2756 if (TREE_CODE (lhs) == SSA_NAME)
2758 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
2759 /* We have to unshare the expression or else
2760 valueizing may change the IL stream. */
2761 VN_INFO (lhs)->expr = unshare_expr (simplified);
2764 else if (stmt_has_constants (stmt)
2765 && TREE_CODE (lhs) == SSA_NAME)
2766 VN_INFO (lhs)->has_constants = true;
2767 else if (TREE_CODE (lhs) == SSA_NAME)
2769 /* We reset expr and constantness here because we may
2770 have been value numbering optimistically, and
2771 iterating. They may become non-constant in this case,
2772 even if they were optimistically constant. */
2774 VN_INFO (lhs)->has_constants = false;
2775 VN_INFO (lhs)->expr = NULL_TREE;
2778 if ((TREE_CODE (lhs) == SSA_NAME
2779 /* We can substitute SSA_NAMEs that are live over
2780 abnormal edges with their constant value. */
2781 && !(gimple_assign_copy_p (stmt)
2782 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2783 && !(simplified
2784 && is_gimple_min_invariant (simplified))
2785 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2786 /* Stores or copies from SSA_NAMEs that are live over
2787 abnormal edges are a problem. */
2788 || (gimple_assign_single_p (stmt)
2789 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2790 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
2791 changed = defs_to_varying (stmt);
2792 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
2794 changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
2796 else if (TREE_CODE (lhs) == SSA_NAME)
2798 if ((gimple_assign_copy_p (stmt)
2799 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2800 || (simplified
2801 && is_gimple_min_invariant (simplified)))
2803 VN_INFO (lhs)->has_constants = true;
2804 if (simplified)
2805 changed = set_ssa_val_to (lhs, simplified);
2806 else
2807 changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
2809 else
2811 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2813 case GIMPLE_UNARY_RHS:
2814 changed = visit_unary_op (lhs, stmt);
2815 break;
2816 case GIMPLE_BINARY_RHS:
2817 changed = visit_binary_op (lhs, stmt);
2818 break;
2819 case GIMPLE_SINGLE_RHS:
2820 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2822 case tcc_reference:
2823 /* VOP-less references can go through unary case. */
2824 if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
2825 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2826 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR )
2827 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
2829 changed = visit_unary_op (lhs, stmt);
2830 break;
2832 /* Fallthrough. */
2833 case tcc_declaration:
2834 changed = visit_reference_op_load
2835 (lhs, gimple_assign_rhs1 (stmt), stmt);
2836 break;
2837 case tcc_expression:
2838 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
2840 changed = visit_unary_op (lhs, stmt);
2841 break;
2843 /* Fallthrough. */
2844 default:
2845 changed = defs_to_varying (stmt);
2847 break;
2848 default:
2849 changed = defs_to_varying (stmt);
2850 break;
2854 else
2855 changed = defs_to_varying (stmt);
2857 else if (is_gimple_call (stmt))
2859 tree lhs = gimple_call_lhs (stmt);
2861 /* ??? We could try to simplify calls. */
2863 if (stmt_has_constants (stmt)
2864 && TREE_CODE (lhs) == SSA_NAME)
2865 VN_INFO (lhs)->has_constants = true;
2866 else if (TREE_CODE (lhs) == SSA_NAME)
2868 /* We reset expr and constantness here because we may
2869 have been value numbering optimistically, and
2870 iterating. They may become non-constant in this case,
2871 even if they were optimistically constant. */
2872 VN_INFO (lhs)->has_constants = false;
2873 VN_INFO (lhs)->expr = NULL_TREE;
2876 if (TREE_CODE (lhs) == SSA_NAME
2877 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2878 changed = defs_to_varying (stmt);
2879 /* ??? We should handle stores from calls. */
2880 else if (TREE_CODE (lhs) == SSA_NAME)
2882 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
2883 changed = visit_reference_op_call (lhs, stmt);
2884 else
2885 changed = defs_to_varying (stmt);
2887 else
2888 changed = defs_to_varying (stmt);
2891 done:
2892 return changed;
2895 /* Compare two operands by reverse postorder index. */
2897 static int
2898 compare_ops (const void *pa, const void *pb)
2900 const tree opa = *((const tree *)pa);
2901 const tree opb = *((const tree *)pb);
2902 gimple opstmta = SSA_NAME_DEF_STMT (opa);
2903 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
2904 basic_block bba;
2905 basic_block bbb;
2907 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
2908 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2909 else if (gimple_nop_p (opstmta))
2910 return -1;
2911 else if (gimple_nop_p (opstmtb))
2912 return 1;
2914 bba = gimple_bb (opstmta);
2915 bbb = gimple_bb (opstmtb);
2917 if (!bba && !bbb)
2918 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2919 else if (!bba)
2920 return -1;
2921 else if (!bbb)
2922 return 1;
2924 if (bba == bbb)
2926 if (gimple_code (opstmta) == GIMPLE_PHI
2927 && gimple_code (opstmtb) == GIMPLE_PHI)
2928 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2929 else if (gimple_code (opstmta) == GIMPLE_PHI)
2930 return -1;
2931 else if (gimple_code (opstmtb) == GIMPLE_PHI)
2932 return 1;
2933 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
2934 return gimple_uid (opstmta) - gimple_uid (opstmtb);
2935 else
2936 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2938 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
2941 /* Sort an array containing members of a strongly connected component
2942 SCC so that the members are ordered by RPO number.
2943 This means that when the sort is complete, iterating through the
2944 array will give you the members in RPO order. */
2946 static void
2947 sort_scc (VEC (tree, heap) *scc)
2949 qsort (VEC_address (tree, scc),
2950 VEC_length (tree, scc),
2951 sizeof (tree),
2952 compare_ops);
2955 /* Insert the no longer used nary ONARY into the hash table of INFO. */
2957 static void
2958 copy_nary (vn_nary_op_t onary, vn_tables_t info)
2960 size_t size = (sizeof (struct vn_nary_op_s)
2961 - sizeof (tree) * (4 - onary->length));
2962 vn_nary_op_t nary = (vn_nary_op_t) obstack_alloc (&info->nary_obstack, size);
2963 void **slot;
2964 memcpy (nary, onary, size);
2965 slot = htab_find_slot_with_hash (info->nary, nary, nary->hashcode, INSERT);
2966 gcc_assert (!*slot);
2967 *slot = nary;
2970 /* Insert the no longer used phi OPHI into the hash table of INFO. */
2972 static void
2973 copy_phi (vn_phi_t ophi, vn_tables_t info)
2975 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
2976 void **slot;
2977 memcpy (phi, ophi, sizeof (*phi));
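/* Transfer ownership of the PHI argument vector to the copy so that
   deleting the optimistic table does not free it as well;
   copy_reference below does the same for the operand vector.  */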
2978 ophi->phiargs = NULL;
2979 slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
2980 gcc_assert (!*slot);
2981 *slot = phi;
2984 /* Insert the no longer used reference OREF into the hash table of INFO. */
2986 static void
2987 copy_reference (vn_reference_t oref, vn_tables_t info)
2989 vn_reference_t ref;
2990 void **slot;
2991 ref = (vn_reference_t) pool_alloc (info->references_pool);
2992 memcpy (ref, oref, sizeof (*ref));
2993 oref->operands = NULL;
2994 slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
2995 INSERT);
2996 if (*slot)
2997 free_reference (*slot);
2998 *slot = ref;
3001 /* Process a strongly connected component in the SSA graph. */
3003 static void
3004 process_scc (VEC (tree, heap) *scc)
3006 tree var;
3007 unsigned int i;
3008 unsigned int iterations = 0;
3009 bool changed = true;
3010 htab_iterator hi;
3011 vn_nary_op_t nary;
3012 vn_phi_t phi;
3013 vn_reference_t ref;
3015 /* If the SCC has a single member, just visit it. */
3016 if (VEC_length (tree, scc) == 1)
3018 tree use = VEC_index (tree, scc, 0);
3019 if (!VN_INFO (use)->use_processed)
3020 visit_use (use);
3021 return;
3024 /* Iterate over the SCC with the optimistic table until it stops
3025 changing. */
3026 current_info = optimistic_info;
3027 while (changed)
3029 changed = false;
3030 iterations++;
3031 /* As we are value-numbering optimistically we have to
3032 clear the expression tables and the simplified expressions
3033 in each iteration until we converge. */
3034 htab_empty (optimistic_info->nary);
3035 htab_empty (optimistic_info->phis);
3036 htab_empty (optimistic_info->references);
3037 obstack_free (&optimistic_info->nary_obstack, NULL);
3038 gcc_obstack_init (&optimistic_info->nary_obstack);
3039 empty_alloc_pool (optimistic_info->phis_pool);
3040 empty_alloc_pool (optimistic_info->references_pool);
3041 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
3042 VN_INFO (var)->expr = NULL_TREE;
3043 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
3044 changed |= visit_use (var);
3047 statistics_histogram_event (cfun, "SCC iterations", iterations);
3049 /* Finally, copy the contents of the no longer used optimistic
3050 table to the valid table. */
3051 FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
3052 copy_nary (nary, valid_info);
3053 FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
3054 copy_phi (phi, valid_info);
3055 FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
3056 copy_reference (ref, valid_info);
3058 current_info = valid_info;
3061 DEF_VEC_O(ssa_op_iter);
3062 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
3064 /* Pop the components of the found SCC for NAME off the SCC stack
3065 and process them. Returns true if all went well, false if
3066 we ran into resource limits. */
3068 static bool
3069 extract_and_process_scc_for_name (tree name)
3071 VEC (tree, heap) *scc = NULL;
3072 tree x;
3074 /* Found an SCC, pop the components off the SCC stack and
3075 process them. */
3076 do
3077 {
3078 x = VEC_pop (tree, sccstack);
3080 VN_INFO (x)->on_sccstack = false;
3081 VEC_safe_push (tree, heap, scc, x);
3082 } while (x != name);
3084 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
3085 if (VEC_length (tree, scc)
3086 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3088 if (dump_file)
3089 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3090 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
3091 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3092 return false;
3095 if (VEC_length (tree, scc) > 1)
3096 sort_scc (scc);
3098 if (dump_file && (dump_flags & TDF_DETAILS))
3099 print_scc (dump_file, scc);
3101 process_scc (scc);
3103 VEC_free (tree, heap, scc);
3105 return true;
3108 /* Depth first search on NAME to discover and process SCC's in the SSA
3109 graph.
3110 Execution of this algorithm relies on the fact that the SCC's are
3111 popped off the stack in topological order.
3112 Returns true if successful, false if we stopped processing SCC's due
3113 to resource constraints. */
3115 static bool
3116 DFS (tree name)
3118 VEC(ssa_op_iter, heap) *itervec = NULL;
3119 VEC(tree, heap) *namevec = NULL;
3120 use_operand_p usep = NULL;
3121 gimple defstmt;
3122 tree use;
3123 ssa_op_iter iter;
3125 start_over:
3126 /* SCC info */
3127 VN_INFO (name)->dfsnum = next_dfs_num++;
3128 VN_INFO (name)->visited = true;
3129 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3131 VEC_safe_push (tree, heap, sccstack, name);
3132 VN_INFO (name)->on_sccstack = true;
3133 defstmt = SSA_NAME_DEF_STMT (name);
3135 /* Recursively DFS on our operands, looking for SCC's. */
3136 if (!gimple_nop_p (defstmt))
3138 /* Push a new iterator. */
3139 if (gimple_code (defstmt) == GIMPLE_PHI)
3140 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3141 else
3142 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3144 else
3145 clear_and_done_ssa_iter (&iter);
3147 while (1)
3149 /* If we are done processing uses of a name, go up the stack
3150 of iterators and process SCCs as we found them. */
3151 if (op_iter_done (&iter))
3153 /* See if we found an SCC. */
3154 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3155 if (!extract_and_process_scc_for_name (name))
3157 VEC_free (tree, heap, namevec);
3158 VEC_free (ssa_op_iter, heap, itervec);
3159 return false;
3162 /* Check if we are done. */
3163 if (VEC_empty (tree, namevec))
3165 VEC_free (tree, heap, namevec);
3166 VEC_free (ssa_op_iter, heap, itervec);
3167 return true;
3170 /* Restore the last use walker and continue walking there. */
3171 use = name;
3172 name = VEC_pop (tree, namevec);
3173 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
3174 sizeof (ssa_op_iter));
3175 VEC_pop (ssa_op_iter, itervec);
3176 goto continue_walking;
3179 use = USE_FROM_PTR (usep);
3181 /* Since we handle phi nodes, we will sometimes get
3182 invariants in the use expression. */
3183 if (TREE_CODE (use) == SSA_NAME)
3185 if (! (VN_INFO (use)->visited))
3187 /* Recurse by pushing the current use walking state on
3188 the stack and starting over. */
3189 VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
3190 VEC_safe_push(tree, heap, namevec, name);
3191 name = use;
3192 goto start_over;
3194 continue_walking:
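/* Tarjan-style bookkeeping: fold the just-visited use's low value
   into NAME's, and treat uses still on the SCC stack as back edges
   that lower NAME's low value to their DFS number.  */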
3195 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3196 VN_INFO (use)->low);
3198 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3199 && VN_INFO (use)->on_sccstack)
3201 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3202 VN_INFO (name)->low);
3206 usep = op_iter_next_use (&iter);
3210 /* Allocate a value number table. */
3212 static void
3213 allocate_vn_table (vn_tables_t table)
3215 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3216 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3217 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3218 free_reference);
3220 gcc_obstack_init (&table->nary_obstack);
3221 table->phis_pool = create_alloc_pool ("VN phis",
3222 sizeof (struct vn_phi_s),
3223 30);
3224 table->references_pool = create_alloc_pool ("VN references",
3225 sizeof (struct vn_reference_s),
3226 30);
3229 /* Free a value number table. */
3231 static void
3232 free_vn_table (vn_tables_t table)
3234 htab_delete (table->phis);
3235 htab_delete (table->nary);
3236 htab_delete (table->references);
3237 obstack_free (&table->nary_obstack, NULL);
3238 free_alloc_pool (table->phis_pool);
3239 free_alloc_pool (table->references_pool);
3242 static void
3243 init_scc_vn (void)
3245 size_t i;
3246 int j;
3247 int *rpo_numbers_temp;
3249 calculate_dominance_info (CDI_DOMINATORS);
3250 sccstack = NULL;
3251 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3252 free);
3254 constant_value_ids = BITMAP_ALLOC (NULL);
3256 next_dfs_num = 1;
3257 next_value_id = 1;
3259 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
3260 /* VEC_alloc doesn't actually grow it to the right size, it just
3261 preallocates the space to do so. */
3262 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
3263 gcc_obstack_init (&vn_ssa_aux_obstack);
3265 shared_lookup_phiargs = NULL;
3266 shared_lookup_references = NULL;
3267 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3268 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3269 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3271 /* rpo_numbers_temp is the RPO order itself: rpo_numbers_temp[i] is
3272 the index of the i'th block in RPO order. We want the inverse map,
3273 from block index to RPO number, so rearrange it into rpo_numbers. */
3274 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3275 rpo_numbers[rpo_numbers_temp[j]] = j;
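/* E.g. (hypothetical CFG): if rpo_numbers_temp is { 2, 3, 5, 4 },
   then after the inversion rpo_numbers[2] = 0, rpo_numbers[3] = 1,
   rpo_numbers[5] = 2 and rpo_numbers[4] = 3.  */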
3277 XDELETE (rpo_numbers_temp);
3279 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3281 /* Create the VN_INFO structures, and initialize value numbers to
3282 TOP. */
3283 for (i = 0; i < num_ssa_names; i++)
3285 tree name = ssa_name (i);
3286 if (name)
3288 VN_INFO_GET (name)->valnum = VN_TOP;
3289 VN_INFO (name)->expr = NULL_TREE;
3290 VN_INFO (name)->value_id = 0;
3294 renumber_gimple_stmt_uids ();
3296 /* Create the valid and optimistic value numbering tables. */
3297 valid_info = XCNEW (struct vn_tables_s);
3298 allocate_vn_table (valid_info);
3299 optimistic_info = XCNEW (struct vn_tables_s);
3300 allocate_vn_table (optimistic_info);
3303 void
3304 free_scc_vn (void)
3306 size_t i;
3308 htab_delete (constant_to_value_id);
3309 BITMAP_FREE (constant_value_ids);
3310 VEC_free (tree, heap, shared_lookup_phiargs);
3311 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
3312 XDELETEVEC (rpo_numbers);
3314 for (i = 0; i < num_ssa_names; i++)
3316 tree name = ssa_name (i);
3317 if (name
3318 && VN_INFO (name)->needs_insertion)
3319 release_ssa_name (name);
3321 obstack_free (&vn_ssa_aux_obstack, NULL);
3322 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
3324 VEC_free (tree, heap, sccstack);
3325 free_vn_table (valid_info);
3326 XDELETE (valid_info);
3327 free_vn_table (optimistic_info);
3328 XDELETE (optimistic_info);
3331 /* Set the value ids in the valid hash tables. */
3333 static void
3334 set_hashtable_value_ids (void)
3336 htab_iterator hi;
3337 vn_nary_op_t vno;
3338 vn_reference_t vr;
3339 vn_phi_t vp;
3341 /* Now set the value ids of the things we had put in the hash
3342 table. */
3344 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
3345 vno, vn_nary_op_t, hi)
3347 if (vno->result)
3349 if (TREE_CODE (vno->result) == SSA_NAME)
3350 vno->value_id = VN_INFO (vno->result)->value_id;
3351 else if (is_gimple_min_invariant (vno->result))
3352 vno->value_id = get_or_alloc_constant_value_id (vno->result);
3356 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
3357 vp, vn_phi_t, hi)
3359 if (vp->result)
3361 if (TREE_CODE (vp->result) == SSA_NAME)
3362 vp->value_id = VN_INFO (vp->result)->value_id;
3363 else if (is_gimple_min_invariant (vp->result))
3364 vp->value_id = get_or_alloc_constant_value_id (vp->result);
3368 FOR_EACH_HTAB_ELEMENT (valid_info->references,
3369 vr, vn_reference_t, hi)
3371 if (vr->result)
3373 if (TREE_CODE (vr->result) == SSA_NAME)
3374 vr->value_id = VN_INFO (vr->result)->value_id;
3375 else if (is_gimple_min_invariant (vr->result))
3376 vr->value_id = get_or_alloc_constant_value_id (vr->result);
3381 /* Do SCCVN. Returns true if it finished, false if we bailed out
3382 due to resource constraints. */
3384 bool
3385 run_scc_vn (void)
3387 size_t i;
3388 tree param;
3389 bool changed = true;
3391 init_scc_vn ();
3392 current_info = valid_info;
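/* Default definitions of parameters are well-defined incoming
   values, not uninitialized uses, so value number them to
   themselves up front.  */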
3394 for (param = DECL_ARGUMENTS (current_function_decl);
3395 param;
3396 param = TREE_CHAIN (param))
3398 if (gimple_default_def (cfun, param) != NULL)
3400 tree def = gimple_default_def (cfun, param);
3401 VN_INFO (def)->valnum = def;
3405 for (i = 1; i < num_ssa_names; ++i)
3407 tree name = ssa_name (i);
3408 if (name
3409 && VN_INFO (name)->visited == false
3410 && !has_zero_uses (name))
3411 if (!DFS (name))
3413 free_scc_vn ();
3414 return false;
3418 /* Initialize the value ids. */
3420 for (i = 1; i < num_ssa_names; ++i)
3422 tree name = ssa_name (i);
3423 vn_ssa_aux_t info;
3424 if (!name)
3425 continue;
3426 info = VN_INFO (name);
3427 if (info->valnum == name
3428 || info->valnum == VN_TOP)
3429 info->value_id = get_next_value_id ();
3430 else if (is_gimple_min_invariant (info->valnum))
3431 info->value_id = get_or_alloc_constant_value_id (info->valnum);
3434 /* Propagate the value ids until they stop changing. */
3435 while (changed)
3437 changed = false;
3438 for (i = 1; i < num_ssa_names; ++i)
3440 tree name = ssa_name (i);
3441 vn_ssa_aux_t info;
3442 if (!name)
3443 continue;
3444 info = VN_INFO (name);
3445 if (TREE_CODE (info->valnum) == SSA_NAME
3446 && info->valnum != name
3447 && info->value_id != VN_INFO (info->valnum)->value_id)
3449 changed = true;
3450 info->value_id = VN_INFO (info->valnum)->value_id;
3455 set_hashtable_value_ids ();
3457 if (dump_file && (dump_flags & TDF_DETAILS))
3459 fprintf (dump_file, "Value numbers:\n");
3460 for (i = 0; i < num_ssa_names; i++)
3462 tree name = ssa_name (i);
3463 if (name
3464 && VN_INFO (name)->visited
3465 && SSA_VAL (name) != name)
3467 print_generic_expr (dump_file, name, 0);
3468 fprintf (dump_file, " = ");
3469 print_generic_expr (dump_file, SSA_VAL (name), 0);
3470 fprintf (dump_file, "\n");
3475 return true;
3478 /* Return the maximum value id we have ever seen. */
3480 unsigned int
3481 get_max_value_id (void)
3483 return next_value_id;
3486 /* Return the next unique value id. */
3488 unsigned int
3489 get_next_value_id (void)
3491 return next_value_id++;
3495 /* Compare two expressions E1 and E2 and return true if they are equal. */
3497 bool
3498 expressions_equal_p (tree e1, tree e2)
3500 /* The obvious case. */
3501 if (e1 == e2)
3502 return true;
3504 /* If only one of them is null, they cannot be equal. */
3505 if (!e1 || !e2)
3506 return false;
3508 /* Now perform the actual comparison. */
3509 if (TREE_CODE (e1) == TREE_CODE (e2)
3510 && operand_equal_p (e1, e2, OEP_PURE_SAME))
3511 return true;
3513 return false;
3517 /* Return true if the nary operation NARY may trap. This is a copy
3518 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
3520 bool
3521 vn_nary_may_trap (vn_nary_op_t nary)
3523 tree type;
3524 tree rhs2 = NULL_TREE;
3525 bool honor_nans = false;
3526 bool honor_snans = false;
3527 bool fp_operation = false;
3528 bool honor_trapv = false;
3529 bool handled, ret;
3530 unsigned i;
3532 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
3533 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
3534 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
3536 type = nary->type;
3537 fp_operation = FLOAT_TYPE_P (type);
3538 if (fp_operation)
3540 honor_nans = flag_trapping_math && !flag_finite_math_only;
3541 honor_snans = flag_signaling_nans != 0;
3543 else if (INTEGRAL_TYPE_P (type)
3544 && TYPE_OVERFLOW_TRAPS (type))
3545 honor_trapv = true;
3547 if (nary->length >= 2)
3548 rhs2 = nary->op[1];
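/* Passing the second operand lets the helper see e.g. a constant
   nonzero divisor and conclude the division cannot trap.  */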
3549 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
3550 honor_trapv,
3551 honor_nans, honor_snans, rhs2,
3552 &handled);
3553 if (handled
3554 && ret)
3555 return true;
3557 for (i = 0; i < nary->length; ++i)
3558 if (tree_could_trap_p (nary->op[i]))
3559 return true;
3561 return false;