PR 47571 Fix HPUX bootstrap regression, cleanup
[official-gcc.git] / gcc / tree-ssa-sccvn.c
blob: e37668e8abb36b4b3df36b68f178fe31afe9ef5c
/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.
   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
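
/* Added illustration (editorial commentary, not part of the original
   comment): in a loop such as

       # i_1 = PHI <0(preheader), i_3(latch)>
       i_2 = i_1 + 4;
       i_3 = i_2 - 4;

   the names i_1, i_2 and i_3 form a cycle (an SCC) in the SSA graph.
   Alternative 2 above iterates value numbering over just this cycle,
   using the optimistic table, until the value numbers stabilize,
   instead of re-walking the whole function.  */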
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;
/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;
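
/* Added comment: shorthand for the current value number recorded in
   the vn_ssa_aux info of SSA name X.  */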
#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;


DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
                                SSA_NAME_VERSION (name));
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), value);
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}
/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is a default-definition or a PHI result
     use it directly.  */
  if (gimple_nop_p (def_stmt)
      || gimple_code (def_stmt) == GIMPLE_PHI)
    return vn->valnum;

  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
    {
    case tcc_reference:
      if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
           || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
           || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
          && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
        expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
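
/* Added note: for example, if NAME's value number is tmp_2 and tmp_2 is
   defined by tmp_2 = ~a_1, the tcc_unary case above rebuilds and caches
   the expression ~a_1 as the representative for NAME.  */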
/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, INSERT);
  if (*slot)
    return ((vn_constant_t)*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = (void *) vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return vro1->opcode == vro2->opcode
    && types_compatible_p (vro1->type, vro2->type)
    && expressions_equal_p (vro1->op0, vro2->op0)
    && expressions_equal_p (vro1->op1, vro2->op1)
    && expressions_equal_p (vro1->op2, vro2->op2);
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}
/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vn_reference_op_s, vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
        deref = true;
      else if (vro->opcode != ADDR_EXPR)
        deref = false;
      if (vro->off != -1)
        {
          if (off == -1)
            off = 0;
          off += vro->off;
        }
      else
        {
          if (off != -1
              && off != 0)
            result = iterative_hash_hashval_t (off, result);
          off = -1;
          if (deref
              && vro->opcode == ADDR_EXPR)
            {
              if (vro->op0)
                {
                  tree op = TREE_OPERAND (vro->op0, 0);
                  result = iterative_hash_hashval_t (TREE_CODE (op), result);
                  result = iterative_hash_expr (op, result);
                }
            }
          else
            result = vn_reference_op_compute_hash (vro, result);
        }
    }
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
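
/* Added note: accumulating the constant OFF fields above means that
   equal addresses spelled differently -- e.g. a.b.c versus
   MEM[&a + 8] when field c lives at byte offset 8 -- contribute the
   same values to the hash, mirroring the offset-based comparison done
   in vn_reference_eq below.  */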
/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  unsigned i, j;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
        return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
           && (TYPE_PRECISION (vr1->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
           && (TYPE_PRECISION (vr2->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
        {
          if (vro1->opcode == MEM_REF)
            deref1 = true;
          if (vro1->off == -1)
            break;
          off1 += vro1->off;
        }
      for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
        {
          if (vro2->opcode == MEM_REF)
            deref2 = true;
          if (vro2->off == -1)
            break;
          off2 += vro2->off;
        }
      if (off1 != off2)
        return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
        {
          memset (&tem1, 0, sizeof (tem1));
          tem1.op0 = TREE_OPERAND (vro1->op0, 0);
          tem1.type = TREE_TYPE (tem1.op0);
          tem1.opcode = TREE_CODE (tem1.op0);
          vro1 = &tem1;
        }
      if (deref2 && vro2->opcode == ADDR_EXPR)
        {
          memset (&tem2, 0, sizeof (tem2));
          tem2.op0 = TREE_OPERAND (vro2->op0, 0);
          tem2.type = TREE_TYPE (tem2.op0);
          tem2.opcode = TREE_CODE (tem2.op0);
          vro2 = &tem2;
        }
      if (!vn_reference_op_eq (vro1, vro2))
        return false;
      ++j;
      ++i;
    }
  while (VEC_length (vn_reference_op_s, vr1->operands) != i
         || VEC_length (vn_reference_op_s, vr2->operands) != j);

  return true;
}
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
        {
        case MEM_REF:
          /* The base address gets its own vn_reference_op_s structure.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          if (host_integerp (TREE_OPERAND (ref, 1), 0))
            temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          {
            tree this_offset = component_ref_field_offset (ref);
            if (this_offset
                && TREE_CODE (this_offset) == INTEGER_CST)
              {
                tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
                if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
                  {
                    double_int off
                      = double_int_add (tree_to_double_int (this_offset),
                                        double_int_rshift
                                          (tree_to_double_int (bit_offset),
                                           BITS_PER_UNIT == 8
                                           ? 3 : exact_log2 (BITS_PER_UNIT),
                                           HOST_BITS_PER_DOUBLE_INT, true));
                    if (double_int_fits_in_shwi_p (off))
                      temp.off = off.low;
                  }
              }
          }
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
          if (TREE_CODE (temp.op0) == INTEGER_CST
              && TREE_CODE (temp.op1) == INTEGER_CST
              && TREE_CODE (temp.op2) == INTEGER_CST)
            {
              double_int off = tree_to_double_int (temp.op0);
              off = double_int_add (off,
                                    double_int_neg
                                      (tree_to_double_int (temp.op1)));
              off = double_int_mul (off, tree_to_double_int (temp.op2));
              if (double_int_fits_in_shwi_p (off))
                temp.off = off.low;
            }
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case VAR_DECL:
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (IE they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration  */
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          temp.off = 0;
          break;
        case IMAGPART_EXPR:
          /* This is only interesting for its constant offset.  */
          temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
          break;
        default:
          gcc_unreachable ();
        }
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
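
/* Added illustration: a reference like a.b[i_1] is flattened
   outermost-first into roughly

       { ARRAY_REF <op0 = i_1>, COMPONENT_REF <op0 = FIELD_DECL b>,
         VAR_DECL <op0 = a> }

   where each operation's OFF field holds the constant byte offset when
   one can be computed and -1 otherwise.  */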
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
        size = -1;
      else
        size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case ADDR_EXPR:
          /* Apart from ADDR_EXPR arguments to MEM_REF.  */
          if (base != NULL_TREE
              && TREE_CODE (base) == MEM_REF
              && op->op0
              && DECL_P (TREE_OPERAND (op->op0, 0)))
            {
              vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
              base = TREE_OPERAND (op->op0, 0);
              if (pop->off == -1)
                {
                  max_size = -1;
                  offset = 0;
                }
              else
                offset += pop->off * BITS_PER_UNIT;
              op0_p = NULL;
              break;
            }
          /* Fallthru.  */
        case CALL_EXPR:
          return false;

        /* Record the base objects.  */
        case MEM_REF:
          base_alias_set = get_deref_alias_set (op->op0);
          *op0_p = build2 (MEM_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          op0_p = NULL;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += tree_low_cst (op->op1, 0);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */

            if (op->op1
                || !host_integerp (DECL_FIELD_OFFSET (field), 1))
              max_size = -1;
            else
              {
                offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
                           * BITS_PER_UNIT);
                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (!host_integerp (op->op0, 0)
              || !host_integerp (op->op1, 0)
              || !host_integerp (op->op2, 0))
            max_size = -1;
          else
            {
              HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
              hindex -= TREE_INT_CST_LOW (op->op1);
              hindex *= TREE_INT_CST_LOW (op->op2);
              hindex *= BITS_PER_UNIT;
              offset += hindex;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);

  return true;
}
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  temp.off = -1;
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
                            unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  tree addr_base;
  HOST_WIDE_INT addr_offset;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
                                             &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != op->op0)
    {
      double_int off = tree_to_double_int (mem_op->op0);
      off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
      off = double_int_add (off, shwi_to_double_int (addr_offset));
      mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (host_integerp (mem_op->op0, 0))
        mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
      else
        mem_op->off = -1;
    }
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
                                     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  gimple def_stmt;
  enum tree_code code;
  double_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = tree_to_double_int (mem_op->op0);
  off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
                                                 &addr_offset);
      if (!addr_base
          || TREE_CODE (addr_base) != MEM_REF)
        return;

      off = double_int_add (off, shwi_to_double_int (addr_offset));
      off = double_int_add (off, mem_ref_offset (addr_base));
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
          || TREE_CODE (ptroff) != INTEGER_CST)
        return;

      off = double_int_add (off, tree_to_double_int (ptroff));
      op->op0 = ptr;
    }

  mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (host_integerp (mem_op->op0, 0))
    mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}
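
/* Added example: given

       ptr_1 = p_2 + 8;
       ... = MEM[ptr_1 + 4];

   the POINTER_PLUS_EXPR case above rewrites the reference ops to the
   equivalent MEM[p_2 + 12], so both spellings receive the same value
   number.  */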
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  VEC (vn_reference_op_s, heap) *operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = VEC_index (vn_reference_op_s, operands, 0);
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && VEC_length (vn_reference_op_s, operands) >= 2
      && VEC_length (vn_reference_op_s, operands) <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (VEC_length (vn_reference_op_s, operands) > 2)
        arg1 = VEC_index (vn_reference_op_s, operands, 2);
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
          || (arg0->opcode == ADDR_EXPR
              && is_gimple_min_invariant (arg0->op0)))
        anyconst = true;
      if (arg1
          && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
              || (arg1->opcode == ADDR_EXPR
                  && is_gimple_min_invariant (arg1->op0))))
        anyconst = true;
      if (anyconst)
        {
          tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
                                         arg1 ? 2 : 1,
                                         arg0->op0,
                                         arg1 ? arg1->op0 : NULL);
          if (folded
              && TREE_CODE (folded) == NOP_EXPR)
            folded = TREE_OPERAND (folded, 0);
          if (folded
              && is_gimple_min_invariant (folded))
            return folded;
        }
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
           && TREE_CODE (op->op0) == INTEGER_CST
           && integer_zerop (op->op1)
           && VEC_length (vn_reference_op_s, operands) == 2)
    {
      vn_reference_op_t arg0;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (arg0->opcode == STRING_CST
          && (TYPE_MODE (op->type)
              == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
          && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
          && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
          && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
        return build_int_cst_type (op->type,
                                   (TREE_STRING_POINTER (arg0->op0)
                                    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}
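
/* Added example: a load like "abc"[1] arrives here as the two ops
   { ARRAY_REF <op0 = 1>, STRING_CST "abc" } and is simplified by the
   constant-string case above to the character constant 'b'.  */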
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  vn_reference_op_t vro;
  unsigned int i;

  FOR_EACH_VEC_ELT (vn_reference_op_s, orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          vro->op0 = SSA_VAL (vro->op0);
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        vro->op1 = SSA_VAL (vro->op1);
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        vro->op2 = SSA_VAL (vro->op2);
      /* If it transforms from an SSA_NAME to an address, fold with
         a preceding indirect reference.  */
      if (i > 0
          && vro->op0
          && TREE_CODE (vro->op0) == ADDR_EXPR
          && VEC_index (vn_reference_op_s,
                        orig, i - 1)->opcode == MEM_REF)
        vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
               && vro->opcode == SSA_NAME
               && VEC_index (vn_reference_op_s,
                             orig, i - 1)->opcode == MEM_REF)
        vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
         one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
               && vro->off == -1
               && TREE_CODE (vro->op0) == INTEGER_CST
               && TREE_CODE (vro->op1) == INTEGER_CST
               && TREE_CODE (vro->op2) == INTEGER_CST)
        {
          double_int off = tree_to_double_int (vro->op0);
          off = double_int_add (off,
                                double_int_neg
                                  (tree_to_double_int (vro->op1)));
          off = double_int_mul (off, tree_to_double_int (vro->op2));
          if (double_int_fits_in_shwi_p (off))
            vro->off = off.low;
        }
    }

  return orig;
}
static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}
static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REF and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree fndecl;
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static VEC (vn_reference_op_s, heap) *lhs_ops = NULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definition's LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      VEC (vn_reference_op_s, heap) *tem;
      tree lhs = gimple_assign_lhs (def_stmt);
      /* Avoid re-allocation overhead.  */
      VEC_truncate (vn_reference_op_s, lhs_ops, 0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      tem = lhs_ops;
      lhs_ops = valueize_refs (lhs_ops);
      gcc_assert (lhs_ops == tem);
      lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref, get_alias_set (lhs),
                                                  TREE_TYPE (lhs), lhs_ops);
      if (lhs_ref_ok
          && !refs_may_alias_p_1 (ref, &lhs_ref, true))
        return NULL;
    }

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && is_gimple_call (def_stmt)
      && (fndecl = gimple_call_fndecl (def_stmt))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
          == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
          && maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      VEC (vn_reference_op_s, heap) *rhs = NULL;
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
        return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      if (maxsize2 == -1
          || (base != base2 && !operand_equal_p (base, base2, 0))
          || offset2 > offset
          || offset2 + size2 < offset + maxsize)
        return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
         contains valueized operands for the lhs.  */
      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
      j = VEC_length (vn_reference_op_s, lhs_ops) - 1;
      while (j >= 0 && i >= 0
             && vn_reference_op_eq (VEC_index (vn_reference_op_s,
                                               vr->operands, i),
                                    VEC_index (vn_reference_op_s, lhs_ops, j)))
        {
          i--;
          j--;
        }

      /* i now points to the first additional op.
         ??? LHS may not be completely contained in VR, one or more
         VIEW_CONVERT_EXPRs could be in its way.  We could at least
         try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
        return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
          > VEC_length (vn_reference_op_s, vr->operands))
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
                         i + 1 + VEC_length (vn_reference_op_s, rhs));
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands,
                      i + 1 + VEC_length (vn_reference_op_s, rhs));
      FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
        VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
      VEC_free (vn_reference_op_s, heap, rhs);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
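
/* Added illustration: when the walk reaches an aggregate copy that
   kills the looked-up reference, e.g.

       b = a;
       x_1 = b.f;   <- lookup of b.f walks to "b = a"

   the aggregate-copy case above rewrites the reference ops from b.f to
   a.f and continues the walk, so the load can reuse a value number
   recorded for a.f.  */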
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
                 VEC_length (vn_reference_op_s, operands));
  memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
          VEC_address (vn_reference_op_s, operands),
          sizeof (vn_reference_op_s)
          * VEC_length (vn_reference_op_s, operands));
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      vn_walk_kind = kind;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
        *vnresult =
          (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
                     vn_reference_t *vnresult)
{
  VEC (vn_reference_op_s, heap) *operands;
  struct vn_reference_s vr1;
  tree cst;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
  vr1.type = TREE_TYPE (op);
  vr1.set = get_alias_set (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  if (kind != VN_NOWALK
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      ao_ref_init (&r, op);
      vn_walk_kind = kind;
      wvnresult =
        (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                vn_reference_lookup_2,
                                                vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
      if (wvnresult)
        {
          if (vnresult)
            *vnresult = wvnresult;
          return wvnresult->result;
        }

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}
/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            tree result, unsigned int value_id)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Compute and return the hash value for nary operation VNO1.  */

hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  hash = iterative_hash_hashval_t (vno1->opcode, 0);
  for (i = 0; i < vno1->length; ++i)
    hash = iterative_hash_expr (vno1->op[i], hash);

  return hash;
}
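
/* Added note: the operand swap above canonicalizes commutative
   operations, so a_1 + b_2 and b_2 + a_1 hash identically and end up
   sharing a single nary table entry.  */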
/* Return the computed hashcode for nary operation P1.  */

static hashval_t
vn_nary_op_hash (const void *p1)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

int
vn_nary_op_eq (const void *p1, const void *p2)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}
/* Initialize VNO from the pieces provided.  */

static void
init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
                             enum tree_code code, tree type, tree op0,
                             tree op1, tree op2, tree op3)
{
  vno->opcode = code;
  vno->length = length;
  vno->type = type;
  switch (length)
    {
    /* The fallthrus here are deliberate.  */
    case 4: vno->op[3] = op3;
    case 3: vno->op[2] = op2;
    case 2: vno->op[1] = op1;
    case 1: vno->op[0] = op0;
    default:
      break;
    }
}

/* Initialize VNO from OP.  */

static void
init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
{
  unsigned i;

  vno->opcode = TREE_CODE (op);
  vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno->type = TREE_TYPE (op);
  for (i = 0; i < vno->length; ++i)
    vno->op[i] = TREE_OPERAND (op, i);
}

/* Initialize VNO from STMT.  */

static void
init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
{
  unsigned i;

  vno->opcode = gimple_assign_rhs_code (stmt);
  vno->length = gimple_num_ops (stmt) - 1;
  vno->type = gimple_expr_type (stmt);
  for (i = 0; i < vno->length; ++i)
    vno->op[i] = gimple_op (stmt, i + 1);
  if (vno->opcode == REALPART_EXPR
      || vno->opcode == IMAGPART_EXPR
      || vno->opcode == VIEW_CONVERT_EXPR)
    vno->op[0] = TREE_OPERAND (vno->op[0], 0);
}
/* Compute the hashcode for VNO and look for it in the hash table;
   return the resulting value number if it exists in the hash table.
   Return NULL_TREE if it does not exist in the hash table or if the
   result field of the operation is NULL.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

static tree
vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
{
  void **slot;

  if (vnresult)
    *vnresult = NULL;

  vno->hashcode = vn_nary_op_compute_hash (vno);
  slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Lookup an n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0, tree op1, tree op2,
                          tree op3, vn_nary_op_t *vnresult)
{
  struct vn_nary_op_s vno1;
  init_vn_nary_op_from_pieces (&vno1, length, code, type, op0, op1, op2, op3);
  return vn_nary_op_lookup_1 (&vno1, vnresult);
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  struct vn_nary_op_s vno1;
  init_vn_nary_op_from_op (&vno1, op);
  return vn_nary_op_lookup_1 (&vno1, vnresult);
}

/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  struct vn_nary_op_s vno1;
  init_vn_nary_op_from_stmt (&vno1, stmt);
  return vn_nary_op_lookup_1 (&vno1, vnresult);
}
/* Return the size of a vn_nary_op_t with LENGTH operands.  */

static size_t
sizeof_vn_nary_op (unsigned int length)
{
  return sizeof (struct vn_nary_op_s) - sizeof (tree) * (4 - length);
}
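
/* Added note: vn_nary_op_s reserves four operand slots, so a node with
   LENGTH operands is the full structure minus the 4 - LENGTH unused
   trailing tree slots.  */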
/* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */

static vn_nary_op_t
alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
{
  return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
}

/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
   obstack.  */

static vn_nary_op_t
alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
                                               &current_info->nary_obstack);

  vno1->value_id = value_id;
  vno1->length = length;
  vno1->result = result;

  return vno1;
}
/* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
   VNO->HASHCODE first.  */

static vn_nary_op_t
vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash)
{
  void **slot;

  if (compute_hash)
    vno->hashcode = vn_nary_op_compute_hash (vno);

  slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT);
  gcc_assert (!*slot);

  *slot = vno;
  return vno;
}

/* Insert an n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0,
                          tree op1, tree op2, tree op3,
                          tree result,
                          unsigned int value_id)
{
  vn_nary_op_t vno1;

  vno1 = alloc_vn_nary_op (length, result, value_id);
  init_vn_nary_op_from_pieces (vno1, length, code, type, op0, op1, op2, op3);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}

/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  vn_nary_op_t vno1;

  vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_op (vno1, op);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}

/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  unsigned length = gimple_num_ops (stmt) - 1;
  vn_nary_op_t vno1;

  vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}
1938 /* Compute a hashcode for PHI operation VP1 and return it. */
1940 static inline hashval_t
1941 vn_phi_compute_hash (vn_phi_t vp1)
1943 hashval_t result;
1944 int i;
1945 tree phi1op;
1946 tree type;
1948 result = vp1->block->index;
1950 /* If all PHI arguments are constants, we need to distinguish
1951 the PHI node via its type. */
1952 type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
1953 result += (INTEGRAL_TYPE_P (type)
1954 + (INTEGRAL_TYPE_P (type)
1955 ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
1957 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
1959 if (phi1op == VN_TOP)
1960 continue;
1961 result = iterative_hash_expr (phi1op, result);
1964 return result;
1967 /* Return the computed hashcode for phi operation P1. */
1969 static hashval_t
1970 vn_phi_hash (const void *p1)
1972 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
1973 return vp1->hashcode;
1976 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
1978 static int
1979 vn_phi_eq (const void *p1, const void *p2)
1981 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
1982 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
1984 if (vp1->hashcode != vp2->hashcode)
1985 return false;
1987 if (vp1->block == vp2->block)
1989 int i;
1990 tree phi1op;
1992 /* If the PHI nodes do not have compatible types
1993 they are not the same. */
1994 if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
1995 TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
1996 return false;
1998 /* Any phi in the same block will have its arguments in the
1999 same edge order, because of how we store phi nodes. */
2000 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2002 tree phi2op = VEC_index (tree, vp2->phiargs, i);
2003 if (phi1op == VN_TOP || phi2op == VN_TOP)
2004 continue;
2005 if (!expressions_equal_p (phi1op, phi2op))
2006 return false;
2008 return true;
2010 return false;
2013 static VEC(tree, heap) *shared_lookup_phiargs;
2015 /* Look up PHI in the current hash table, and return the resulting
2016 value number if it exists in the hash table. Return NULL_TREE if
2017 it does not exist in the hash table. */
2019 static tree
2020 vn_phi_lookup (gimple phi)
2022 void **slot;
2023 struct vn_phi_s vp1;
2024 unsigned i;
2026 VEC_truncate (tree, shared_lookup_phiargs, 0);
2028 /* Canonicalize the SSA_NAMEs to their value numbers. */
2029 for (i = 0; i < gimple_phi_num_args (phi); i++)
2031 tree def = PHI_ARG_DEF (phi, i);
2032 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2033 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
2035 vp1.phiargs = shared_lookup_phiargs;
2036 vp1.block = gimple_bb (phi);
2037 vp1.hashcode = vn_phi_compute_hash (&vp1);
2038 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
2039 NO_INSERT);
2040 if (!slot && current_info == optimistic_info)
2041 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
2042 NO_INSERT);
2043 if (!slot)
2044 return NULL_TREE;
2045 return ((vn_phi_t)*slot)->result;
2048 /* Insert PHI into the current hash table with a value number of
2049 RESULT. */
2051 static vn_phi_t
2052 vn_phi_insert (gimple phi, tree result)
2054 void **slot;
2055 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2056 unsigned i;
2057 VEC (tree, heap) *args = NULL;
2059 /* Canonicalize the SSA_NAMEs to their value numbers. */
2060 for (i = 0; i < gimple_phi_num_args (phi); i++)
2062 tree def = PHI_ARG_DEF (phi, i);
2063 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2064 VEC_safe_push (tree, heap, args, def);
2066 vp1->value_id = VN_INFO (result)->value_id;
2067 vp1->phiargs = args;
2068 vp1->block = gimple_bb (phi);
2069 vp1->result = result;
2070 vp1->hashcode = vn_phi_compute_hash (vp1);
2072 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
2073 INSERT);
2075 /* Because we iterate over phi operations more than once, it's
2076 possible the slot might already exist here, hence no assert.  */
2077 *slot = vp1;
2078 return vp1;
2082 /* Print set of components in strongly connected component SCC to OUT. */
2084 static void
2085 print_scc (FILE *out, VEC (tree, heap) *scc)
2087 tree var;
2088 unsigned int i;
2090 fprintf (out, "SCC consists of: ");
2091 FOR_EACH_VEC_ELT (tree, scc, i, var)
2093 print_generic_expr (out, var, 0);
2094 fprintf (out, " ");
2096 fprintf (out, "\n");
2099 /* Set the value number of FROM to TO, return true if it has changed
2100 as a result. */
2102 static inline bool
2103 set_ssa_val_to (tree from, tree to)
2105 tree currval;
2107 if (from != to
2108 && TREE_CODE (to) == SSA_NAME
2109 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2110 to = from;
2112 /* The only things we allow as value numbers are VN_TOP, SSA names
2113 and invariants, so assert that here. */
2114 gcc_assert (to != NULL_TREE
2115 && (to == VN_TOP
2116 || TREE_CODE (to) == SSA_NAME
2117 || is_gimple_min_invariant (to)));
2119 if (dump_file && (dump_flags & TDF_DETAILS))
2121 fprintf (dump_file, "Setting value number of ");
2122 print_generic_expr (dump_file, from, 0);
2123 fprintf (dump_file, " to ");
2124 print_generic_expr (dump_file, to, 0);
2127 currval = SSA_VAL (from);
2129 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
2131 VN_INFO (from)->valnum = to;
2132 if (dump_file && (dump_flags & TDF_DETAILS))
2133 fprintf (dump_file, " (changed)\n");
2134 return true;
2136 if (dump_file && (dump_flags & TDF_DETAILS))
2137 fprintf (dump_file, "\n");
2138 return false;
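/* As an illustrative example: if x_2 currently has value number VN_TOP
   and we discover it equals the constant 5, set_ssa_val_to (x_2, 5)
   records the new value number and returns true; a later call with the
   same arguments returns false. This changed/unchanged result is what
   drives the SCC iteration in process_scc to a fixpoint.  */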
2141 /* Set all definitions in STMT to value number themselves.
2142 Return true if any value number changed. */
2144 static bool
2145 defs_to_varying (gimple stmt)
2147 bool changed = false;
2148 ssa_op_iter iter;
2149 def_operand_p defp;
2151 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2153 tree def = DEF_FROM_PTR (defp);
2155 VN_INFO (def)->use_processed = true;
2156 changed |= set_ssa_val_to (def, def);
2158 return changed;
2161 static bool expr_has_constants (tree expr);
2162 static tree valueize_expr (tree expr);
2164 /* Visit a copy between LHS and RHS, return true if the value number
2165 changed. */
2167 static bool
2168 visit_copy (tree lhs, tree rhs)
2170 /* Follow chains of copies to their destination. */
2171 while (TREE_CODE (rhs) == SSA_NAME
2172 && SSA_VAL (rhs) != rhs)
2173 rhs = SSA_VAL (rhs);
2175 /* The copy may have a more interesting constant-filled expression
2176 (we don't, since we know our RHS is just an SSA name). */
2177 if (TREE_CODE (rhs) == SSA_NAME)
2179 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2180 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2183 return set_ssa_val_to (lhs, rhs);
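/* For instance, given the hypothetical copy chain

     b_2 = c_3;
     a_1 = b_2;

   where b_2 was already value numbered to c_3, visiting a_1 follows
   SSA_VAL (b_2) to c_3 and value numbers a_1 directly to c_3,
   collapsing the chain in a single step.  */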
2186 /* Visit a nary operator RHS, value number it, and return true if the
2187 value number of LHS has changed as a result. */
2189 static bool
2190 visit_nary_op (tree lhs, gimple stmt)
2192 bool changed = false;
2193 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2195 if (result)
2196 changed = set_ssa_val_to (lhs, result);
2197 else
2199 changed = set_ssa_val_to (lhs, lhs);
2200 vn_nary_op_insert_stmt (stmt, lhs);
2203 return changed;
2206 /* Visit a call STMT storing into LHS. Return true if the value number
2207 of the LHS has changed as a result. */
2209 static bool
2210 visit_reference_op_call (tree lhs, gimple stmt)
2212 bool changed = false;
2213 struct vn_reference_s vr1;
2214 tree result;
2215 tree vuse = gimple_vuse (stmt);
2217 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2218 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2219 vr1.type = gimple_expr_type (stmt);
2220 vr1.set = 0;
2221 vr1.hashcode = vn_reference_compute_hash (&vr1);
2222 result = vn_reference_lookup_1 (&vr1, NULL);
2223 if (result)
2225 changed = set_ssa_val_to (lhs, result);
2226 if (TREE_CODE (result) == SSA_NAME
2227 && VN_INFO (result)->has_constants)
2228 VN_INFO (lhs)->has_constants = true;
2230 else
2232 void **slot;
2233 vn_reference_t vr2;
2234 changed = set_ssa_val_to (lhs, lhs);
2235 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2236 vr2->vuse = vr1.vuse;
2237 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2238 vr2->type = vr1.type;
2239 vr2->set = vr1.set;
2240 vr2->hashcode = vr1.hashcode;
2241 vr2->result = lhs;
2242 slot = htab_find_slot_with_hash (current_info->references,
2243 vr2, vr2->hashcode, INSERT);
2244 if (*slot)
2245 free_reference (*slot);
2246 *slot = vr2;
2249 return changed;
2252 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2253 and return true if the value number of the LHS has changed as a result. */
2255 static bool
2256 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2258 bool changed = false;
2259 tree last_vuse;
2260 tree result;
2262 last_vuse = gimple_vuse (stmt);
2263 last_vuse_ptr = &last_vuse;
2264 result = vn_reference_lookup (op, gimple_vuse (stmt), VN_WALKREWRITE, NULL);
2265 last_vuse_ptr = NULL;
2267 /* If we have a VCE, try looking up its operand as it might be stored in
2268 a different type. */
2269 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2270 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2271 VN_WALKREWRITE, NULL);
2273 /* We handle type-punning through unions by value-numbering based
2274 on offset and size of the access. Be prepared to handle a
2275 type mismatch here by creating a VIEW_CONVERT_EXPR. */
2276 if (result
2277 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2279 /* We will be setting the value number of lhs to the value number
2280 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2281 So first simplify and lookup this expression to see if it
2282 is already available. */
2283 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2284 if ((CONVERT_EXPR_P (val)
2285 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2286 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2288 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2289 if ((CONVERT_EXPR_P (tem)
2290 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2291 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2292 TREE_TYPE (val), tem)))
2293 val = tem;
2295 result = val;
2296 if (!is_gimple_min_invariant (val)
2297 && TREE_CODE (val) != SSA_NAME)
2298 result = vn_nary_op_lookup (val, NULL);
2299 /* If the expression is not yet available, value-number lhs to
2300 a new SSA_NAME we create. */
2301 if (!result)
2303 result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
2304 /* Initialize value-number information properly. */
2305 VN_INFO_GET (result)->valnum = result;
2306 VN_INFO (result)->value_id = get_next_value_id ();
2307 VN_INFO (result)->expr = val;
2308 VN_INFO (result)->has_constants = expr_has_constants (val);
2309 VN_INFO (result)->needs_insertion = true;
2310 /* As all "inserted" statements are singleton SCCs, insert
2311 to the valid table. This is strictly needed to
2312 avoid re-generating new value SSA_NAMEs for the same
2313 expression during SCC iteration over and over (the
2314 optimistic table gets cleared after each iteration).
2315 We do not need to insert into the optimistic table, as
2316 lookups there will fall back to the valid table. */
2317 if (current_info == optimistic_info)
2319 current_info = valid_info;
2320 vn_nary_op_insert (val, result);
2321 current_info = optimistic_info;
2323 else
2324 vn_nary_op_insert (val, result);
2325 if (dump_file && (dump_flags & TDF_DETAILS))
2327 fprintf (dump_file, "Inserting name ");
2328 print_generic_expr (dump_file, result, 0);
2329 fprintf (dump_file, " for expression ");
2330 print_generic_expr (dump_file, val, 0);
2331 fprintf (dump_file, "\n");
2336 if (result)
2338 changed = set_ssa_val_to (lhs, result);
2339 if (TREE_CODE (result) == SSA_NAME
2340 && VN_INFO (result)->has_constants)
2342 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2343 VN_INFO (lhs)->has_constants = true;
2346 else
2348 changed = set_ssa_val_to (lhs, lhs);
2349 vn_reference_insert (op, lhs, last_vuse);
2352 return changed;
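/* An illustrative example of the type-mismatch handling above: given a
   hypothetical union { int i; float f; } u, a load of u.f following a
   store u.i = x_1 is found by matching offset and size, and the type
   mismatch is patched up by value numbering the load as
   VIEW_CONVERT_EXPR <float> (x_1), possibly via a newly inserted
   SSA name as described in the comments above.  */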
2356 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2357 and return true if the value number of the LHS has changed as a result. */
2359 static bool
2360 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2362 bool changed = false;
2363 tree result;
2364 bool resultsame = false;
2366 /* First we want to look up using the *vuses* from the store and see
2367 whether the last store to this location with the same address
2368 had the same value.
2370 The vuses represent the memory state before the store. If the
2371 memory state, address, and value of the store are the same as the
2372 last store to this location, then this store will produce the
2373 same memory state as that store.
2375 In this case the vdef versions for this store are value numbered to those
2376 vuse versions, since they represent the same memory state after
2377 this store.
2379 Otherwise, the vdefs for the store are used when inserting into
2380 the table, since the store generates a new memory state. */
2382 result = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_NOWALK, NULL);
2384 if (result)
2386 if (TREE_CODE (result) == SSA_NAME)
2387 result = SSA_VAL (result);
2388 if (TREE_CODE (op) == SSA_NAME)
2389 op = SSA_VAL (op);
2390 resultsame = expressions_equal_p (result, op);
2393 if (!result || !resultsame)
2395 tree vdef;
2397 if (dump_file && (dump_flags & TDF_DETAILS))
2399 fprintf (dump_file, "No store match\n");
2400 fprintf (dump_file, "Value numbering store ");
2401 print_generic_expr (dump_file, lhs, 0);
2402 fprintf (dump_file, " to ");
2403 print_generic_expr (dump_file, op, 0);
2404 fprintf (dump_file, "\n");
2406 /* Have to set value numbers before insert, since insert is
2407 going to valueize the references in-place. */
2408 if ((vdef = gimple_vdef (stmt)))
2410 VN_INFO (vdef)->use_processed = true;
2411 changed |= set_ssa_val_to (vdef, vdef);
2414 /* Do not insert structure copies into the tables. */
2415 if (is_gimple_min_invariant (op)
2416 || is_gimple_reg (op))
2417 vn_reference_insert (lhs, op, vdef);
2419 else
2421 /* We had a match, so value number the vdef to have the value
2422 number of the vuse it came from. */
2423 tree def, use;
2425 if (dump_file && (dump_flags & TDF_DETAILS))
2426 fprintf (dump_file, "Store matched earlier value,"
2427 "value numbering store vdefs to matching vuses.\n");
2429 def = gimple_vdef (stmt);
2430 use = gimple_vuse (stmt);
2432 VN_INFO (def)->use_processed = true;
2433 changed |= set_ssa_val_to (def, SSA_VAL (use));
2436 return changed;
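/* An illustrative example of the logic above, on hypothetical
   statements:

     # .MEM_3 = VDEF <.MEM_1>
     *p_2 = x_1;
     ...
     # .MEM_5 = VDEF <.MEM_3>
     *p_2 = x_1;

   For the second store the lookup finds the value stored by the first
   one, RESULTSAME is true, and .MEM_5 is value numbered to the value
   of .MEM_3, exposing the second store as redundant.  */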
2439 /* Visit and value number PHI, return true if the value number
2440 changed. */
2442 static bool
2443 visit_phi (gimple phi)
2445 bool changed = false;
2446 tree result;
2447 tree sameval = VN_TOP;
2448 bool allsame = true;
2449 unsigned i;
2451 /* TODO: We could check for this in init_sccvn, and replace this
2452 with a gcc_assert. */
2453 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2454 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2456 /* See if all non-TOP arguments have the same value. TOP is
2457 equivalent to everything, so we can ignore it. */
2458 for (i = 0; i < gimple_phi_num_args (phi); i++)
2460 tree def = PHI_ARG_DEF (phi, i);
2462 if (TREE_CODE (def) == SSA_NAME)
2463 def = SSA_VAL (def);
2464 if (def == VN_TOP)
2465 continue;
2466 if (sameval == VN_TOP)
2468 sameval = def;
2470 else
2472 if (!expressions_equal_p (def, sameval))
2474 allsame = false;
2475 break;
2480 /* If all value numbered to the same value, the phi node has that
2481 value. */
2482 if (allsame)
2484 if (is_gimple_min_invariant (sameval))
2486 VN_INFO (PHI_RESULT (phi))->has_constants = true;
2487 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2489 else
2491 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2492 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2495 if (TREE_CODE (sameval) == SSA_NAME)
2496 return visit_copy (PHI_RESULT (phi), sameval);
2498 return set_ssa_val_to (PHI_RESULT (phi), sameval);
2501 /* Otherwise, see if it is equivalent to a phi node in this block. */
2502 result = vn_phi_lookup (phi);
2503 if (result)
2505 if (TREE_CODE (result) == SSA_NAME)
2506 changed = visit_copy (PHI_RESULT (phi), result);
2507 else
2508 changed = set_ssa_val_to (PHI_RESULT (phi), result);
2510 else
2512 vn_phi_insert (phi, PHI_RESULT (phi));
2513 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2514 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2515 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2518 return changed;
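/* Two illustrative cases for the logic above: a PHI like
   x_4 = PHI <x_1, x_1, VN_TOP> value numbers to x_1, since VN_TOP is
   equivalent to everything; a PHI whose arguments have distinct value
   numbers instead goes through vn_phi_lookup, so two congruent PHIs
   in the same block receive the same value number.  */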
2521 /* Return true if EXPR contains constants. */
2523 static bool
2524 expr_has_constants (tree expr)
2526 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2528 case tcc_unary:
2529 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2531 case tcc_binary:
2532 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2533 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2534 /* Constants inside reference ops are rarely interesting, but
2535 it can take a lot of looking to find them. */
2536 case tcc_reference:
2537 case tcc_declaration:
2538 return false;
2539 default:
2540 return is_gimple_min_invariant (expr);
2542 return false;
2545 /* Return true if STMT contains constants. */
2547 static bool
2548 stmt_has_constants (gimple stmt)
2550 if (gimple_code (stmt) != GIMPLE_ASSIGN)
2551 return false;
2553 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2555 case GIMPLE_UNARY_RHS:
2556 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2558 case GIMPLE_BINARY_RHS:
2559 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2560 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
2561 case GIMPLE_TERNARY_RHS:
2562 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2563 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
2564 || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
2565 case GIMPLE_SINGLE_RHS:
2566 /* Constants inside reference ops are rarely interesting, but
2567 it can take a lot of looking to find them. */
2568 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2569 default:
2570 gcc_unreachable ();
2572 return false;
2575 /* Replace SSA_NAMEs in EXPR with their value numbers, and return the
2576 result.
2577 This is performed in place. */
2579 static tree
2580 valueize_expr (tree expr)
2582 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2584 case tcc_unary:
2585 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2586 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2587 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2588 break;
2589 case tcc_binary:
2590 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2591 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2592 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2593 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
2594 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
2595 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
2596 break;
2597 default:
2598 break;
2600 return expr;
2603 /* Simplify the binary expression RHS, and return the result if
2604 simplified. */
2606 static tree
2607 simplify_binary_expression (gimple stmt)
2609 tree result = NULL_TREE;
2610 tree op0 = gimple_assign_rhs1 (stmt);
2611 tree op1 = gimple_assign_rhs2 (stmt);
2613 /* This will not catch every single case we could combine, but will
2614 catch those with constants. The goal here is to simultaneously
2615 combine constants between expressions, but avoid infinite
2616 expansion of expressions during simplification. */
2617 if (TREE_CODE (op0) == SSA_NAME)
2619 if (VN_INFO (op0)->has_constants
2620 || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
2621 op0 = valueize_expr (vn_get_expr_for (op0));
2622 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
2623 op0 = SSA_VAL (op0);
2626 if (TREE_CODE (op1) == SSA_NAME)
2628 if (VN_INFO (op1)->has_constants)
2629 op1 = valueize_expr (vn_get_expr_for (op1));
2630 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
2631 op1 = SSA_VAL (op1);
2634 /* Avoid folding if nothing changed. */
2635 if (op0 == gimple_assign_rhs1 (stmt)
2636 && op1 == gimple_assign_rhs2 (stmt))
2637 return NULL_TREE;
2639 fold_defer_overflow_warnings ();
2641 result = fold_binary (gimple_assign_rhs_code (stmt),
2642 gimple_expr_type (stmt), op0, op1);
2643 if (result)
2644 STRIP_USELESS_TYPE_CONVERSION (result);
2646 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
2647 stmt, 0);
2649 /* Make sure the result is not a complex expression consisting
2650 of operators of operators (i.e. (a + b) + (a + c));
2651 otherwise, we will end up with unbounded expressions if
2652 fold does anything at all. */
2653 if (result && valid_gimple_rhs_p (result))
2654 return result;
2656 return NULL_TREE;
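/* A sketch of the intended effect, on hypothetical statements:

     a_1 = b_2 + 1;
     c_3 = a_1 + 2;

   When value numbering c_3, op0 is expanded to b_2 + 1 because a_1's
   expression has constants, and fold_binary combines the whole RHS to
   b_2 + 3, so constants are merged without unbounded regrowth of the
   expression.  */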
2659 /* Simplify the unary expression RHS, and return the result if
2660 simplified. */
2662 static tree
2663 simplify_unary_expression (gimple stmt)
2665 tree result = NULL_TREE;
2666 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
2668 /* We handle some tcc_reference codes here that are all
2669 GIMPLE_ASSIGN_SINGLE codes. */
2670 if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
2671 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2672 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2673 op0 = TREE_OPERAND (op0, 0);
2675 if (TREE_CODE (op0) != SSA_NAME)
2676 return NULL_TREE;
2678 orig_op0 = op0;
2679 if (VN_INFO (op0)->has_constants)
2680 op0 = valueize_expr (vn_get_expr_for (op0));
2681 else if (gimple_assign_cast_p (stmt)
2682 || gimple_assign_rhs_code (stmt) == REALPART_EXPR
2683 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2684 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2686 /* We want to do tree-combining on conversion-like expressions.
2687 Make sure we feed only SSA_NAMEs or constants to fold though. */
2688 tree tem = valueize_expr (vn_get_expr_for (op0));
2689 if (UNARY_CLASS_P (tem)
2690 || BINARY_CLASS_P (tem)
2691 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
2692 || TREE_CODE (tem) == SSA_NAME
2693 || is_gimple_min_invariant (tem))
2694 op0 = tem;
2697 /* Avoid folding if nothing changed, but remember the expression. */
2698 if (op0 == orig_op0)
2699 return NULL_TREE;
2701 result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
2702 gimple_expr_type (stmt), op0);
2703 if (result)
2705 STRIP_USELESS_TYPE_CONVERSION (result);
2706 if (valid_gimple_rhs_p (result))
2707 return result;
2710 return NULL_TREE;
2713 /* Try to simplify RHS using equivalences and constant folding. */
2715 static tree
2716 try_to_simplify (gimple stmt)
2718 tree tem;
2720 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
2721 in this case; there is no point in doing extra work. */
2722 if (gimple_assign_copy_p (stmt)
2723 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2724 return NULL_TREE;
2726 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2728 case tcc_declaration:
2729 tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
2730 if (tem)
2731 return tem;
2732 break;
2734 case tcc_reference:
2735 /* Do not do full-blown reference lookup here, but simplify
2736 reads from constant aggregates. */
2737 tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
2738 if (tem)
2739 return tem;
2741 /* Fallthrough for some codes that can operate on registers. */
2742 if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
2743 || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
2744 || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
2745 break;
2746 /* We could do a little more with unary ops, if they expand
2747 into binary ops, but it's debatable whether it is worth it. */
2748 case tcc_unary:
2749 return simplify_unary_expression (stmt);
2750 break;
2751 case tcc_comparison:
2752 case tcc_binary:
2753 return simplify_binary_expression (stmt);
2754 break;
2755 default:
2756 break;
2759 return NULL_TREE;
2762 /* Visit and value number USE, return true if the value number
2763 changed. */
2765 static bool
2766 visit_use (tree use)
2768 bool changed = false;
2769 gimple stmt = SSA_NAME_DEF_STMT (use);
2771 VN_INFO (use)->use_processed = true;
2773 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
2774 if (dump_file && (dump_flags & TDF_DETAILS)
2775 && !SSA_NAME_IS_DEFAULT_DEF (use))
2777 fprintf (dump_file, "Value numbering ");
2778 print_generic_expr (dump_file, use, 0);
2779 fprintf (dump_file, " stmt = ");
2780 print_gimple_stmt (dump_file, stmt, 0, 0);
2783 /* Handle uninitialized uses. */
2784 if (SSA_NAME_IS_DEFAULT_DEF (use))
2785 changed = set_ssa_val_to (use, use);
2786 else
2788 if (gimple_code (stmt) == GIMPLE_PHI)
2789 changed = visit_phi (stmt);
2790 else if (!gimple_has_lhs (stmt)
2791 || gimple_has_volatile_ops (stmt)
2792 || stmt_could_throw_p (stmt))
2793 changed = defs_to_varying (stmt);
2794 else if (is_gimple_assign (stmt))
2796 tree lhs = gimple_assign_lhs (stmt);
2797 tree simplified;
2799 /* Shortcut for copies. Simplifying copies is pointless,
2800 since we copy the expression and value they represent. */
2801 if (gimple_assign_copy_p (stmt)
2802 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2803 && TREE_CODE (lhs) == SSA_NAME)
2805 changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
2806 goto done;
2808 simplified = try_to_simplify (stmt);
2809 if (simplified)
2811 if (dump_file && (dump_flags & TDF_DETAILS))
2813 fprintf (dump_file, "RHS ");
2814 print_gimple_expr (dump_file, stmt, 0, 0);
2815 fprintf (dump_file, " simplified to ");
2816 print_generic_expr (dump_file, simplified, 0);
2817 if (TREE_CODE (lhs) == SSA_NAME)
2818 fprintf (dump_file, " has constants %d\n",
2819 expr_has_constants (simplified));
2820 else
2821 fprintf (dump_file, "\n");
2824 /* Setting value numbers to constants will occasionally
2825 screw up phi congruence because constants are not
2826 uniquely associated with a single ssa name that can be
2827 looked up. */
2828 if (simplified
2829 && is_gimple_min_invariant (simplified)
2830 && TREE_CODE (lhs) == SSA_NAME)
2832 VN_INFO (lhs)->expr = simplified;
2833 VN_INFO (lhs)->has_constants = true;
2834 changed = set_ssa_val_to (lhs, simplified);
2835 goto done;
2837 else if (simplified
2838 && TREE_CODE (simplified) == SSA_NAME
2839 && TREE_CODE (lhs) == SSA_NAME)
2841 changed = visit_copy (lhs, simplified);
2842 goto done;
2844 else if (simplified)
2846 if (TREE_CODE (lhs) == SSA_NAME)
2848 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
2849 /* We have to unshare the expression or else
2850 valueizing may change the IL stream. */
2851 VN_INFO (lhs)->expr = unshare_expr (simplified);
2854 else if (stmt_has_constants (stmt)
2855 && TREE_CODE (lhs) == SSA_NAME)
2856 VN_INFO (lhs)->has_constants = true;
2857 else if (TREE_CODE (lhs) == SSA_NAME)
2859 /* We reset expr and constantness here because we may
2860 have been value numbering optimistically, and
2861 iterating. They may become non-constant in this case,
2862 even if they were optimistically constant. */
2864 VN_INFO (lhs)->has_constants = false;
2865 VN_INFO (lhs)->expr = NULL_TREE;
2868 if ((TREE_CODE (lhs) == SSA_NAME
2869 /* We can substitute SSA_NAMEs that are live over
2870 abnormal edges with their constant value. */
2871 && !(gimple_assign_copy_p (stmt)
2872 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2873 && !(simplified
2874 && is_gimple_min_invariant (simplified))
2875 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2876 /* Stores or copies from SSA_NAMEs that are live over
2877 abnormal edges are a problem. */
2878 || (gimple_assign_single_p (stmt)
2879 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2880 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
2881 changed = defs_to_varying (stmt);
2882 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
2884 changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
2886 else if (TREE_CODE (lhs) == SSA_NAME)
2888 if ((gimple_assign_copy_p (stmt)
2889 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2890 || (simplified
2891 && is_gimple_min_invariant (simplified)))
2893 VN_INFO (lhs)->has_constants = true;
2894 if (simplified)
2895 changed = set_ssa_val_to (lhs, simplified);
2896 else
2897 changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
2899 else
2901 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2903 case GIMPLE_UNARY_RHS:
2904 case GIMPLE_BINARY_RHS:
2905 case GIMPLE_TERNARY_RHS:
2906 changed = visit_nary_op (lhs, stmt);
2907 break;
2908 case GIMPLE_SINGLE_RHS:
2909 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2911 case tcc_reference:
2912 /* VOP-less references can go through the unary case. */
2913 if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
2914 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2915 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2916 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
2918 changed = visit_nary_op (lhs, stmt);
2919 break;
2921 /* Fallthrough. */
2922 case tcc_declaration:
2923 changed = visit_reference_op_load
2924 (lhs, gimple_assign_rhs1 (stmt), stmt);
2925 break;
2926 case tcc_expression:
2927 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
2929 changed = visit_nary_op (lhs, stmt);
2930 break;
2932 /* Fallthrough. */
2933 default:
2934 changed = defs_to_varying (stmt);
2936 break;
2937 default:
2938 changed = defs_to_varying (stmt);
2939 break;
2943 else
2944 changed = defs_to_varying (stmt);
2946 else if (is_gimple_call (stmt))
2948 tree lhs = gimple_call_lhs (stmt);
2950 /* ??? We could try to simplify calls. */
2952 if (stmt_has_constants (stmt)
2953 && TREE_CODE (lhs) == SSA_NAME)
2954 VN_INFO (lhs)->has_constants = true;
2955 else if (TREE_CODE (lhs) == SSA_NAME)
2957 /* We reset expr and constantness here because we may
2958 have been value numbering optimistically, and
2959 iterating. They may become non-constant in this case,
2960 even if they were optimistically constant. */
2961 VN_INFO (lhs)->has_constants = false;
2962 VN_INFO (lhs)->expr = NULL_TREE;
2965 if (TREE_CODE (lhs) == SSA_NAME
2966 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2967 changed = defs_to_varying (stmt);
2968 /* ??? We should handle stores from calls. */
2969 else if (TREE_CODE (lhs) == SSA_NAME)
2971 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
2972 changed = visit_reference_op_call (lhs, stmt);
2973 else
2974 changed = defs_to_varying (stmt);
2976 else
2977 changed = defs_to_varying (stmt);
2980 done:
2981 return changed;
2984 /* Compare two operands by reverse postorder index. */
2986 static int
2987 compare_ops (const void *pa, const void *pb)
2989 const tree opa = *((const tree *)pa);
2990 const tree opb = *((const tree *)pb);
2991 gimple opstmta = SSA_NAME_DEF_STMT (opa);
2992 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
2993 basic_block bba;
2994 basic_block bbb;
2996 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
2997 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2998 else if (gimple_nop_p (opstmta))
2999 return -1;
3000 else if (gimple_nop_p (opstmtb))
3001 return 1;
3003 bba = gimple_bb (opstmta);
3004 bbb = gimple_bb (opstmtb);
3006 if (!bba && !bbb)
3007 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3008 else if (!bba)
3009 return -1;
3010 else if (!bbb)
3011 return 1;
3013 if (bba == bbb)
3015 if (gimple_code (opstmta) == GIMPLE_PHI
3016 && gimple_code (opstmtb) == GIMPLE_PHI)
3017 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3018 else if (gimple_code (opstmta) == GIMPLE_PHI)
3019 return -1;
3020 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3021 return 1;
3022 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3023 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3024 else
3025 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3027 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3030 /* Sort an array containing members of a strongly connected component
3031 SCC so that the members are ordered by RPO number.
3032 This means that when the sort is complete, iterating through the
3033 array will give you the members in RPO order. */
3035 static void
3036 sort_scc (VEC (tree, heap) *scc)
3038 VEC_qsort (tree, scc, compare_ops);
3041 /* Insert the no longer used nary ONARY into the hash table of INFO. */
3043 static void
3044 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3046 size_t size = sizeof_vn_nary_op (onary->length);
3047 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3048 &info->nary_obstack);
3049 memcpy (nary, onary, size);
3050 vn_nary_op_insert_into (nary, info->nary, false);
3053 /* Insert the no longer used phi OPHI into the hash table of INFO. */
3055 static void
3056 copy_phi (vn_phi_t ophi, vn_tables_t info)
3058 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3059 void **slot;
3060 memcpy (phi, ophi, sizeof (*phi));
3061 ophi->phiargs = NULL;
3062 slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
3063 gcc_assert (!*slot);
3064 *slot = phi;
3067 /* Insert the no longer used reference OREF into the hash table of INFO. */
3069 static void
3070 copy_reference (vn_reference_t oref, vn_tables_t info)
3072 vn_reference_t ref;
3073 void **slot;
3074 ref = (vn_reference_t) pool_alloc (info->references_pool);
3075 memcpy (ref, oref, sizeof (*ref));
3076 oref->operands = NULL;
3077 slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
3078 INSERT);
3079 if (*slot)
3080 free_reference (*slot);
3081 *slot = ref;
3084 /* Process a strongly connected component in the SSA graph. */
3086 static void
3087 process_scc (VEC (tree, heap) *scc)
3089 tree var;
3090 unsigned int i;
3091 unsigned int iterations = 0;
3092 bool changed = true;
3093 htab_iterator hi;
3094 vn_nary_op_t nary;
3095 vn_phi_t phi;
3096 vn_reference_t ref;
3098 /* If the SCC has a single member, just visit it. */
3099 if (VEC_length (tree, scc) == 1)
3101 tree use = VEC_index (tree, scc, 0);
3102 if (VN_INFO (use)->use_processed)
3103 return;
3104 /* We need to make sure it doesn't form a cycle itself, which can
3105 happen for self-referential PHI nodes. In that case we would
3106 end up inserting an expression with VN_TOP operands into the
3107 valid table which makes us derive bogus equivalences later.
3108 The cheapest way to check this is to assume it for all PHI nodes. */
3109 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3110 /* Fallthru to iteration. */ ;
3111 else
3113 visit_use (use);
3114 return;
3118 /* Iterate over the SCC with the optimistic table until it stops
3119 changing. */
3120 current_info = optimistic_info;
3121 while (changed)
3123 changed = false;
3124 iterations++;
3125 /* As we are value-numbering optimistically we have to
3126 clear the expression tables and the simplified expressions
3127 in each iteration until we converge. */
3128 htab_empty (optimistic_info->nary);
3129 htab_empty (optimistic_info->phis);
3130 htab_empty (optimistic_info->references);
3131 obstack_free (&optimistic_info->nary_obstack, NULL);
3132 gcc_obstack_init (&optimistic_info->nary_obstack);
3133 empty_alloc_pool (optimistic_info->phis_pool);
3134 empty_alloc_pool (optimistic_info->references_pool);
3135 FOR_EACH_VEC_ELT (tree, scc, i, var)
3136 VN_INFO (var)->expr = NULL_TREE;
3137 FOR_EACH_VEC_ELT (tree, scc, i, var)
3138 changed |= visit_use (var);
3141 statistics_histogram_event (cfun, "SCC iterations", iterations);
3143 /* Finally, copy the contents of the no longer used optimistic
3144 table to the valid table. */
3145 FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
3146 copy_nary (nary, valid_info);
3147 FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
3148 copy_phi (phi, valid_info);
3149 FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
3150 copy_reference (ref, valid_info);
3152 current_info = valid_info;
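/* An illustrative example of why iterating with the optimistic table
   is needed, using a hypothetical loop cycle:

     i_1 = PHI <0, i_2>
     i_2 = i_1 + 0;

   In the first iteration i_1 is optimistically value numbered to 0
   (the VN_TOP argument is ignored) and i_2 then simplifies to 0 as
   well. The second iteration re-visits the PHI with both arguments
   equal to 0, nothing changes, and the optimistic result is proven
   and copied to the valid table.  */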
3155 DEF_VEC_O(ssa_op_iter);
3156 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
3158 /* Pop the components of the found SCC for NAME off the SCC stack
3159 and process them. Returns true if all went well, false if
3160 we run into resource limits. */
3162 static bool
3163 extract_and_process_scc_for_name (tree name)
3165 VEC (tree, heap) *scc = NULL;
3166 tree x;
3168 /* Found an SCC, pop the components off the SCC stack and
3169 process them. */
3172 x = VEC_pop (tree, sccstack);
3174 VN_INFO (x)->on_sccstack = false;
3175 VEC_safe_push (tree, heap, scc, x);
3176 } while (x != name);
3178 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
3179 if (VEC_length (tree, scc)
3180 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3182 if (dump_file)
3183 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3184 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
3185 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3186 return false;
3189 if (VEC_length (tree, scc) > 1)
3190 sort_scc (scc);
3192 if (dump_file && (dump_flags & TDF_DETAILS))
3193 print_scc (dump_file, scc);
3195 process_scc (scc);
3197 VEC_free (tree, heap, scc);
3199 return true;
3202 /* Depth-first search on NAME to discover and process SCCs in the SSA
3203 graph.
3204 Execution of this algorithm relies on the fact that the SCCs are
3205 popped off the stack in topological order.
3206 Returns true if successful, false if we stopped processing SCCs due
3207 to resource constraints. */
3209 static bool
3210 DFS (tree name)
3212 VEC(ssa_op_iter, heap) *itervec = NULL;
3213 VEC(tree, heap) *namevec = NULL;
3214 use_operand_p usep = NULL;
3215 gimple defstmt;
3216 tree use;
3217 ssa_op_iter iter;
3219 start_over:
3220 /* SCC info */
3221 VN_INFO (name)->dfsnum = next_dfs_num++;
3222 VN_INFO (name)->visited = true;
3223 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3225 VEC_safe_push (tree, heap, sccstack, name);
3226 VN_INFO (name)->on_sccstack = true;
3227 defstmt = SSA_NAME_DEF_STMT (name);
3229 /* Recursively DFS on our operands, looking for SCCs. */
3230 if (!gimple_nop_p (defstmt))
3232 /* Push a new iterator. */
3233 if (gimple_code (defstmt) == GIMPLE_PHI)
3234 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3235 else
3236 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3238 else
3239 clear_and_done_ssa_iter (&iter);
3241 while (1)
3243 /* If we are done processing uses of a name, go up the stack
3244 of iterators and process SCCs as we found them. */
3245 if (op_iter_done (&iter))
3247 /* See if we found an SCC. */
3248 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3249 if (!extract_and_process_scc_for_name (name))
3251 VEC_free (tree, heap, namevec);
3252 VEC_free (ssa_op_iter, heap, itervec);
3253 return false;
3256 /* Check if we are done. */
3257 if (VEC_empty (tree, namevec))
3259 VEC_free (tree, heap, namevec);
3260 VEC_free (ssa_op_iter, heap, itervec);
3261 return true;
3264 /* Restore the last use walker and continue walking there. */
3265 use = name;
3266 name = VEC_pop (tree, namevec);
3267 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
3268 sizeof (ssa_op_iter));
3269 VEC_pop (ssa_op_iter, itervec);
3270 goto continue_walking;
3273 use = USE_FROM_PTR (usep);
3275 /* Since we handle phi nodes, we will sometimes get
3276 invariants in the use expression. */
3277 if (TREE_CODE (use) == SSA_NAME)
3279 if (! (VN_INFO (use)->visited))
3281 /* Recurse by pushing the current use walking state on
3282 the stack and starting over. */
3283 VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
3284 VEC_safe_push(tree, heap, namevec, name);
3285 name = use;
3286 goto start_over;
3288 continue_walking:
3289 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3290 VN_INFO (use)->low);
3292 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3293 && VN_INFO (use)->on_sccstack)
3295 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3296 VN_INFO (name)->low);
3300 usep = op_iter_next_use (&iter);
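/* The walk above is an iterative version of Tarjan's SCC algorithm,
   with the recursion replaced by the explicit namevec/itervec stacks.
   As a hypothetical example, for

     x_1 = PHI <0, y_2>
     y_2 = x_1 + 1;

   DFS (x_1) visits y_2, which reaches back to x_1; the low-link
   updates make VN_INFO (x_1)->low equal to its dfsnum, so both names
   are popped as one SCC and iterated together by process_scc.  */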
3304 /* Allocate a value number table. */
3306 static void
3307 allocate_vn_table (vn_tables_t table)
3309 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3310 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3311 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3312 free_reference);
3314 gcc_obstack_init (&table->nary_obstack);
3315 table->phis_pool = create_alloc_pool ("VN phis",
3316 sizeof (struct vn_phi_s),
3317 30);
3318 table->references_pool = create_alloc_pool ("VN references",
3319 sizeof (struct vn_reference_s),
3320 30);
3323 /* Free a value number table. */
3325 static void
3326 free_vn_table (vn_tables_t table)
3328 htab_delete (table->phis);
3329 htab_delete (table->nary);
3330 htab_delete (table->references);
3331 obstack_free (&table->nary_obstack, NULL);
3332 free_alloc_pool (table->phis_pool);
3333 free_alloc_pool (table->references_pool);
3336 static void
3337 init_scc_vn (void)
3339 size_t i;
3340 int j;
3341 int *rpo_numbers_temp;
3343 calculate_dominance_info (CDI_DOMINATORS);
3344 sccstack = NULL;
3345 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3346 free);
3348 constant_value_ids = BITMAP_ALLOC (NULL);
3350 next_dfs_num = 1;
3351 next_value_id = 1;
3353 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
3354 /* VEC_alloc doesn't actually grow it to the right size; it just
3355 preallocates the space to do so. */
3356 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
3357 gcc_obstack_init (&vn_ssa_aux_obstack);
3359 shared_lookup_phiargs = NULL;
3360 shared_lookup_references = NULL;
3361 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3362 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3363 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3365 /* rpo_numbers_temp is an array in RPO order: rpo_numbers_temp[i] is
3366 the index of the i'th block in RPO order. We want to map block
3367 indices to RPO numbers, so we need to invert this array. */
3368 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3369 rpo_numbers[rpo_numbers_temp[j]] = j;
3371 XDELETE (rpo_numbers_temp);
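/* A small worked example, for a hypothetical CFG: if
   pre_and_rev_post_order_compute filled rpo_numbers_temp with
   { 2, 4, 3 }, block 2 comes first in RPO, so the inversion above
   yields rpo_numbers[2] == 0, rpo_numbers[4] == 1 and
   rpo_numbers[3] == 2.  */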
3373 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3375 /* Create the VN_INFO structures, and initialize value numbers to
3376 TOP. */
3377 for (i = 0; i < num_ssa_names; i++)
3379 tree name = ssa_name (i);
3380 if (name)
3382 VN_INFO_GET (name)->valnum = VN_TOP;
3383 VN_INFO (name)->expr = NULL_TREE;
3384 VN_INFO (name)->value_id = 0;
3388 renumber_gimple_stmt_uids ();
3390 /* Create the valid and optimistic value numbering tables. */
3391 valid_info = XCNEW (struct vn_tables_s);
3392 allocate_vn_table (valid_info);
3393 optimistic_info = XCNEW (struct vn_tables_s);
3394 allocate_vn_table (optimistic_info);
3397 void
3398 free_scc_vn (void)
3400 size_t i;
3402 htab_delete (constant_to_value_id);
3403 BITMAP_FREE (constant_value_ids);
3404 VEC_free (tree, heap, shared_lookup_phiargs);
3405 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
3406 XDELETEVEC (rpo_numbers);
3408 for (i = 0; i < num_ssa_names; i++)
3410 tree name = ssa_name (i);
3411 if (name
3412 && VN_INFO (name)->needs_insertion)
3413 release_ssa_name (name);
3415 obstack_free (&vn_ssa_aux_obstack, NULL);
3416 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
3418 VEC_free (tree, heap, sccstack);
3419 free_vn_table (valid_info);
3420 XDELETE (valid_info);
3421 free_vn_table (optimistic_info);
3422 XDELETE (optimistic_info);
3425 /* Set *ID if we computed something useful in RESULT. */
3427 static void
3428 set_value_id_for_result (tree result, unsigned int *id)
3430 if (result)
3432 if (TREE_CODE (result) == SSA_NAME)
3433 *id = VN_INFO (result)->value_id;
3434 else if (is_gimple_min_invariant (result))
3435 *id = get_or_alloc_constant_value_id (result);
3439 /* Set the value ids in the valid hash tables. */
3441 static void
3442 set_hashtable_value_ids (void)
3444 htab_iterator hi;
3445 vn_nary_op_t vno;
3446 vn_reference_t vr;
3447 vn_phi_t vp;
3449 /* Now set the value ids of the things we had put in the hash
3450 tables. */
3452 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
3453 vno, vn_nary_op_t, hi)
3454 set_value_id_for_result (vno->result, &vno->value_id);
3456 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
3457 vp, vn_phi_t, hi)
3458 set_value_id_for_result (vp->result, &vp->value_id);
3460 FOR_EACH_HTAB_ELEMENT (valid_info->references,
3461 vr, vn_reference_t, hi)
3462 set_value_id_for_result (vr->result, &vr->value_id);
3465 /* Do SCCVN. Returns true if it finished, false if we bailed out
3466 due to resource constraints. */
3468 bool
3469 run_scc_vn (void)
3471 size_t i;
3472 tree param;
3473 bool changed = true;
3475 init_scc_vn ();
3476 current_info = valid_info;
3478 for (param = DECL_ARGUMENTS (current_function_decl);
3479 param;
3480 param = DECL_CHAIN (param))
3482 if (gimple_default_def (cfun, param) != NULL)
3484 tree def = gimple_default_def (cfun, param);
3485 VN_INFO (def)->valnum = def;
3489 for (i = 1; i < num_ssa_names; ++i)
3491 tree name = ssa_name (i);
3492 if (name
3493 && VN_INFO (name)->visited == false
3494 && !has_zero_uses (name))
3495 if (!DFS (name))
3497 free_scc_vn ();
3498 return false;
3502 /* Initialize the value ids. */
3504 for (i = 1; i < num_ssa_names; ++i)
3506 tree name = ssa_name (i);
3507 vn_ssa_aux_t info;
3508 if (!name)
3509 continue;
3510 info = VN_INFO (name);
3511 if (info->valnum == name
3512 || info->valnum == VN_TOP)
3513 info->value_id = get_next_value_id ();
3514 else if (is_gimple_min_invariant (info->valnum))
3515 info->value_id = get_or_alloc_constant_value_id (info->valnum);
3518 /* Propagate until they stop changing. */
3519 while (changed)
3521 changed = false;
3522 for (i = 1; i < num_ssa_names; ++i)
3524 tree name = ssa_name (i);
3525 vn_ssa_aux_t info;
3526 if (!name)
3527 continue;
3528 info = VN_INFO (name);
3529 if (TREE_CODE (info->valnum) == SSA_NAME
3530 && info->valnum != name
3531 && info->value_id != VN_INFO (info->valnum)->value_id)
3533 changed = true;
3534 info->value_id = VN_INFO (info->valnum)->value_id;
3539 set_hashtable_value_ids ();
3541 if (dump_file && (dump_flags & TDF_DETAILS))
3543 fprintf (dump_file, "Value numbers:\n");
3544 for (i = 0; i < num_ssa_names; i++)
3546 tree name = ssa_name (i);
3547 if (name
3548 && VN_INFO (name)->visited
3549 && SSA_VAL (name) != name)
3551 print_generic_expr (dump_file, name, 0);
3552 fprintf (dump_file, " = ");
3553 print_generic_expr (dump_file, SSA_VAL (name), 0);
3554 fprintf (dump_file, "\n");
3559 return true;
3562 /* Return the maximum value id we have ever seen. */
3564 unsigned int
3565 get_max_value_id (void)
3567 return next_value_id;
3570 /* Return the next unique value id. */
3572 unsigned int
3573 get_next_value_id (void)
3575 return next_value_id++;
3579 /* Compare two expressions E1 and E2 and return true if they are equal. */
3581 bool
3582 expressions_equal_p (tree e1, tree e2)
3584 /* The obvious case. */
3585 if (e1 == e2)
3586 return true;
3588 /* If only one of them is null, they cannot be equal. */
3589 if (!e1 || !e2)
3590 return false;
3592 /* Now perform the actual comparison. */
3593 if (TREE_CODE (e1) == TREE_CODE (e2)
3594 && operand_equal_p (e1, e2, OEP_PURE_SAME))
3595 return true;
3597 return false;
3601 /* Return true if the nary operation NARY may trap. This is a copy
3602 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
3604 bool
3605 vn_nary_may_trap (vn_nary_op_t nary)
3607 tree type;
3608 tree rhs2 = NULL_TREE;
3609 bool honor_nans = false;
3610 bool honor_snans = false;
3611 bool fp_operation = false;
3612 bool honor_trapv = false;
3613 bool handled, ret;
3614 unsigned i;
3616 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
3617 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
3618 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
3620 type = nary->type;
3621 fp_operation = FLOAT_TYPE_P (type);
3622 if (fp_operation)
3624 honor_nans = flag_trapping_math && !flag_finite_math_only;
3625 honor_snans = flag_signaling_nans != 0;
3627 else if (INTEGRAL_TYPE_P (type)
3628 && TYPE_OVERFLOW_TRAPS (type))
3629 honor_trapv = true;
3631 if (nary->length >= 2)
3632 rhs2 = nary->op[1];
3633 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
3634 honor_trapv,
3635 honor_nans, honor_snans, rhs2,
3636 &handled);
3637 if (handled
3638 && ret)
3639 return true;
3641 for (i = 0; i < nary->length; ++i)
3642 if (tree_could_trap_p (nary->op[i]))
3643 return true;
3645 return false;