/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.
   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
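
/* As an illustration (not part of the implementation), consider the
   simple counted loop

	i_1 = PHI <0 (preheader), i_2 (latch)>
	i_2 = i_1 + 1;

   i_1 and i_2 form a cycle (an SCC) in the SSA graph.  The SCC walk
   value-numbers just this cycle, iterating with the optimistic table
   until the value numbers of i_1 and i_2 stabilize, rather than
   re-running a whole-function RPO iteration.  */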

/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;


DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
                                SSA_NAME_VERSION (name));
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), value);
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}

/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is a default-definition or a PHI result
     use it directly.  */
  if (gimple_nop_p (def_stmt)
      || gimple_code (def_stmt) == GIMPLE_PHI)
    return vn->valnum;

  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
    {
    case tcc_reference:
      if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
           || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
           || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
          && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
        expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
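
/* Illustrative example (hypothetical SSA names): if x_3 has value
   number y_2, and y_2 is defined by  y_2 = a_1 + 1,  then
   vn_get_expr_for (x_3) builds, caches and returns the tcc_binary
   expression  a_1 + 1  via fold_build2.  Had y_2 been a PHI result or
   a default definition, the SSA name y_2 itself would be returned.  */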

/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist, return 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT; if one does not exist, create a
   new one.  Either way, return the value id.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, INSERT);
  if (*slot)
    return ((vn_constant_t)*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = (void *) vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}
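
/* Usage sketch (illustrative): the first call to
   get_or_alloc_constant_value_id on e.g.
   build_int_cst (integer_type_node, 42) allocates a fresh id via
   get_next_value_id and records it in constant_value_ids; later calls
   with an equal constant of a compatible type return the same id, for
   which value_id_constant_p then holds.  */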

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return vro1->opcode == vro2->opcode
    && types_compatible_p (vro1->type, vro2->type)
    && expressions_equal_p (vro1->op0, vro2->op0)
    && expressions_equal_p (vro1->op1, vro2->op1)
    && expressions_equal_p (vro1->op2, vro2->op2);
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vn_reference_op_s, vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
        deref = true;
      else if (vro->opcode != ADDR_EXPR)
        deref = false;
      if (vro->off != -1)
        {
          if (off == -1)
            off = 0;
          off += vro->off;
        }
      else
        {
          if (off != -1
              && off != 0)
            result = iterative_hash_hashval_t (off, result);
          off = -1;
          if (deref
              && vro->opcode == ADDR_EXPR)
            {
              if (vro->op0)
                {
                  tree op = TREE_OPERAND (vro->op0, 0);
                  result = iterative_hash_hashval_t (TREE_CODE (op), result);
                  result = iterative_hash_expr (op, result);
                }
            }
          else
            result = vn_reference_op_compute_hash (vro, result);
        }
    }
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
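
/* Note how the code above hashes runs of operands with known constant
   offsets by their accumulated offset only.  E.g. (illustrative,
   assuming field f sits at byte offset 4)  s.f  and  MEM[&s + 4]
   both hash offset 4 followed by the underlying decl s, and so can
   end up in the same hash bucket for the deref-aware equality check
   below.  */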

/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  unsigned i, j;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
        return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
           && (TYPE_PRECISION (vr1->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
           && (TYPE_PRECISION (vr2->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
        {
          if (vro1->opcode == MEM_REF)
            deref1 = true;
          if (vro1->off == -1)
            break;
          off1 += vro1->off;
        }
      for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
        {
          if (vro2->opcode == MEM_REF)
            deref2 = true;
          if (vro2->off == -1)
            break;
          off2 += vro2->off;
        }
      if (off1 != off2)
        return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
        {
          memset (&tem1, 0, sizeof (tem1));
          tem1.op0 = TREE_OPERAND (vro1->op0, 0);
          tem1.type = TREE_TYPE (tem1.op0);
          tem1.opcode = TREE_CODE (tem1.op0);
          vro1 = &tem1;
        }
      if (deref2 && vro2->opcode == ADDR_EXPR)
        {
          memset (&tem2, 0, sizeof (tem2));
          tem2.op0 = TREE_OPERAND (vro2->op0, 0);
          tem2.type = TREE_TYPE (tem2.op0);
          tem2.opcode = TREE_CODE (tem2.op0);
          vro2 = &tem2;
        }
      if (!vn_reference_op_eq (vro1, vro2))
        return false;
      ++j;
      ++i;
    }
  while (VEC_length (vn_reference_op_s, vr1->operands) != i
         || VEC_length (vn_reference_op_s, vr2->operands) != j);

  return true;
}
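
/* The do-while above compares the operand vectors segment-wise: each
   maximal run of operands with known constant offsets is compared via
   its accumulated offset (off1/off2), then the operand that ended the
   run is compared structurally, with an ADDR_EXPR of a decl standing
   in for the decl itself.  Continuing the example at the hash
   function,  s.f  and  MEM[&s + 4]  compare equal this way although
   their operand vectors differ element-wise.  */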

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
        {
        case MEM_REF:
          /* The base address gets its own vn_reference_op_s structure.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          if (host_integerp (TREE_OPERAND (ref, 1), 0))
            temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          {
            tree this_offset = component_ref_field_offset (ref);
            if (this_offset
                && TREE_CODE (this_offset) == INTEGER_CST)
              {
                tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
                if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
                  {
                    double_int off
                      = double_int_add (tree_to_double_int (this_offset),
                                        double_int_rshift
                                          (tree_to_double_int (bit_offset),
                                           BITS_PER_UNIT == 8
                                           ? 3 : exact_log2 (BITS_PER_UNIT),
                                           HOST_BITS_PER_DOUBLE_INT, true));
                    if (double_int_fits_in_shwi_p (off))
                      temp.off = off.low;
                  }
              }
          }
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
          if (TREE_CODE (temp.op0) == INTEGER_CST
              && TREE_CODE (temp.op1) == INTEGER_CST
              && TREE_CODE (temp.op2) == INTEGER_CST)
            {
              double_int off = tree_to_double_int (temp.op0);
              off = double_int_add (off,
                                    double_int_neg
                                      (tree_to_double_int (temp.op1)));
              off = double_int_mul (off, tree_to_double_int (temp.op2));
              if (double_int_fits_in_shwi_p (off))
                temp.off = off.low;
            }
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case VAR_DECL:
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (i.e. they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration.  */
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          temp.off = 0;
          break;
        case IMAGPART_EXPR:
          /* This is only interesting for its constant offset.  */
          temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
          break;
        default:
          gcc_unreachable ();
        }
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
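
/* Decomposition example (illustrative): the reference  a.b[i_1]  is
   pushed outermost-first roughly as

	{ ARRAY_REF      op0 = i_1, op1 = low bound, op2 = element size }
	{ COMPONENT_REF  op0 = FIELD_DECL b }
	{ VAR_DECL       op0 = a }

   with temp.off filled in for every operand whose offset is known to
   be constant.  */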

/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
        size = -1;
      else
        size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case ADDR_EXPR:
          /* Apart from ADDR_EXPR arguments to MEM_REF.  */
          if (base != NULL_TREE
              && TREE_CODE (base) == MEM_REF
              && op->op0
              && DECL_P (TREE_OPERAND (op->op0, 0)))
            {
              vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
              base = TREE_OPERAND (op->op0, 0);
              if (pop->off == -1)
                {
                  max_size = -1;
                  offset = 0;
                }
              else
                offset += pop->off * BITS_PER_UNIT;
              op0_p = NULL;
              break;
            }
          /* Fallthru.  */
        case CALL_EXPR:
          return false;

        /* Record the base objects.  */
        case MEM_REF:
          base_alias_set = get_deref_alias_set (op->op0);
          *op0_p = build2 (MEM_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          op0_p = NULL;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += tree_low_cst (op->op1, 0);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */
            if (op->op1
                || !host_integerp (DECL_FIELD_OFFSET (field), 1))
              max_size = -1;
            else
              {
                offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
                           * BITS_PER_UNIT);
                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (!host_integerp (op->op0, 0)
              || !host_integerp (op->op1, 0)
              || !host_integerp (op->op2, 0))
            max_size = -1;
          else
            {
              HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
              hindex -= TREE_INT_CST_LOW (op->op1);
              hindex *= TREE_INT_CST_LOW (op->op2);
              hindex *= BITS_PER_UNIT;
              offset += hindex;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);

  return true;
}
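
/* E.g. (sketch): from the operand vector of  MEM[&a + 4]  the loop
   above recovers base A, offset 4 * BITS_PER_UNIT, and size/max_size
   from the access type, yielding an ao_ref the oracle can use just
   like one built from a genuine tree reference.  */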

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  temp.off = -1;
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
                            unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  tree addr_base;
  HOST_WIDE_INT addr_offset;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
                                             &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != op->op0)
    {
      double_int off = tree_to_double_int (mem_op->op0);
      off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
      off = double_int_add (off, shwi_to_double_int (addr_offset));
      mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (host_integerp (mem_op->op0, 0))
        mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
      else
        mem_op->off = -1;
    }
}
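
/* Example (illustrative, assuming field b sits at byte offset 8):
   the operand pair for  MEM[&a.b, 4]  is rewritten by the code above
   into the pair for  MEM[&a, 12], i.e. the constant offset of the
   component path is folded into the MEM_REF offset and the address
   operand becomes &a.  */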

/* Maybe forward-propagate the definition of the address at position
   *I_P in a vn_reference_op_s vector *OPS: if the address is an SSA
   name defined by an ADDR_EXPR or a POINTER_PLUS_EXPR, fold its
   constant offset into the preceding MEM_REF offset and replace the
   address operand.  Updates *I_P to point to the last element of the
   replacement.  */
static void
vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
                                     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  gimple def_stmt;
  enum tree_code code;
  double_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = tree_to_double_int (mem_op->op0);
  off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
                                                 &addr_offset);
      if (!addr_base
          || TREE_CODE (addr_base) != MEM_REF)
        return;

      off = double_int_add (off, shwi_to_double_int (addr_offset));
      off = double_int_add (off, mem_ref_offset (addr_base));
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
          || TREE_CODE (ptroff) != INTEGER_CST)
        return;

      off = double_int_add (off, tree_to_double_int (ptroff));
      op->op0 = ptr;
    }

  mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (host_integerp (mem_op->op0, 0))
    mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}
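
/* Example (illustrative): given

	ptr_1 = ptr_2 + 4;
	... = MEM[ptr_1, 8];

   the POINTER_PLUS_EXPR arm above rewrites the reference to
   MEM[ptr_2, 12] and then recurses in case ptr_2 is itself defined by
   another ADDR_EXPR or POINTER_PLUS_EXPR.  */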

/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  VEC (vn_reference_op_s, heap) *operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = VEC_index (vn_reference_op_s, operands, 0);
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && VEC_length (vn_reference_op_s, operands) >= 2
      && VEC_length (vn_reference_op_s, operands) <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (VEC_length (vn_reference_op_s, operands) > 2)
        arg1 = VEC_index (vn_reference_op_s, operands, 2);
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
          || (arg0->opcode == ADDR_EXPR
              && is_gimple_min_invariant (arg0->op0)))
        anyconst = true;
      if (arg1
          && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
              || (arg1->opcode == ADDR_EXPR
                  && is_gimple_min_invariant (arg1->op0))))
        anyconst = true;
      if (anyconst)
        {
          tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
                                         arg1 ? 2 : 1,
                                         arg0->op0,
                                         arg1 ? arg1->op0 : NULL);
          if (folded
              && TREE_CODE (folded) == NOP_EXPR)
            folded = TREE_OPERAND (folded, 0);
          if (folded
              && is_gimple_min_invariant (folded))
            return folded;
        }
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
           && TREE_CODE (op->op0) == INTEGER_CST
           && integer_zerop (op->op1)
           && VEC_length (vn_reference_op_s, operands) == 2)
    {
      vn_reference_op_t arg0;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (arg0->opcode == STRING_CST
          && (TYPE_MODE (op->type)
              == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
          && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
          && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
          && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
        return build_int_cst_type (op->type,
                                   (TREE_STRING_POINTER (arg0->op0)
                                    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}
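
/* Example (illustrative): a read of  "abc"[1]  arrives here as a
   constant-index ARRAY_REF over a STRING_CST operand and is folded by
   the code above to the character constant 'b'; likewise a builtin
   call whose arguments turned out constant may fold to a constant via
   build_call_expr.  */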

/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  vn_reference_op_t vro;
  unsigned int i;

  FOR_EACH_VEC_ELT (vn_reference_op_s, orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          vro->op0 = SSA_VAL (vro->op0);
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        vro->op1 = SSA_VAL (vro->op1);
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        vro->op2 = SSA_VAL (vro->op2);
      /* If it transforms from an SSA_NAME to an address, fold with
         a preceding indirect reference.  */
      if (i > 0
          && vro->op0
          && TREE_CODE (vro->op0) == ADDR_EXPR
          && VEC_index (vn_reference_op_s,
                        orig, i - 1)->opcode == MEM_REF)
        vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
               && vro->opcode == SSA_NAME
               && VEC_index (vn_reference_op_s,
                             orig, i - 1)->opcode == MEM_REF)
        vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
         one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
               && vro->off == -1
               && TREE_CODE (vro->op0) == INTEGER_CST
               && TREE_CODE (vro->op1) == INTEGER_CST
               && TREE_CODE (vro->op2) == INTEGER_CST)
        {
          double_int off = tree_to_double_int (vro->op0);
          off = double_int_add (off,
                                double_int_neg
                                  (tree_to_double_int (vro->op1)));
          off = double_int_mul (off, tree_to_double_int (vro->op2));
          if (double_int_fits_in_shwi_p (off))
            vro->off = off.low;
        }
    }

  return orig;
}

static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

static tree *last_vuse_ptr;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}

/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree fndecl;
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static VEC (vn_reference_op_s, heap) *lhs_ops = NULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definition's LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      VEC (vn_reference_op_s, heap) *tem;
      tree lhs = gimple_assign_lhs (def_stmt);
      /* Avoid re-allocation overhead.  */
      VEC_truncate (vn_reference_op_s, lhs_ops, 0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      tem = lhs_ops;
      lhs_ops = valueize_refs (lhs_ops);
      gcc_assert (lhs_ops == tem);
      lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref, get_alias_set (lhs),
                                                  TREE_TYPE (lhs), lhs_ops);
      if (lhs_ref_ok
          && !refs_may_alias_p_1 (ref, &lhs_ref, true))
        return NULL;
    }

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && is_gimple_call (def_stmt)
      && (fndecl = gimple_call_fndecl (def_stmt))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
          == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
          && maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      VEC (vn_reference_op_s, heap) *rhs = NULL;
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
        return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      if (maxsize2 == -1
          || (base != base2 && !operand_equal_p (base, base2, 0))
          || offset2 > offset
          || offset2 + size2 < offset + maxsize)
        return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
         contains valueized operands for the lhs.  */
      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
      j = VEC_length (vn_reference_op_s, lhs_ops) - 1;
      while (j >= 0 && i >= 0
             && vn_reference_op_eq (VEC_index (vn_reference_op_s,
                                               vr->operands, i),
                                    VEC_index (vn_reference_op_s, lhs_ops, j)))
        {
          i--;
          j--;
        }

      /* i now points to the first additional op.
         ??? LHS may not be completely contained in VR, one or more
         VIEW_CONVERT_EXPRs could be in its way.  We could at least
         try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
        return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
          > VEC_length (vn_reference_op_s, vr->operands))
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
                         i + 1 + VEC_length (vn_reference_op_s, rhs));
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands,
                      i + 1 + VEC_length (vn_reference_op_s, rhs));
      FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
        VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
      VEC_free (vn_reference_op_s, heap, rhs);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
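
/* Walker example (illustrative): for

	memset (&a, 0, sizeof (a));
	x_1 = a.f;

   case 1) derives the constant 0 for a.f directly from the memset,
   while for an aggregate copy  a = b;  the last case translates the
   lookup of a.f into one of b.f and returns NULL so the walk
   continues with the rewritten reference.  */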

/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            vn_reference_t *vnresult, bool maywalk)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
                 VEC_length (vn_reference_op_s, operands));
  memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
          VEC_address (vn_reference_op_s, operands),
          sizeof (vn_reference_op_s)
          * VEC_length (vn_reference_op_s, operands));
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && maywalk
      && vr1.vuse)
    {
      ao_ref r;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
        *vnresult =
          (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, tree vuse, bool maywalk,
                     vn_reference_t *vnresult)
{
  VEC (vn_reference_op_s, heap) *operands;
  struct vn_reference_s vr1;
  tree cst;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
  vr1.type = TREE_TYPE (op);
  vr1.set = get_alias_set (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  if (maywalk
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      ao_ref_init (&r, op);
      wvnresult =
        (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                vn_reference_lookup_2,
                                                vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
      if (wvnresult)
        {
          if (vnresult)
            *vnresult = wvnresult;
          return wvnresult->result;
        }

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}

/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}

/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            tree result, unsigned int value_id)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}

/* Compute and return the hash value for nary operation VNO1.  */

hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  hash = iterative_hash_hashval_t (vno1->opcode, 0);
  for (i = 0; i < vno1->length; ++i)
    hash = iterative_hash_expr (vno1->op[i], hash);

  return hash;
}
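
/* Canonicalization example (illustrative): for a commutative opcode
   such as PLUS_EXPR the two operands are put into a single canonical
   order before hashing, so  a_1 + b_2  and  b_2 + a_1  obtain the
   same hash and can be unified by vn_nary_op_eq.  */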

/* Return the computed hashcode for nary operation P1.  */

static hashval_t
vn_nary_op_hash (const void *p1)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

int
vn_nary_op_eq (const void *p1, const void *p2)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}

/* Initialize VNO from the pieces provided.  */

static void
init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
                             enum tree_code code, tree type, tree op0,
                             tree op1, tree op2, tree op3)
{
  vno->opcode = code;
  vno->length = length;
  vno->type = type;
  switch (length)
    {
    /* The fallthrus here are deliberate.  */
    case 4: vno->op[3] = op3;
    case 3: vno->op[2] = op2;
    case 2: vno->op[1] = op1;
    case 1: vno->op[0] = op0;
    default:
      break;
    }
}

/* Initialize VNO from OP.  */

static void
init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
{
  unsigned i;

  vno->opcode = TREE_CODE (op);
  vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno->type = TREE_TYPE (op);
  for (i = 0; i < vno->length; ++i)
    vno->op[i] = TREE_OPERAND (op, i);
}

/* Initialize VNO from STMT.  */

static void
init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
{
  unsigned i;

  vno->opcode = gimple_assign_rhs_code (stmt);
  vno->length = gimple_num_ops (stmt) - 1;
  vno->type = gimple_expr_type (stmt);
  for (i = 0; i < vno->length; ++i)
    vno->op[i] = gimple_op (stmt, i + 1);
  if (vno->opcode == REALPART_EXPR
      || vno->opcode == IMAGPART_EXPR
      || vno->opcode == VIEW_CONVERT_EXPR)
    vno->op[0] = TREE_OPERAND (vno->op[0], 0);
}

/* Compute the hashcode for VNO and look for it in the hash table;
   return the resulting value number if it exists in the hash table.
   Return NULL_TREE if it does not exist in the hash table or if the
   result field of the operation is NULL.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

static tree
vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
{
  void **slot;

  if (vnresult)
    *vnresult = NULL;

  vno->hashcode = vn_nary_op_compute_hash (vno);
  slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Lookup an n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0, tree op1, tree op2,
                          tree op3, vn_nary_op_t *vnresult)
{
  struct vn_nary_op_s vno1;
  init_vn_nary_op_from_pieces (&vno1, length, code, type, op0, op1, op2, op3);
  return vn_nary_op_lookup_1 (&vno1, vnresult);
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  struct vn_nary_op_s vno1;
  init_vn_nary_op_from_op (&vno1, op);
  return vn_nary_op_lookup_1 (&vno1, vnresult);
}

/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  struct vn_nary_op_s vno1;
  init_vn_nary_op_from_stmt (&vno1, stmt);
  return vn_nary_op_lookup_1 (&vno1, vnresult);
}

/* Return the size of a vn_nary_op_t with LENGTH operands.  */

static size_t
sizeof_vn_nary_op (unsigned int length)
{
  return sizeof (struct vn_nary_op_s) - sizeof (tree) * (4 - length);
}
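
/* E.g. struct vn_nary_op_s declares room for 4 operand trees, so a
   unary operation only pays for one of them: sizeof_vn_nary_op (1)
   is sizeof (struct vn_nary_op_s) - 3 * sizeof (tree).  */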

/* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */

static vn_nary_op_t
alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
{
  return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
}

/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
   obstack.  */

static vn_nary_op_t
alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
                                               &current_info->nary_obstack);

  vno1->value_id = value_id;
  vno1->length = length;
  vno1->result = result;

  return vno1;
}

/* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
   VNO->HASHCODE first.  */

static vn_nary_op_t
vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash)
{
  void **slot;

  if (compute_hash)
    vno->hashcode = vn_nary_op_compute_hash (vno);

  slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT);
  gcc_assert (!*slot);

  *slot = vno;
  return vno;
}

/* Insert an n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0,
                          tree op1, tree op2, tree op3,
                          tree result,
                          unsigned int value_id)
{
  vn_nary_op_t vno1;

  vno1 = alloc_vn_nary_op (length, result, value_id);
  init_vn_nary_op_from_pieces (vno1, length, code, type, op0, op1, op2, op3);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}

/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  vn_nary_op_t vno1;

  vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_op (vno1, op);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}

/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  unsigned length = gimple_num_ops (stmt) - 1;
  vn_nary_op_t vno1;

  vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}

/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  hashval_t result;
  int i;
  tree phi1op;
  tree type;

  result = vp1->block->index;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
  result += (INTEGRAL_TYPE_P (type)
             + (INTEGRAL_TYPE_P (type)
                ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));

  FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
    {
      if (phi1op == VN_TOP)
        continue;
      result = iterative_hash_expr (phi1op, result);
    }

  return result;
}
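
/* The type contribution above matters for all-constant PHIs: e.g. an
   int PHI <0, 1> and a long PHI <0, 1> in the same block hash their
   arguments identically and can only be told apart by the precision
   and signedness mixed into the hash.  */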

/* Return the computed hashcode for phi operation P1.  */

static hashval_t
vn_phi_hash (const void *p1)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const void *p1, const void *p2)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  const_vn_phi_t const vp2 = (const_vn_phi_t) p2;

  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* If the PHI nodes do not have compatible types
         they are not the same.  */
      if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
                               TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
        return false;

      /* Any phi in the same block will have its arguments in the
         same edge order, because of how we store phi nodes.  */
      FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
        {
          tree phi2op = VEC_index (tree, vp2->phiargs, i);
          if (phi1op == VN_TOP || phi2op == VN_TOP)
            continue;
          if (!expressions_equal_p (phi1op, phi2op))
            return false;
        }
      return true;
    }
  return false;
}
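
/* Because VN_TOP compares equal to anything, a PHI whose argument is
   still VN_TOP during optimistic iteration, say x_1 = PHI <a_2, VN_TOP>,
   can unify with x_3 = PHI <a_2, b_4> in the same block; the TOP slot
   stands for whatever value it may later turn out to have.  */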

static VEC(tree, heap) *shared_lookup_phiargs;

/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple phi)
{
  void **slot;
  struct vn_phi_s vp1;
  unsigned i;

  VEC_truncate (tree, shared_lookup_phiargs, 0);

  /* Canonicalize the SSA_NAMEs to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
    }
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = gimple_bb (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return ((vn_phi_t)*slot)->result;
}

/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple phi, tree result)
{
  void **slot;
  vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
  unsigned i;
  VEC (tree, heap) *args = NULL;

  /* Canonicalize the SSA_NAMEs to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, args, def);
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->phiargs = args;
  vp1->block = gimple_bb (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
                                   INSERT);

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
  *slot = vp1;
  return vp1;
}

/* Print set of components in strongly connected component SCC to OUT.  */

static void
print_scc (FILE *out, VEC (tree, heap) *scc)
{
  tree var;
  unsigned int i;

  fprintf (out, "SCC consists of: ");
  FOR_EACH_VEC_ELT (tree, scc, i, var)
    {
      print_generic_expr (out, var, 0);
      fprintf (out, " ");
    }
  fprintf (out, "\n");
}

/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  tree currval;

  if (from != to
      && TREE_CODE (to) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
    to = from;

  /* The only thing we allow as value numbers are VN_TOP, ssa_names
     and invariants.  So assert that here.  */
  gcc_assert (to != NULL_TREE
              && (to == VN_TOP
                  || TREE_CODE (to) == SSA_NAME
                  || is_gimple_min_invariant (to)));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);
    }

  currval = SSA_VAL (from);

  if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
    {
      VN_INFO (from)->valnum = to;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " (changed)\n");
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}
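
/* The changed result feeds the fixpoint loop in process_scc: a name in
   a cycle may first be set optimistically (e.g. to a constant) and
   later be re-set once more of the SCC is known, and iteration stops
   only when no set_ssa_val_to call reports a change.  */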

/* Value number all definitions in STMT to themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}

static bool expr_has_constants (tree expr);
static tree valueize_expr (tree expr);

/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Follow chains of copies to their destination.  */
  while (TREE_CODE (rhs) == SSA_NAME
         && SSA_VAL (rhs) != rhs)
    rhs = SSA_VAL (rhs);

  /* The copy may have a more interesting constant-filled expression
     (we don't, since we know our RHS is just an SSA name).  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
      VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
    }

  return set_ssa_val_to (lhs, rhs);
}
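
/* For example, after b_2 = c_3 and a_1 = b_2, the chain walk above
   turns SSA_VAL (b_2) into c_3, so a_1 is value numbered straight to
   c_3 instead of to the intermediate copy.  */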

/* Visit a nary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_nary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}
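
/* This is the usual lookup-or-insert discipline of hash-based value
   numbering: for x_1 = a_2 + 1 the first visit misses and records the
   operation with value x_1; a later y_3 = a_2 + 1 then hits the entry
   and y_3 is value numbered to x_1.  */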

/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gimple stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  tree result;
  tree vuse = gimple_vuse (stmt);

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = valueize_shared_reference_ops_from_call (stmt);
  vr1.type = gimple_expr_type (stmt);
  vr1.set = 0;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, NULL);
  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
          && VN_INFO (result)->has_constants)
        VN_INFO (lhs)->has_constants = true;
    }
  else
    {
      void **slot;
      vn_reference_t vr2;
      changed = set_ssa_val_to (lhs, lhs);
      vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
      vr2->vuse = vr1.vuse;
      vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
      vr2->type = vr1.type;
      vr2->set = vr1.set;
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      slot = htab_find_slot_with_hash (current_info->references,
                                       vr2, vr2->hashcode, INSERT);
      if (*slot)
        free_reference (*slot);
      *slot = vr2;
    }

  return changed;
}

/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree last_vuse;
  tree result;

  last_vuse = gimple_vuse (stmt);
  last_vuse_ptr = &last_vuse;
  result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);
  last_vuse_ptr = NULL;

  /* If we have a VIEW_CONVERT_EXPR, try looking up its operand as it
     might be stored in a different type.  */
  if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
    result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
                                  true, NULL);

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
         of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
         So first simplify and lookup this expression to see if it
         is already available.  */
      tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      if ((CONVERT_EXPR_P (val)
           || TREE_CODE (val) == VIEW_CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
        {
          tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
          if ((CONVERT_EXPR_P (tem)
               || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
              && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
                                                    TREE_TYPE (val), tem)))
            val = tem;
        }
      result = val;
      if (!is_gimple_min_invariant (val)
          && TREE_CODE (val) != SSA_NAME)
        result = vn_nary_op_lookup (val, NULL);
      /* If the expression is not yet available, value-number lhs to
         a new SSA_NAME we create.  */
      if (!result)
        {
          result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
          /* Initialize value-number information properly.  */
          VN_INFO_GET (result)->valnum = result;
          VN_INFO (result)->value_id = get_next_value_id ();
          VN_INFO (result)->expr = val;
          VN_INFO (result)->has_constants = expr_has_constants (val);
          VN_INFO (result)->needs_insertion = true;
          /* As all "inserted" statements are singleton SCCs, insert
             to the valid table.  This is strictly needed to
             avoid re-generating new value SSA_NAMEs for the same
             expression during SCC iteration over and over (the
             optimistic table gets cleared after each iteration).
             We do not need to insert into the optimistic table, as
             lookups there will fall back to the valid table.  */
          if (current_info == optimistic_info)
            {
              current_info = valid_info;
              vn_nary_op_insert (val, result);
              current_info = optimistic_info;
            }
          else
            vn_nary_op_insert (val, result);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Inserting name ");
              print_generic_expr (dump_file, result, 0);
              fprintf (dump_file, " for expression ");
              print_generic_expr (dump_file, val, 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
          && VN_INFO (result)->has_constants)
        {
          VN_INFO (lhs)->expr = VN_INFO (result)->expr;
          VN_INFO (lhs)->has_constants = true;
        }
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, last_vuse);
    }

  return changed;
}
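
/* The type-mismatch path above is what makes union type-punning work.
   For instance, with

     union { int i; float f; } u;
     u.f = f_1;
     i_2 = u.i;

   the load finds the stored f_1 by offset and size, and since float
   does not uselessly convert to int, i_2 gets the value of
   VIEW_CONVERT_EXPR <int> (f_1), possibly via a newly inserted name.  */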

/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree result;
  bool resultsame = false;

  /* First we want to lookup using the *vuses* from the store and see
     if the last store to this location with the same address had the
     same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */

  result = vn_reference_lookup (lhs, gimple_vuse (stmt), false, NULL);

  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
        result = SSA_VAL (result);
      if (TREE_CODE (op) == SSA_NAME)
        op = SSA_VAL (op);
      resultsame = expressions_equal_p (result, op);
    }

  if (!result || !resultsame)
    {
      tree vdef;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "No store match\n");
          fprintf (dump_file, "Value numbering store ");
          print_generic_expr (dump_file, lhs, 0);
          fprintf (dump_file, " to ");
          print_generic_expr (dump_file, op, 0);
          fprintf (dump_file, "\n");
        }
      /* Have to set value numbers before insert, since insert is
         going to valueize the references in-place.  */
      if ((vdef = gimple_vdef (stmt)))
        {
          VN_INFO (vdef)->use_processed = true;
          changed |= set_ssa_val_to (vdef, vdef);
        }

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
          || is_gimple_reg (op))
        vn_reference_insert (lhs, op, vdef);
    }
  else
    {
      /* We had a match, so value number the vdef to have the value
         number of the vuse it came from.  */
      tree def, use;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Store matched earlier value, "
                 "value numbering store vdefs to matching vuses.\n");

      def = gimple_vdef (stmt);
      use = gimple_vuse (stmt);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, SSA_VAL (use));
    }

  return changed;
}
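
/* Example of the match case: for two stores

     # .MEM_2 = VDEF <.MEM_1>
     *p_1 = x_3;
     ...
     # .MEM_4 = VDEF <.MEM_2>
     *p_1 = x_3;

   the second lookup finds x_3 as the current contents of *p_1, so
   .MEM_4 is value numbered to SSA_VAL (.MEM_2) and the two memory
   states are known to be equal to consumers of the value numbers.  */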

/* Visit and value number PHI, return true if the value number
   changed.  */

static bool
visit_phi (gimple phi)
{
  bool changed = false;
  tree result;
  tree sameval = VN_TOP;
  bool allsame = true;
  unsigned i;

  /* TODO: We could check for this in init_sccvn, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);

      if (TREE_CODE (def) == SSA_NAME)
        def = SSA_VAL (def);
      if (def == VN_TOP)
        continue;
      if (sameval == VN_TOP)
        {
          sameval = def;
        }
      else
        {
          if (!expressions_equal_p (def, sameval))
            {
              allsame = false;
              break;
            }
        }
    }

  /* If all arguments value numbered to the same value, the phi node
     has that value.  */
  if (allsame)
    {
      if (is_gimple_min_invariant (sameval))
        {
          VN_INFO (PHI_RESULT (phi))->has_constants = true;
          VN_INFO (PHI_RESULT (phi))->expr = sameval;
        }
      else
        {
          VN_INFO (PHI_RESULT (phi))->has_constants = false;
          VN_INFO (PHI_RESULT (phi))->expr = sameval;
        }

      if (TREE_CODE (sameval) == SSA_NAME)
        return visit_copy (PHI_RESULT (phi), sameval);

      return set_ssa_val_to (PHI_RESULT (phi), sameval);
    }

  /* Otherwise, see if it is equivalent to a phi node in this block.  */
  result = vn_phi_lookup (phi);
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
        changed = visit_copy (PHI_RESULT (phi), result);
      else
        changed = set_ssa_val_to (PHI_RESULT (phi), result);
    }
  else
    {
      vn_phi_insert (phi, PHI_RESULT (phi));
      VN_INFO (PHI_RESULT (phi))->has_constants = false;
      VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
      changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
    }

  return changed;
}
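
/* Two typical outcomes: x_3 = PHI <5, 5> takes the allsame path and
   value numbers to the constant 5, while x_3 = PHI <a_1, b_2> and
   y_4 = PHI <a_1, b_2> in the same block unify via vn_phi_lookup,
   making y_4 a copy of x_3.  */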

/* Return true if EXPR contains constants.  */

static bool
expr_has_constants (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0));

    case tcc_binary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
        || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
      /* Constants inside reference ops are rarely interesting, but
         it can take a lot of looking to find them.  */
    case tcc_reference:
    case tcc_declaration:
      return false;
    default:
      return is_gimple_min_invariant (expr);
    }
  return false;
}

/* Return true if STMT contains constants.  */

static bool
stmt_has_constants (gimple stmt)
{
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
    {
    case GIMPLE_UNARY_RHS:
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));

    case GIMPLE_BINARY_RHS:
      return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
              || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
    case GIMPLE_TERNARY_RHS:
      return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
              || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
              || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
    case GIMPLE_SINGLE_RHS:
      /* Constants inside reference ops are rarely interesting, but
         it can take a lot of looking to find them.  */
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
    default:
      gcc_unreachable ();
    }
  return false;
}

/* Replace SSA_NAMEs in EXPR with their value numbers, and return the
   result.
   This is performed in place.  */

static tree
valueize_expr (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
          && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
        TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      break;
    case tcc_binary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
          && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
        TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
          && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
        TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
      break;
    default:
      break;
    }
  return expr;
}

/* Simplify the binary expression on the RHS of STMT, and return the
   result if simplified.  */

static tree
simplify_binary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree op0 = gimple_assign_rhs1 (stmt);
  tree op1 = gimple_assign_rhs2 (stmt);

  /* This will not catch every single case we could combine, but will
     catch those with constants.  The goal here is to simultaneously
     combine constants between expressions, but avoid infinite
     expansion of expressions during simplification.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      if (VN_INFO (op0)->has_constants
          || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
        op0 = valueize_expr (vn_get_expr_for (op0));
      else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
        op0 = SSA_VAL (op0);
    }

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (VN_INFO (op1)->has_constants)
        op1 = valueize_expr (vn_get_expr_for (op1));
      else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
        op1 = SSA_VAL (op1);
    }

  /* Avoid folding if nothing changed.  */
  if (op0 == gimple_assign_rhs1 (stmt)
      && op1 == gimple_assign_rhs2 (stmt))
    return NULL_TREE;

  fold_defer_overflow_warnings ();

  result = fold_binary (gimple_assign_rhs_code (stmt),
                        gimple_expr_type (stmt), op0, op1);
  if (result)
    STRIP_USELESS_TYPE_CONVERSION (result);

  fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
                                  stmt, 0);

  /* Make sure result is not a complex expression consisting
     of operators of operators (i.e. (a + b) + (a + c))
     Otherwise, we will end up with unbounded expressions if
     fold does anything at all.  */
  if (result && valid_gimple_rhs_p (result))
    return result;

  return NULL_TREE;
}
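
/* The has_constants machinery pays off here: if b_2 carries the
   expression a_1 + 1, then visiting c_3 = b_2 + 2 can feed fold_binary
   the expression (a_1 + 1) + 2, which it may combine to a_1 + 3, while
   operands without recorded constants are only replaced by their value
   numbers so expressions cannot grow without bound.  */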

/* Simplify the unary expression on the RHS of STMT, and return the
   result if simplified.  */

static tree
simplify_unary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree orig_op0, op0 = gimple_assign_rhs1 (stmt);

  /* We handle some tcc_reference codes here that are all
     GIMPLE_ASSIGN_SINGLE codes.  */
  if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
      || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
      || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
    op0 = TREE_OPERAND (op0, 0);

  if (TREE_CODE (op0) != SSA_NAME)
    return NULL_TREE;

  orig_op0 = op0;
  if (VN_INFO (op0)->has_constants)
    op0 = valueize_expr (vn_get_expr_for (op0));
  else if (gimple_assign_cast_p (stmt)
           || gimple_assign_rhs_code (stmt) == REALPART_EXPR
           || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
           || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
    {
      /* We want to do tree-combining on conversion-like expressions.
         Make sure we feed only SSA_NAMEs or constants to fold though.  */
      tree tem = valueize_expr (vn_get_expr_for (op0));
      if (UNARY_CLASS_P (tem)
          || BINARY_CLASS_P (tem)
          || TREE_CODE (tem) == VIEW_CONVERT_EXPR
          || TREE_CODE (tem) == SSA_NAME
          || is_gimple_min_invariant (tem))
        op0 = tem;
    }

  /* Avoid folding if nothing changed, but remember the expression.  */
  if (op0 == orig_op0)
    return NULL_TREE;

  result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
                                       gimple_expr_type (stmt), op0);
  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (valid_gimple_rhs_p (result))
        return result;
    }

  return NULL_TREE;
}

/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (gimple stmt)
{
  tree tem;

  /* For stores we can end up simplifying an SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (gimple_assign_copy_p (stmt)
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    return NULL_TREE;

  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
    {
    case tcc_declaration:
      tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
      if (tem)
        return tem;
      break;

    case tcc_reference:
      /* Do not do full-blown reference lookup here, but simplify
         reads from constant aggregates.  */
      tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
      if (tem)
        return tem;

      /* Fallthrough for some codes that can operate on registers.  */
      if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
            || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
            || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
        break;
      /* We could do a little more with unary ops, if they expand
         into binary ops, but it's debatable whether it is worth it.  */
    case tcc_unary:
      return simplify_unary_expression (stmt);

    case tcc_comparison:
    case tcc_binary:
      return simplify_binary_expression (stmt);

    default:
      break;
    }

  return NULL_TREE;
}

/* Visit and value number USE, return true if the value number
   changed.  */

static bool
visit_use (tree use)
{
  bool changed = false;
  gimple stmt = SSA_NAME_DEF_STMT (use);

  VN_INFO (use)->use_processed = true;

  gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
  if (dump_file && (dump_flags & TDF_DETAILS)
      && !SSA_NAME_IS_DEFAULT_DEF (use))
    {
      fprintf (dump_file, "Value numbering ");
      print_generic_expr (dump_file, use, 0);
      fprintf (dump_file, " stmt = ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Handle uninitialized uses.  */
  if (SSA_NAME_IS_DEFAULT_DEF (use))
    changed = set_ssa_val_to (use, use);
  else
    {
      if (gimple_code (stmt) == GIMPLE_PHI)
        changed = visit_phi (stmt);
      else if (!gimple_has_lhs (stmt)
               || gimple_has_volatile_ops (stmt)
               || stmt_could_throw_p (stmt))
        changed = defs_to_varying (stmt);
      else if (is_gimple_assign (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          tree simplified;

          /* Shortcut for copies.  Simplifying copies is pointless,
             since we copy the expression and value they represent.  */
          if (gimple_assign_copy_p (stmt)
              && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
              && TREE_CODE (lhs) == SSA_NAME)
            {
              changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
              goto done;
            }
          simplified = try_to_simplify (stmt);
          if (simplified)
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "RHS ");
                  print_gimple_expr (dump_file, stmt, 0, 0);
                  fprintf (dump_file, " simplified to ");
                  print_generic_expr (dump_file, simplified, 0);
                  if (TREE_CODE (lhs) == SSA_NAME)
                    fprintf (dump_file, " has constants %d\n",
                             expr_has_constants (simplified));
                  else
                    fprintf (dump_file, "\n");
                }
            }
          /* Setting value numbers to constants will occasionally
             screw up phi congruence because constants are not
             uniquely associated with a single ssa name that can be
             looked up.  */
          if (simplified
              && is_gimple_min_invariant (simplified)
              && TREE_CODE (lhs) == SSA_NAME)
            {
              VN_INFO (lhs)->expr = simplified;
              VN_INFO (lhs)->has_constants = true;
              changed = set_ssa_val_to (lhs, simplified);
              goto done;
            }
          else if (simplified
                   && TREE_CODE (simplified) == SSA_NAME
                   && TREE_CODE (lhs) == SSA_NAME)
            {
              changed = visit_copy (lhs, simplified);
              goto done;
            }
          else if (simplified)
            {
              if (TREE_CODE (lhs) == SSA_NAME)
                {
                  VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
                  /* We have to unshare the expression or else
                     valueizing may change the IL stream.  */
                  VN_INFO (lhs)->expr = unshare_expr (simplified);
                }
            }
          else if (stmt_has_constants (stmt)
                   && TREE_CODE (lhs) == SSA_NAME)
            VN_INFO (lhs)->has_constants = true;
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              /* We reset expr and constantness here because we may
                 have been value numbering optimistically, and
                 iterating.  They may become non-constant in this case,
                 even if they were optimistically constant.  */

              VN_INFO (lhs)->has_constants = false;
              VN_INFO (lhs)->expr = NULL_TREE;
            }

          if ((TREE_CODE (lhs) == SSA_NAME
               /* We can substitute SSA_NAMEs that are live over
                  abnormal edges with their constant value.  */
               && !(gimple_assign_copy_p (stmt)
                    && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
               && !(simplified
                    && is_gimple_min_invariant (simplified))
               && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
              /* Stores or copies from SSA_NAMEs that are live over
                 abnormal edges are a problem.  */
              || (gimple_assign_single_p (stmt)
                  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
                  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
            changed = defs_to_varying (stmt);
          else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
            {
              changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
            }
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              if ((gimple_assign_copy_p (stmt)
                   && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
                  || (simplified
                      && is_gimple_min_invariant (simplified)))
                {
                  VN_INFO (lhs)->has_constants = true;
                  if (simplified)
                    changed = set_ssa_val_to (lhs, simplified);
                  else
                    changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
                }
              else
                {
                  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
                    {
                    case GIMPLE_UNARY_RHS:
                    case GIMPLE_BINARY_RHS:
                    case GIMPLE_TERNARY_RHS:
                      changed = visit_nary_op (lhs, stmt);
                      break;
                    case GIMPLE_SINGLE_RHS:
                      switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
                        {
                        case tcc_reference:
                          /* VOP-less references can go through unary case.  */
                          if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
                               || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
                               || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
                              && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
                            {
                              changed = visit_nary_op (lhs, stmt);
                              break;
                            }
                          /* Fallthrough.  */
                        case tcc_declaration:
                          changed = visit_reference_op_load
                              (lhs, gimple_assign_rhs1 (stmt), stmt);
                          break;
                        case tcc_expression:
                          if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
                            {
                              changed = visit_nary_op (lhs, stmt);
                              break;
                            }
                          /* Fallthrough.  */
                        default:
                          changed = defs_to_varying (stmt);
                        }
                      break;
                    default:
                      changed = defs_to_varying (stmt);
                      break;
                    }
                }
            }
          else
            changed = defs_to_varying (stmt);
        }
      else if (is_gimple_call (stmt))
        {
          tree lhs = gimple_call_lhs (stmt);

          /* ??? We could try to simplify calls.  */

          if (stmt_has_constants (stmt)
              && TREE_CODE (lhs) == SSA_NAME)
            VN_INFO (lhs)->has_constants = true;
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              /* We reset expr and constantness here because we may
                 have been value numbering optimistically, and
                 iterating.  They may become non-constant in this case,
                 even if they were optimistically constant.  */
              VN_INFO (lhs)->has_constants = false;
              VN_INFO (lhs)->expr = NULL_TREE;
            }

          if (TREE_CODE (lhs) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
            changed = defs_to_varying (stmt);
          /* ??? We should handle stores from calls.  */
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
                changed = visit_reference_op_call (lhs, stmt);
              else
                changed = defs_to_varying (stmt);
            }
          else
            changed = defs_to_varying (stmt);
        }
    }
 done:
  return changed;
}

/* Compare two operands by reverse postorder index.  */

static int
compare_ops (const void *pa, const void *pb)
{
  const tree opa = *((const tree *)pa);
  const tree opb = *((const tree *)pb);
  gimple opstmta = SSA_NAME_DEF_STMT (opa);
  gimple opstmtb = SSA_NAME_DEF_STMT (opb);
  basic_block bba;
  basic_block bbb;

  if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (gimple_nop_p (opstmta))
    return -1;
  else if (gimple_nop_p (opstmtb))
    return 1;

  bba = gimple_bb (opstmta);
  bbb = gimple_bb (opstmtb);

  if (!bba && !bbb)
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (!bba)
    return -1;
  else if (!bbb)
    return 1;

  if (bba == bbb)
    {
      if (gimple_code (opstmta) == GIMPLE_PHI
          && gimple_code (opstmtb) == GIMPLE_PHI)
        return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
      else if (gimple_code (opstmta) == GIMPLE_PHI)
        return -1;
      else if (gimple_code (opstmtb) == GIMPLE_PHI)
        return 1;
      else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
        return gimple_uid (opstmta) - gimple_uid (opstmtb);
      else
        return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
    }
  return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
}

/* Sort an array containing members of a strongly connected component
   SCC so that the members are ordered by RPO number.
   This means that when the sort is complete, iterating through the
   array will give you the members in RPO order.  */

static void
sort_scc (VEC (tree, heap) *scc)
{
  VEC_qsort (tree, scc, compare_ops);
}
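
/* Visiting members in RPO order means definitions tend to be seen
   before their uses within the component, so the fixpoint iteration
   in process_scc usually converges in fewer passes; the order is a
   compile-time heuristic, not required for correctness.  */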

/* Insert the no longer used nary ONARY into the nary table of INFO.  */

static void
copy_nary (vn_nary_op_t onary, vn_tables_t info)
{
  size_t size = sizeof_vn_nary_op (onary->length);
  vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
                                               &info->nary_obstack);
  memcpy (nary, onary, size);
  vn_nary_op_insert_into (nary, info->nary, false);
}

/* Insert the no longer used phi OPHI into the phi table of INFO.  */

static void
copy_phi (vn_phi_t ophi, vn_tables_t info)
{
  vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
  void **slot;
  memcpy (phi, ophi, sizeof (*phi));
  ophi->phiargs = NULL;
  slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
  gcc_assert (!*slot);
  *slot = phi;
}

/* Insert the no longer used reference OREF into the reference table
   of INFO.  */

static void
copy_reference (vn_reference_t oref, vn_tables_t info)
{
  vn_reference_t ref;
  void **slot;
  ref = (vn_reference_t) pool_alloc (info->references_pool);
  memcpy (ref, oref, sizeof (*ref));
  oref->operands = NULL;
  slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
                                   INSERT);
  if (*slot)
    free_reference (*slot);
  *slot = ref;
}

/* Process a strongly connected component in the SSA graph.  */

static void
process_scc (VEC (tree, heap) *scc)
{
  tree var;
  unsigned int i;
  unsigned int iterations = 0;
  bool changed = true;
  htab_iterator hi;
  vn_nary_op_t nary;
  vn_phi_t phi;
  vn_reference_t ref;

  /* If the SCC has a single member, just visit it.  */
  if (VEC_length (tree, scc) == 1)
    {
      tree use = VEC_index (tree, scc, 0);
      if (VN_INFO (use)->use_processed)
        return;
      /* We need to make sure it doesn't form a cycle itself, which can
         happen for self-referential PHI nodes.  In that case we would
         end up inserting an expression with VN_TOP operands into the
         valid table which makes us derive bogus equivalences later.
         The cheapest way to check this is to assume it for all PHI nodes.  */
      if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
        /* Fallthru to iteration.  */ ;
      else
        {
          visit_use (use);
          return;
        }
    }

  /* Iterate over the SCC with the optimistic table until it stops
     changing.  */
  current_info = optimistic_info;
  while (changed)
    {
      changed = false;
      iterations++;
      /* As we are value-numbering optimistically we have to
         clear the expression tables and the simplified expressions
         in each iteration until we converge.  */
      htab_empty (optimistic_info->nary);
      htab_empty (optimistic_info->phis);
      htab_empty (optimistic_info->references);
      obstack_free (&optimistic_info->nary_obstack, NULL);
      gcc_obstack_init (&optimistic_info->nary_obstack);
      empty_alloc_pool (optimistic_info->phis_pool);
      empty_alloc_pool (optimistic_info->references_pool);
      FOR_EACH_VEC_ELT (tree, scc, i, var)
        VN_INFO (var)->expr = NULL_TREE;
      FOR_EACH_VEC_ELT (tree, scc, i, var)
        changed |= visit_use (var);
    }

  statistics_histogram_event (cfun, "SCC iterations", iterations);

  /* Finally, copy the contents of the no longer used optimistic
     table to the valid table.  */
  FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
    copy_nary (nary, valid_info);
  FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
    copy_phi (phi, valid_info);
  FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
    copy_reference (ref, valid_info);

  current_info = valid_info;
}
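
/* The optimistic iteration is what proves cycle invariants.  For

     i_1 = PHI <0 (entry), i_2 (latch)>
     i_2 = i_1 + 0;

   the first pass treats the not-yet-visited i_2 as VN_TOP, so the PHI
   sees only the value 0; i_2 = 0 + 0 then folds to 0, and the next
   pass confirms nothing changes.  Both names end up equal to 0, a
   result a single pessimistic pass could not establish.  */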

DEF_VEC_O(ssa_op_iter);
DEF_VEC_ALLOC_O(ssa_op_iter,heap);

/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  Returns true if all went well, false if
   we run into resource limits.  */

static bool
extract_and_process_scc_for_name (tree name)
{
  VEC (tree, heap) *scc = NULL;
  tree x;

  /* Found an SCC, pop the components off the SCC stack and
     process them.  */
  do
    {
      x = VEC_pop (tree, sccstack);

      VN_INFO (x)->on_sccstack = false;
      VEC_safe_push (tree, heap, scc, x);
    } while (x != name);

  /* Bail out of SCCVN in case an SCC turns out to be incredibly large.  */
  if (VEC_length (tree, scc)
      > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
    {
      if (dump_file)
        fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
                 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
                 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
      return false;
    }

  if (VEC_length (tree, scc) > 1)
    sort_scc (scc);

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  process_scc (scc);

  VEC_free (tree, heap, scc);

  return true;
}

/* Depth first search on NAME to discover and process SCCs in the SSA
   graph.
   Execution of this algorithm relies on the fact that the SCCs are
   popped off the stack in topological order.
   Returns true if successful, false if we stopped processing SCCs due
   to resource constraints.  */

static bool
DFS (tree name)
{
  VEC(ssa_op_iter, heap) *itervec = NULL;
  VEC(tree, heap) *namevec = NULL;
  use_operand_p usep = NULL;
  gimple defstmt;
  tree use;
  ssa_op_iter iter;

start_over:
  /* SCC info */
  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  VEC_safe_push (tree, heap, sccstack, name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCCs.  */
  if (!gimple_nop_p (defstmt))
    {
      /* Push a new iterator.  */
      if (gimple_code (defstmt) == GIMPLE_PHI)
        usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
      else
        usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    }
  else
    clear_and_done_ssa_iter (&iter);

  while (1)
    {
      /* If we are done processing uses of a name, go up the stack
         of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
        {
          /* See if we found an SCC.  */
          if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
            if (!extract_and_process_scc_for_name (name))
              {
                VEC_free (tree, heap, namevec);
                VEC_free (ssa_op_iter, heap, itervec);
                return false;
              }

          /* Check if we are done.  */
          if (VEC_empty (tree, namevec))
            {
              VEC_free (tree, heap, namevec);
              VEC_free (ssa_op_iter, heap, itervec);
              return true;
            }

          /* Restore the last use walker and continue walking there.  */
          use = name;
          name = VEC_pop (tree, namevec);
          memcpy (&iter, VEC_last (ssa_op_iter, itervec),
                  sizeof (ssa_op_iter));
          VEC_pop (ssa_op_iter, itervec);
          goto continue_walking;
        }

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
         invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
        {
          if (! (VN_INFO (use)->visited))
            {
              /* Recurse by pushing the current use walking state on
                 the stack and starting over.  */
              VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
              VEC_safe_push(tree, heap, namevec, name);
              name = use;
              goto start_over;

continue_walking:
              VN_INFO (name)->low = MIN (VN_INFO (name)->low,
                                         VN_INFO (use)->low);
            }
          if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
              && VN_INFO (use)->on_sccstack)
            {
              VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
                                         VN_INFO (name)->low);
            }
        }

      usep = op_iter_next_use (&iter);
    }
}
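
/* This is essentially Tarjan's SCC algorithm, made iterative with an
   explicit name/iterator stack to avoid deep recursion: "low" tracks
   the smallest DFS number reachable from NAME, and NAME roots an SCC
   exactly when low == dfsnum once its uses are exhausted.  E.g. for
   i_1 = PHI <0, i_2>; i_2 = i_1 + 1; the walk from i_1 reaches i_2
   and finds i_1 still on the stack, so both names pop as one SCC.  */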

/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table)
{
  table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
  table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
  table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
                                   free_reference);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = create_alloc_pool ("VN phis",
                                        sizeof (struct vn_phi_s),
                                        30);
  table->references_pool = create_alloc_pool ("VN references",
                                              sizeof (struct vn_reference_s),
                                              30);
}

/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  htab_delete (table->phis);
  htab_delete (table->nary);
  htab_delete (table->references);
  obstack_free (&table->nary_obstack, NULL);
  free_alloc_pool (table->phis_pool);
  free_alloc_pool (table->references_pool);
}

static void
init_scc_vn (void)
{
  size_t i;
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  sccstack = NULL;
  constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
                                      free);

  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size, it just
     preallocates the space to do so.  */
  VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs = NULL;
  shared_lookup_references = NULL;
  rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* RPO_NUMBERS_TEMP is an array of rpo ordering, rpo[i] = bb means that
     the i'th block in RPO order is bb.  We want to map bb's to RPO
     numbers, so we need to rearrange this array.  */
  for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;

  XDELETE (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
        {
          VN_INFO_GET (name)->valnum = VN_TOP;
          VN_INFO (name)->expr = NULL_TREE;
          VN_INFO (name)->value_id = 0;
        }
    }

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}

void
free_scc_vn (void)
{
  size_t i;

  htab_delete (constant_to_value_id);
  BITMAP_FREE (constant_value_ids);
  VEC_free (tree, heap, shared_lookup_phiargs);
  VEC_free (vn_reference_op_s, heap, shared_lookup_references);
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->needs_insertion)
        release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);

  VEC_free (tree, heap, sccstack);
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}

/* Set *ID if we computed something useful in RESULT.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
        *id = VN_INFO (result)->value_id;
      else if (is_gimple_min_invariant (result))
        *id = get_or_alloc_constant_value_id (result);
    }
}

/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  htab_iterator hi;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HTAB_ELEMENT (valid_info->nary,
                         vno, vn_nary_op_t, hi)
    set_value_id_for_result (vno->result, &vno->value_id);

  FOR_EACH_HTAB_ELEMENT (valid_info->phis,
                         vp, vn_phi_t, hi)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HTAB_ELEMENT (valid_info->references,
                         vr, vn_reference_t, hi)
    set_value_id_for_result (vr->result, &vr->value_id);
}

/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  */

bool
run_scc_vn (void)
{
  size_t i;
  tree param;
  bool changed = true;

  init_scc_vn ();
  current_info = valid_info;

  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    {
      if (gimple_default_def (cfun, param) != NULL)
        {
          tree def = gimple_default_def (cfun, param);
          VN_INFO (def)->valnum = def;
        }
    }

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->visited == false
          && !has_zero_uses (name))
        if (!DFS (name))
          {
            free_scc_vn ();
            return false;
          }
    }

  /* Initialize the value ids.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
        continue;
      info = VN_INFO (name);
      if (info->valnum == name
          || info->valnum == VN_TOP)
        info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
        info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate the value ids until they stop changing.  */
  while (changed)
    {
      changed = false;
      for (i = 1; i < num_ssa_names; ++i)
        {
          tree name = ssa_name (i);
          vn_ssa_aux_t info;
          if (!name)
            continue;
          info = VN_INFO (name);
          if (TREE_CODE (info->valnum) == SSA_NAME
              && info->valnum != name
              && info->value_id != VN_INFO (info->valnum)->value_id)
            {
              changed = true;
              info->value_id = VN_INFO (info->valnum)->value_id;
            }
        }
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          if (name
              && VN_INFO (name)->visited
              && SSA_VAL (name) != name)
            {
              print_generic_expr (dump_file, name, 0);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, SSA_VAL (name), 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  return true;
}

/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}

/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}

/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (type)
               && TYPE_OVERFLOW_TRAPS (type))
        honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
                                       honor_trapv,
                                       honor_nans, honor_snans, rhs2,
                                       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}