/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight line code, it
   is equivalent to a regular hash based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */

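/* Illustrative example (not part of the original sources): in the
   straight-line GIMPLE sequence

     x_1 = a_2 + b_3;
     y_4 = a_2 + b_3;
     z_5 = x_1 - y_4;

   hash-based value numbering assigns x_1 and y_4 the same value
   number because their opcode and (valueized) operands match, which
   in turn allows z_5 to simplify to the constant 0.  The SCC walk
   extends this to PHI cycles by iterating with the optimistic table
   until the value numbers of the cycle members stop changing.  */
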
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;


DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
				SSA_NAME_VERSION (name));
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
	       SSA_NAME_VERSION (name), value);
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
		   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
	       SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}

/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is a default-definition or a PHI result
     use it directly.  */
  if (gimple_nop_p (def_stmt)
      || gimple_code (def_stmt) == GIMPLE_PHI)
    return vn->valnum;

  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
    {
    case tcc_reference:
      if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
	   || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
	   || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
	  && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
	expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
			    gimple_expr_type (def_stmt),
			    TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
			  gimple_expr_type (def_stmt),
			  gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
			  gimple_expr_type (def_stmt),
			  gimple_assign_rhs1 (def_stmt),
			  gimple_assign_rhs2 (def_stmt));
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}

/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
				   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
				   vc.hashcode, INSERT);
  if (*slot)
    return ((vn_constant_t)*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = (void *) vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return vro1->opcode == vro2->opcode
	 && types_compatible_p (vro1->type, vro2->type)
	 && expressions_equal_p (vro1->op0, vro2->op0)
	 && expressions_equal_p (vro1->op1, vro2->op1)
	 && expressions_equal_p (vro1->op2, vro2->op2);
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vn_reference_op_s, vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (vro->off != -1)
	{
	  if (off == -1)
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (off != -1
	      && off != 0)
	    result = iterative_hash_hashval_t (off, result);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  result = iterative_hash_hashval_t (TREE_CODE (op), result);
		  result = iterative_hash_expr (op, result);
		}
	    }
	  else
	    result = vn_reference_op_compute_hash (vro, result);
	}
    }
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}

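/* Illustrative note (not part of the original sources): the off/deref
   tracking above canonicalizes the hash of equivalent access paths.
   Runs of operands with known constant offsets are summed and hashed
   as a single value, so different component spellings that reach the
   same total byte offset hash alike, and a dereference of an
   ADDR_EXPR hashes the underlying object itself, so *&x hashes the
   same as plain x (mirroring the tem1/tem2 trick in vn_reference_eq
   below).  */
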
/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  unsigned i, j;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  if (vro1->off == -1)
	    break;
	  off1 += vro1->off;
	}
      for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  if (vro2->off == -1)
	    break;
	  off2 += vro2->off;
	}
      if (off1 != off2)
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	}
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (VEC_length (vn_reference_op_s, vr1->operands) != i
	 || VEC_length (vn_reference_op_s, vr2->operands) != j);

  return true;
}

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  if (host_integerp (TREE_OPERAND (ref, 1), 0))
	    temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
	  break;
	case BIT_FIELD_REF:
	  /* Record bits and position.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& TREE_CODE (this_offset) == INTEGER_CST)
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    double_int off
		      = double_int_add (tree_to_double_int (this_offset),
					double_int_rshift
					  (tree_to_double_int (bit_offset),
					   BITS_PER_UNIT == 8
					   ? 3 : exact_log2 (BITS_PER_UNIT),
					   HOST_BITS_PER_DOUBLE_INT, true));
		    if (double_int_fits_in_shwi_p (off))
		      temp.off = off.low;
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* Record index as operand.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  /* Always record lower bounds and element size.  */
	  temp.op1 = array_ref_low_bound (ref);
	  temp.op2 = array_ref_element_size (ref);
	  if (TREE_CODE (temp.op0) == INTEGER_CST
	      && TREE_CODE (temp.op1) == INTEGER_CST
	      && TREE_CODE (temp.op2) == INTEGER_CST)
	    {
	      double_int off = tree_to_double_int (temp.op0);
	      off = double_int_add (off,
				    double_int_neg
				      (tree_to_double_int (temp.op1)));
	      off = double_int_mul (off, tree_to_double_int (temp.op2));
	      if (double_int_fits_in_shwi_p (off))
		temp.off = off.low;
	    }
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case VAR_DECL:
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthrough.  */
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (IE they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration.  */
	case REALPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}

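/* Illustrative example (not part of the original sources): for a load
   such as s.a[i_1] the loop above builds roughly

     ops[0] = { opcode ARRAY_REF, op0 i_1, op1 low-bound, op2 elt-size }
     ops[1] = { opcode COMPONENT_REF, op0 FIELD_DECL a }
     ops[2] = { opcode VAR_DECL, op0 s }

   i.e. the outermost access first and the base object last, with
   temp.off filled in wherever the component's byte offset is a
   compile-time constant.  */
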
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
	size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
	size = -1;
      else
	size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
	      base = TREE_OPERAND (op->op0, 0);
	      if (pop->off == -1)
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  base_alias_set = get_deref_alias_set (op->op0);
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += tree_low_cst (op->op1, 0);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */
	    if (op->op1
		|| !host_integerp (DECL_FIELD_OFFSET (field), 1))
	      max_size = -1;
	    else
	      {
		offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
			   * BITS_PER_UNIT);
		offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!host_integerp (op->op0, 0)
	      || !host_integerp (op->op1, 0)
	      || !host_integerp (op->op2, 0))
	    max_size = -1;
	  else
	    {
	      HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
	      hindex -= TREE_INT_CST_LOW (op->op1);
	      hindex *= TREE_INT_CST_LOW (op->op2);
	      hindex *= BITS_PER_UNIT;
	      offset += hindex;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case CONST_DECL:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);

  return true;
}

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
			      VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  temp.off = -1;
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  tree addr_base;
  HOST_WIDE_INT addr_offset;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
					     &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != op->op0)
    {
      double_int off = tree_to_double_int (mem_op->op0);
      off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
      off = double_int_add (off, shwi_to_double_int (addr_offset));
      mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (host_integerp (mem_op->op0, 0))
	mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
      else
	mem_op->off = -1;
    }
}

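/* Illustrative example (not part of the original sources): a
   reference written as MEM[&s.f.g, 8] is folded here to
   MEM[&s, 8 + byte-offset-of(.f.g)], so two *& chains that reach the
   same byte of the same object compare and hash identically no
   matter how the address expression was originally spelled.  */
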
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
				     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  gimple def_stmt;
  enum tree_code code;
  double_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = tree_to_double_int (mem_op->op0);
  off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						 &addr_offset);
      if (!addr_base
	  || TREE_CODE (addr_base) != MEM_REF)
	return;

      off = double_int_add (off, shwi_to_double_int (addr_offset));
      off = double_int_add (off, mem_ref_offset (addr_base));
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
	  || TREE_CODE (ptroff) != INTEGER_CST)
	return;

      off = double_int_add (off, tree_to_double_int (ptroff));
      op->op0 = ptr;
    }

  mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (host_integerp (mem_op->op0, 0))
    mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}

/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  VEC (vn_reference_op_s, heap) *operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = VEC_index (vn_reference_op_s, operands, 0);
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && VEC_length (vn_reference_op_s, operands) >= 2
      && VEC_length (vn_reference_op_s, operands) <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (VEC_length (vn_reference_op_s, operands) > 2)
	arg1 = VEC_index (vn_reference_op_s, operands, 2);
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					 arg1 ? 2 : 1,
					 arg0->op0,
					 arg1 ? arg1->op0 : NULL);
	  if (folded
	      && TREE_CODE (folded) == NOP_EXPR)
	    folded = TREE_OPERAND (folded, 0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
	   && TREE_CODE (op->op0) == INTEGER_CST
	   && integer_zerop (op->op1)
	   && VEC_length (vn_reference_op_s, operands) == 2)
    {
      vn_reference_op_t arg0;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (arg0->opcode == STRING_CST
	  && (TYPE_MODE (op->type)
	      == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
	  && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
	  && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
	  && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
	return build_int_cst_type (op->type,
				   (TREE_STRING_POINTER (arg0->op0)
				    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}

/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  vn_reference_op_t vro;
  unsigned int i;

  FOR_EACH_VEC_ELT (vn_reference_op_s, orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  vro->op0 = SSA_VAL (vro->op0);
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	vro->op1 = SSA_VAL (vro->op1);
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	vro->op2 = SSA_VAL (vro->op2);
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && VEC_index (vn_reference_op_s,
			orig, i - 1)->opcode == MEM_REF)
	vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && VEC_index (vn_reference_op_s,
			     orig, i - 1)->opcode == MEM_REF)
	vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && vro->off == -1
	       && TREE_CODE (vro->op0) == INTEGER_CST
	       && TREE_CODE (vro->op1) == INTEGER_CST
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  double_int off = tree_to_double_int (vro->op0);
	  off = double_int_add (off,
				double_int_neg
				  (tree_to_double_int (vro->op1)));
	  off = double_int_mul (off, tree_to_double_int (vro->op2));
	  if (double_int_fits_in_shwi_p (off))
	    vro->off = off.low;
	}
    }

  return orig;
}

static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
				   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
				     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
				   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
				     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}

/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree fndecl;
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static VEC (vn_reference_op_s, heap) *lhs_ops = NULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definitions LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      VEC (vn_reference_op_s, heap) *tem;
      tree lhs = gimple_assign_lhs (def_stmt);
      /* Avoid re-allocation overhead.  */
      VEC_truncate (vn_reference_op_s, lhs_ops, 0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      tem = lhs_ops;
      lhs_ops = valueize_refs (lhs_ops);
      gcc_assert (lhs_ops == tem);
      lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref, get_alias_set (lhs),
						  TREE_TYPE (lhs), lhs_ops);
      if (lhs_ref_ok
	  && !refs_may_alias_p_1 (ref, &lhs_ref, true))
	return NULL;
    }

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && is_gimple_call (def_stmt)
      && (fndecl = gimple_call_fndecl (def_stmt))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
	  == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
	  && maxsize2 != -1
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  tree val = build_zero_cst (vr->type);
	  unsigned int value_id = get_or_alloc_constant_value_id (val);
	  return vn_reference_insert_pieces (vuse, vr->set, vr->type,
					     VEC_copy (vn_reference_op_s,
						       heap, vr->operands),
					     val, value_id);
	}
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
				       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  tree val = build_zero_cst (vr->type);
	  unsigned int value_id = get_or_alloc_constant_value_id (val);
	  return vn_reference_insert_pieces (vuse, vr->set, vr->type,
					     VEC_copy (vn_reference_op_s,
						       heap, vr->operands),
					     val, value_id);
	}
    }

  /* For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
	   && gimple_assign_single_p (def_stmt)
	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      VEC (vn_reference_op_s, heap) *rhs = NULL;
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
	return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      if (maxsize2 == -1
	  || (base != base2 && !operand_equal_p (base, base2, 0))
	  || offset2 > offset
	  || offset2 + size2 < offset + maxsize)
	return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
	 contains valueized operands for the lhs.  */
      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
      j = VEC_length (vn_reference_op_s, lhs_ops) - 1;
      while (j >= 0 && i >= 0
	     && vn_reference_op_eq (VEC_index (vn_reference_op_s,
					       vr->operands, i),
				    VEC_index (vn_reference_op_s, lhs_ops, j)))
	{
	  i--;
	  j--;
	}

      /* i now points to the first additional op.
	 ??? LHS may not be completely contained in VR, one or more
	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
	 try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
	return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
	  > VEC_length (vn_reference_op_s, vr->operands))
	{
	  VEC (vn_reference_op_s, heap) *old = vr->operands;
	  VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
			 i + 1 + VEC_length (vn_reference_op_s, rhs));
	  if (old == shared_lookup_references
	      && vr->operands != old)
	    shared_lookup_references = NULL;
	}
      else
	VEC_truncate (vn_reference_op_s, vr->operands,
		      i + 1 + VEC_length (vn_reference_op_s, rhs));
      FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
	VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
      VEC_free (vn_reference_op_s, heap, rhs);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}

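/* Illustrative example (not part of the original sources): given

     memset (&a, 0, sizeof (a));
     ...
     x_1 = a.f;

   case 1) above notices that the memset covers all of a.f, builds the
   constant zero of a.f's type and records it for this reference, so
   the load of a.f is value numbered to 0 without walking any further
   defining statements.  */
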
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
			    VEC (vn_reference_op_s, heap) *operands,
			    vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
		 VEC_length (vn_reference_op_s, operands));
  memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
	  VEC_address (vn_reference_op_s, operands),
	  sizeof (vn_reference_op_s)
	  * VEC_length (vn_reference_op_s, operands));
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      vn_walk_kind = kind;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
	*vnresult =
	  (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
						  vn_reference_lookup_2,
						  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
	VEC_free (vn_reference_op_s, heap, vr1.operands);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
		     vn_reference_t *vnresult)
{
  VEC (vn_reference_op_s, heap) *operands;
  struct vn_reference_s vr1;
  tree cst;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
  vr1.type = TREE_TYPE (op);
  vr1.set = get_alias_set (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  if (kind != VN_NOWALK
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      ao_ref_init (&r, op);
      vn_walk_kind = kind;
      wvnresult =
	(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
						vn_reference_lookup_2,
						vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
	VEC_free (vn_reference_op_s, heap, vr1.operands);
      if (wvnresult)
	{
	  if (vnresult)
	    *vnresult = wvnresult;
	  return wvnresult->result;
	}

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}

/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
				   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}

/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
			    VEC (vn_reference_op_s, heap) *operands,
			    tree result, unsigned int value_id)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
				   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}

/* Compute and return the hash value for nary operation VNO1.  */

hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  hash = iterative_hash_hashval_t (vno1->opcode, 0);
  for (i = 0; i < vno1->length; ++i)
    hash = iterative_hash_expr (vno1->op[i], hash);

  return hash;
}

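/* Illustrative note (not part of the original sources): because the
   operands are valueized and commutative operations are swapped into
   a canonical order before hashing, a_1 + b_2 and b_2 + a_1 produce
   the same hash and compare equal in vn_nary_op_eq, so both
   expressions receive a single value number.  */
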
/* Return the computed hashcode for nary operation P1.  */

static hashval_t
vn_nary_op_hash (const void *p1)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

int
vn_nary_op_eq (const void *p1, const void *p2)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}

/* Initialize VNO from the pieces provided.  */

static void
init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
			     enum tree_code code, tree type, tree op0,
			     tree op1, tree op2, tree op3)
{
  vno->opcode = code;
  vno->length = length;
  vno->type = type;
  switch (length)
    {
    /* The fallthrus here are deliberate.  */
    case 4: vno->op[3] = op3;
    case 3: vno->op[2] = op2;
    case 2: vno->op[1] = op1;
    case 1: vno->op[0] = op0;
    default:
      break;
    }
}

/* Initialize VNO from OP.  */

static void
init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
{
  unsigned i;

  vno->opcode = TREE_CODE (op);
  vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno->type = TREE_TYPE (op);
  for (i = 0; i < vno->length; ++i)
    vno->op[i] = TREE_OPERAND (op, i);
}

/* Initialize VNO from STMT.  */

static void
init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
{
  unsigned i;

  vno->opcode = gimple_assign_rhs_code (stmt);
  vno->length = gimple_num_ops (stmt) - 1;
  vno->type = gimple_expr_type (stmt);
  for (i = 0; i < vno->length; ++i)
    vno->op[i] = gimple_op (stmt, i + 1);
  if (vno->opcode == REALPART_EXPR
      || vno->opcode == IMAGPART_EXPR
      || vno->opcode == VIEW_CONVERT_EXPR)
    vno->op[0] = TREE_OPERAND (vno->op[0], 0);
}

/* Compute the hashcode for VNO and look for it in the hash table;
   return the resulting value number if it exists in the hash table.
   Return NULL_TREE if it does not exist in the hash table or if the
   result field of the operation is NULL.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

static tree
vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
{
  void **slot;

  if (vnresult)
    *vnresult = NULL;

  vno->hashcode = vn_nary_op_compute_hash (vno);
  slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
				   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Lookup a n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
			  tree type, tree op0, tree op1, tree op2,
			  tree op3, vn_nary_op_t *vnresult)
{
  struct vn_nary_op_s vno1;
  init_vn_nary_op_from_pieces (&vno1, length, code, type, op0, op1, op2, op3);
  return vn_nary_op_lookup_1 (&vno1, vnresult);
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  struct vn_nary_op_s vno1;
  init_vn_nary_op_from_op (&vno1, op);
  return vn_nary_op_lookup_1 (&vno1, vnresult);
}

/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  struct vn_nary_op_s vno1;
  init_vn_nary_op_from_stmt (&vno1, stmt);
  return vn_nary_op_lookup_1 (&vno1, vnresult);
}

/* Return the size of a vn_nary_op_t with LENGTH operands.  */

static size_t
sizeof_vn_nary_op (unsigned int length)
{
  return sizeof (struct vn_nary_op_s) - sizeof (tree) * (4 - length);
}

/* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */

static vn_nary_op_t
alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
{
  return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
}

/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
   obstack.  */

static vn_nary_op_t
alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
					       &current_info->nary_obstack);

  vno1->value_id = value_id;
  vno1->length = length;
  vno1->result = result;

  return vno1;
}

/* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
   VNO->HASHCODE first.  */

static vn_nary_op_t
vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash)
{
  void **slot;

  if (compute_hash)
    vno->hashcode = vn_nary_op_compute_hash (vno);

  slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT);
  gcc_assert (!*slot);

  *slot = vno;
  return vno;
}

1892 /* Insert a n-ary operation into the current hash table using it's
1893 pieces. Return the vn_nary_op_t structure we created and put in
1894 the hashtable. */
1896 vn_nary_op_t
1897 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
1898 tree type, tree op0,
1899 tree op1, tree op2, tree op3,
1900 tree result,
1901 unsigned int value_id)
1903 vn_nary_op_t vno1;
1905 vno1 = alloc_vn_nary_op (length, result, value_id);
1906 init_vn_nary_op_from_pieces (vno1, length, code, type, op0, op1, op2, op3);
1907 return vn_nary_op_insert_into (vno1, current_info->nary, true);
1910 /* Insert OP into the current hash table with a value number of
1911 RESULT. Return the vn_nary_op_t structure we created and put in
1912 the hashtable. */
1914 vn_nary_op_t
1915 vn_nary_op_insert (tree op, tree result)
1917 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
1918 vn_nary_op_t vno1;
1920 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
1921 init_vn_nary_op_from_op (vno1, op);
1922 return vn_nary_op_insert_into (vno1, current_info->nary, true);
1925 /* Insert the rhs of STMT into the current hash table with a value number of
1926 RESULT. */
1928 vn_nary_op_t
1929 vn_nary_op_insert_stmt (gimple stmt, tree result)
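  /* gimple_num_ops counts the lhs as operand zero, so subtract one
     to get the number of rhs operands.  */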
1931 unsigned length = gimple_num_ops (stmt) - 1;
1932 vn_nary_op_t vno1;
1934 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
1935 init_vn_nary_op_from_stmt (vno1, stmt);
1936 return vn_nary_op_insert_into (vno1, current_info->nary, true);
1939 /* Compute a hashcode for PHI operation VP1 and return it. */
1941 static inline hashval_t
1942 vn_phi_compute_hash (vn_phi_t vp1)
1944 hashval_t result;
1945 int i;
1946 tree phi1op;
1947 tree type;
1949 result = vp1->block->index;
1951 /* If all PHI arguments are constants we need to distinguish
1952 the PHI node via its type. */
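/* For instance (illustrative), PHI <2, 3> of type int and
   PHI <2, 3> of type long long must not hash identically, since
   hashing the constant arguments alone would not separate them.  */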
1953 type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
1954 result += (INTEGRAL_TYPE_P (type)
1955 + (INTEGRAL_TYPE_P (type)
1956 ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
1958 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
1960 if (phi1op == VN_TOP)
1961 continue;
1962 result = iterative_hash_expr (phi1op, result);
1965 return result;
1968 /* Return the computed hashcode for phi operation P1. */
1970 static hashval_t
1971 vn_phi_hash (const void *p1)
1973 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
1974 return vp1->hashcode;
1977 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
1979 static int
1980 vn_phi_eq (const void *p1, const void *p2)
1982 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
1983 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
1985 if (vp1->hashcode != vp2->hashcode)
1986 return false;
1988 if (vp1->block == vp2->block)
1990 int i;
1991 tree phi1op;
1993 /* If the PHI nodes do not have compatible types
1994 they are not the same. */
1995 if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
1996 TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
1997 return false;
1999 /* Any phi in the same block will have its arguments in the
2000 same edge order, because of how we store phi nodes. */
2001 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2003 tree phi2op = VEC_index (tree, vp2->phiargs, i);
2004 if (phi1op == VN_TOP || phi2op == VN_TOP)
2005 continue;
2006 if (!expressions_equal_p (phi1op, phi2op))
2007 return false;
2009 return true;
2011 return false;
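/* Scratch argument vector shared by all calls to vn_phi_lookup, so
   each lookup does not have to allocate and free its own copy.  */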
2014 static VEC(tree, heap) *shared_lookup_phiargs;
2016 /* Look up PHI in the current hash table, and return the resulting
2017 value number if it exists in the hash table. Return NULL_TREE if
2018 it does not exist in the hash table. */
2020 static tree
2021 vn_phi_lookup (gimple phi)
2023 void **slot;
2024 struct vn_phi_s vp1;
2025 unsigned i;
2027 VEC_truncate (tree, shared_lookup_phiargs, 0);
2029 /* Canonicalize the SSA_NAMEs to their value numbers. */
2030 for (i = 0; i < gimple_phi_num_args (phi); i++)
2032 tree def = PHI_ARG_DEF (phi, i);
2033 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2034 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
2036 vp1.phiargs = shared_lookup_phiargs;
2037 vp1.block = gimple_bb (phi);
2038 vp1.hashcode = vn_phi_compute_hash (&vp1);
2039 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
2040 NO_INSERT);
2041 if (!slot && current_info == optimistic_info)
2042 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
2043 NO_INSERT);
2044 if (!slot)
2045 return NULL_TREE;
2046 return ((vn_phi_t)*slot)->result;
2049 /* Insert PHI into the current hash table with a value number of
2050 RESULT. */
2052 static vn_phi_t
2053 vn_phi_insert (gimple phi, tree result)
2055 void **slot;
2056 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2057 unsigned i;
2058 VEC (tree, heap) *args = NULL;
2060 /* Canonicalize the SSA_NAMEs to their value numbers. */
2061 for (i = 0; i < gimple_phi_num_args (phi); i++)
2063 tree def = PHI_ARG_DEF (phi, i);
2064 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2065 VEC_safe_push (tree, heap, args, def);
2067 vp1->value_id = VN_INFO (result)->value_id;
2068 vp1->phiargs = args;
2069 vp1->block = gimple_bb (phi);
2070 vp1->result = result;
2071 vp1->hashcode = vn_phi_compute_hash (vp1);
2073 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
2074 INSERT);
2076 /* Because we iterate over phi operations more than once, it's
2077 possible the slot might already exist here, hence no assert. */
2078 *slot = vp1;
2079 return vp1;
2083 /* Print the members of the strongly connected component SCC to OUT. */
2085 static void
2086 print_scc (FILE *out, VEC (tree, heap) *scc)
2088 tree var;
2089 unsigned int i;
2091 fprintf (out, "SCC consists of: ");
2092 FOR_EACH_VEC_ELT (tree, scc, i, var)
2094 print_generic_expr (out, var, 0);
2095 fprintf (out, " ");
2097 fprintf (out, "\n");
2100 /* Set the value number of FROM to TO, return true if it has changed
2101 as a result. */
2103 static inline bool
2104 set_ssa_val_to (tree from, tree to)
2106 tree currval;
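  /* Do not use an SSA name that occurs in an abnormal PHI as the
     value number of another name; such names carry coalescing
     restrictions, so fall back to FROM itself below.  */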
2108 if (from != to
2109 && TREE_CODE (to) == SSA_NAME
2110 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2111 to = from;
2113 /* The only things we allow as value numbers are VN_TOP, SSA_NAMEs
2114 and invariants; assert that here. */
2115 gcc_assert (to != NULL_TREE
2116 && (to == VN_TOP
2117 || TREE_CODE (to) == SSA_NAME
2118 || is_gimple_min_invariant (to)));
2120 if (dump_file && (dump_flags & TDF_DETAILS))
2122 fprintf (dump_file, "Setting value number of ");
2123 print_generic_expr (dump_file, from, 0);
2124 fprintf (dump_file, " to ");
2125 print_generic_expr (dump_file, to, 0);
2128 currval = SSA_VAL (from);
2130 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
2132 VN_INFO (from)->valnum = to;
2133 if (dump_file && (dump_flags & TDF_DETAILS))
2134 fprintf (dump_file, " (changed)\n");
2135 return true;
2137 if (dump_file && (dump_flags & TDF_DETAILS))
2138 fprintf (dump_file, "\n");
2139 return false;
2142 /* Value number all definitions in STMT to themselves.
2143 Return true if a value number changed. */
2145 static bool
2146 defs_to_varying (gimple stmt)
2148 bool changed = false;
2149 ssa_op_iter iter;
2150 def_operand_p defp;
2152 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2154 tree def = DEF_FROM_PTR (defp);
2156 VN_INFO (def)->use_processed = true;
2157 changed |= set_ssa_val_to (def, def);
2159 return changed;
2162 static bool expr_has_constants (tree expr);
2163 static tree valueize_expr (tree expr);
2165 /* Visit a copy between LHS and RHS, return true if the value number
2166 changed. */
2168 static bool
2169 visit_copy (tree lhs, tree rhs)
2171 /* Follow chains of copies to their destination. */
2172 while (TREE_CODE (rhs) == SSA_NAME
2173 && SSA_VAL (rhs) != rhs)
2174 rhs = SSA_VAL (rhs);
2176 /* The copy may have a more interesting constant-filled expression
2177 (we don't, since we know our RHS is just an SSA name). */
2178 if (TREE_CODE (rhs) == SSA_NAME)
2180 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2181 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2184 return set_ssa_val_to (lhs, rhs);
2187 /* Visit a nary operator RHS, value number it, and return true if the
2188 value number of LHS has changed as a result. */
2190 static bool
2191 visit_nary_op (tree lhs, gimple stmt)
2193 bool changed = false;
2194 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2196 if (result)
2197 changed = set_ssa_val_to (lhs, result);
2198 else
2200 changed = set_ssa_val_to (lhs, lhs);
2201 vn_nary_op_insert_stmt (stmt, lhs);
2204 return changed;
2207 /* Visit a call STMT storing into LHS. Return true if the value number
2208 of the LHS has changed as a result. */
2210 static bool
2211 visit_reference_op_call (tree lhs, gimple stmt)
2213 bool changed = false;
2214 struct vn_reference_s vr1;
2215 tree result;
2216 tree vuse = gimple_vuse (stmt);
2218 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2219 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2220 vr1.type = gimple_expr_type (stmt);
2221 vr1.set = 0;
2222 vr1.hashcode = vn_reference_compute_hash (&vr1);
2223 result = vn_reference_lookup_1 (&vr1, NULL);
2224 if (result)
2226 changed = set_ssa_val_to (lhs, result);
2227 if (TREE_CODE (result) == SSA_NAME
2228 && VN_INFO (result)->has_constants)
2229 VN_INFO (lhs)->has_constants = true;
2231 else
2233 void **slot;
2234 vn_reference_t vr2;
2235 changed = set_ssa_val_to (lhs, lhs);
2236 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2237 vr2->vuse = vr1.vuse;
2238 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2239 vr2->type = vr1.type;
2240 vr2->set = vr1.set;
2241 vr2->hashcode = vr1.hashcode;
2242 vr2->result = lhs;
2243 slot = htab_find_slot_with_hash (current_info->references,
2244 vr2, vr2->hashcode, INSERT);
2245 if (*slot)
2246 free_reference (*slot);
2247 *slot = vr2;
2250 return changed;
2253 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2254 and return true if the value number of the LHS has changed as a result. */
2256 static bool
2257 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2259 bool changed = false;
2260 tree last_vuse;
2261 tree result;
2263 last_vuse = gimple_vuse (stmt);
2264 last_vuse_ptr = &last_vuse;
2265 result = vn_reference_lookup (op, gimple_vuse (stmt),
2266 default_vn_walk_kind, NULL);
2267 last_vuse_ptr = NULL;
2269 /* If we have a VCE, try looking up its operand as it might be stored in
2270 a different type. */
2271 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2272 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2273 default_vn_walk_kind, NULL);
2275 /* We handle type-punning through unions by value-numbering based
2276 on offset and size of the access. Be prepared to handle a
2277 type mismatch here by creating a VIEW_CONVERT_EXPR. */
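/* An illustrative case: with union { int i; float f; } u, a store
   through u.f followed by a load of u.i hits the same offset/size,
   and the loaded value is only reusable as
   VIEW_CONVERT_EXPR <int> (stored value).  */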
2278 if (result
2279 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2281 /* We will be setting the value number of lhs to the value number
2282 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2283 So first simplify and lookup this expression to see if it
2284 is already available. */
2285 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2286 if ((CONVERT_EXPR_P (val)
2287 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2288 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2290 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2291 if ((CONVERT_EXPR_P (tem)
2292 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2293 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2294 TREE_TYPE (val), tem)))
2295 val = tem;
2297 result = val;
2298 if (!is_gimple_min_invariant (val)
2299 && TREE_CODE (val) != SSA_NAME)
2300 result = vn_nary_op_lookup (val, NULL);
2301 /* If the expression is not yet available, value-number lhs to
2302 a new SSA_NAME we create. */
2303 if (!result)
2305 result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
2306 /* Initialize value-number information properly. */
2307 VN_INFO_GET (result)->valnum = result;
2308 VN_INFO (result)->value_id = get_next_value_id ();
2309 VN_INFO (result)->expr = val;
2310 VN_INFO (result)->has_constants = expr_has_constants (val);
2311 VN_INFO (result)->needs_insertion = true;
2312 /* As all "inserted" statements are singleton SCCs, insert
2313 into the valid table. This is strictly needed to
2314 avoid re-generating new value SSA_NAMEs for the same
2315 expression during SCC iteration over and over (the
2316 optimistic table gets cleared after each iteration).
2317 We do not need to insert into the optimistic table, as
2318 lookups there will fall back to the valid table. */
2319 if (current_info == optimistic_info)
2321 current_info = valid_info;
2322 vn_nary_op_insert (val, result);
2323 current_info = optimistic_info;
2325 else
2326 vn_nary_op_insert (val, result);
2327 if (dump_file && (dump_flags & TDF_DETAILS))
2329 fprintf (dump_file, "Inserting name ");
2330 print_generic_expr (dump_file, result, 0);
2331 fprintf (dump_file, " for expression ");
2332 print_generic_expr (dump_file, val, 0);
2333 fprintf (dump_file, "\n");
2338 if (result)
2340 changed = set_ssa_val_to (lhs, result);
2341 if (TREE_CODE (result) == SSA_NAME
2342 && VN_INFO (result)->has_constants)
2344 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2345 VN_INFO (lhs)->has_constants = true;
2348 else
2350 changed = set_ssa_val_to (lhs, lhs);
2351 vn_reference_insert (op, lhs, last_vuse);
2354 return changed;
2358 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2359 and return true if the value number of the LHS has changed as a result. */
2361 static bool
2362 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2364 bool changed = false;
2365 tree result;
2366 bool resultsame = false;
2368 /* First we want to look up using the *vuses* from the store and see
2369 whether the last store to this location with the same address
2370 had the same value.
2372 The vuses represent the memory state before the store. If the
2373 memory state, address, and value of the store are the same as the
2374 last store to this location, then this store will produce the
2375 same memory state as that store.
2377 In this case the vdef versions for this store are value numbered to those
2378 vuse versions, since they represent the same memory state after
2379 this store.
2381 Otherwise, the vdefs for the store are used when inserting into
2382 the table, since the store generates a new memory state. */
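/* A sketch of the redundant-store case (hypothetical SSA names, for
   exposition only):

     # .MEM_4 = VDEF <.MEM_3>
     *p_1 = x_2;
     ...
     # .MEM_7 = VDEF <.MEM_4>
     *p_1 = x_2;

   The second store matches the first, so .MEM_7 is value numbered
   to .MEM_4.  */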
2384 result = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_NOWALK, NULL);
2386 if (result)
2388 if (TREE_CODE (result) == SSA_NAME)
2389 result = SSA_VAL (result);
2390 if (TREE_CODE (op) == SSA_NAME)
2391 op = SSA_VAL (op);
2392 resultsame = expressions_equal_p (result, op);
2395 if (!result || !resultsame)
2397 tree vdef;
2399 if (dump_file && (dump_flags & TDF_DETAILS))
2401 fprintf (dump_file, "No store match\n");
2402 fprintf (dump_file, "Value numbering store ");
2403 print_generic_expr (dump_file, lhs, 0);
2404 fprintf (dump_file, " to ");
2405 print_generic_expr (dump_file, op, 0);
2406 fprintf (dump_file, "\n");
2408 /* Have to set value numbers before insert, since insert is
2409 going to valueize the references in-place. */
2410 if ((vdef = gimple_vdef (stmt)))
2412 VN_INFO (vdef)->use_processed = true;
2413 changed |= set_ssa_val_to (vdef, vdef);
2416 /* Do not insert structure copies into the tables. */
2417 if (is_gimple_min_invariant (op)
2418 || is_gimple_reg (op))
2419 vn_reference_insert (lhs, op, vdef);
2421 else
2423 /* We had a match, so value number the vdef to have the value
2424 number of the vuse it came from. */
2425 tree def, use;
2427 if (dump_file && (dump_flags & TDF_DETAILS))
2428 fprintf (dump_file, "Store matched earlier value, "
2429 "value numbering store vdefs to matching vuses.\n");
2431 def = gimple_vdef (stmt);
2432 use = gimple_vuse (stmt);
2434 VN_INFO (def)->use_processed = true;
2435 changed |= set_ssa_val_to (def, SSA_VAL (use));
2438 return changed;
2441 /* Visit and value number PHI, return true if the value number
2442 changed. */
2444 static bool
2445 visit_phi (gimple phi)
2447 bool changed = false;
2448 tree result;
2449 tree sameval = VN_TOP;
2450 bool allsame = true;
2451 unsigned i;
2453 /* TODO: We could check for this in init_scc_vn, and replace this
2454 with a gcc_assert. */
2455 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2456 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2458 /* See if all non-TOP arguments have the same value. TOP is
2459 equivalent to everything, so we can ignore it. */
2460 for (i = 0; i < gimple_phi_num_args (phi); i++)
2462 tree def = PHI_ARG_DEF (phi, i);
2464 if (TREE_CODE (def) == SSA_NAME)
2465 def = SSA_VAL (def);
2466 if (def == VN_TOP)
2467 continue;
2468 if (sameval == VN_TOP)
2470 sameval = def;
2472 else
2474 if (!expressions_equal_p (def, sameval))
2476 allsame = false;
2477 break;
2482 /* If all arguments were value numbered to the same value, the PHI
2483 node has that value. */
2484 if (allsame)
2486 if (is_gimple_min_invariant (sameval))
2488 VN_INFO (PHI_RESULT (phi))->has_constants = true;
2489 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2491 else
2493 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2494 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2497 if (TREE_CODE (sameval) == SSA_NAME)
2498 return visit_copy (PHI_RESULT (phi), sameval);
2500 return set_ssa_val_to (PHI_RESULT (phi), sameval);
2503 /* Otherwise, see if it is equivalent to a phi node in this block. */
2504 result = vn_phi_lookup (phi);
2505 if (result)
2507 if (TREE_CODE (result) == SSA_NAME)
2508 changed = visit_copy (PHI_RESULT (phi), result);
2509 else
2510 changed = set_ssa_val_to (PHI_RESULT (phi), result);
2512 else
2514 vn_phi_insert (phi, PHI_RESULT (phi));
2515 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2516 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2517 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2520 return changed;
2523 /* Return true if EXPR contains constants. */
2525 static bool
2526 expr_has_constants (tree expr)
2528 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2530 case tcc_unary:
2531 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2533 case tcc_binary:
2534 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2535 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2536 /* Constants inside reference ops are rarely interesting, but
2537 it can take a lot of looking to find them. */
2538 case tcc_reference:
2539 case tcc_declaration:
2540 return false;
2541 default:
2542 return is_gimple_min_invariant (expr);
2544 return false;
2547 /* Return true if STMT contains constants. */
2549 static bool
2550 stmt_has_constants (gimple stmt)
2552 if (gimple_code (stmt) != GIMPLE_ASSIGN)
2553 return false;
2555 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2557 case GIMPLE_UNARY_RHS:
2558 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2560 case GIMPLE_BINARY_RHS:
2561 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2562 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
2563 case GIMPLE_TERNARY_RHS:
2564 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2565 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
2566 || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
2567 case GIMPLE_SINGLE_RHS:
2568 /* Constants inside reference ops are rarely interesting, but
2569 it can take a lot of looking to find them. */
2570 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2571 default:
2572 gcc_unreachable ();
2574 return false;
2577 /* Replace SSA_NAMEs in EXPR with their value numbers, and return the
2578 result.
2579 This is performed in place. */
2581 static tree
2582 valueize_expr (tree expr)
2584 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2586 case tcc_unary:
2587 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2588 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2589 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2590 break;
2591 case tcc_binary:
2592 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2593 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2594 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2595 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
2596 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
2597 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
2598 break;
2599 default:
2600 break;
2602 return expr;
2605 /* Simplify the binary expression RHS, and return the result if
2606 simplified. */
2608 static tree
2609 simplify_binary_expression (gimple stmt)
2611 tree result = NULL_TREE;
2612 tree op0 = gimple_assign_rhs1 (stmt);
2613 tree op1 = gimple_assign_rhs2 (stmt);
2615 /* This will not catch every single case we could combine, but will
2616 catch those with constants. The goal here is to simultaneously
2617 combine constants between expressions, but avoid infinite
2618 expansion of expressions during simplification. */
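/* A sketch: if b_1 has the recorded expression a_2 + 1 and we are
   value numbering c_3 = b_1 - 1, we hand fold (a_2 + 1) - 1 and can
   obtain a_2, while operands without constants are only replaced by
   their value numbers.  */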
2619 if (TREE_CODE (op0) == SSA_NAME)
2621 if (VN_INFO (op0)->has_constants
2622 || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
2623 op0 = valueize_expr (vn_get_expr_for (op0));
2624 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
2625 op0 = SSA_VAL (op0);
2628 if (TREE_CODE (op1) == SSA_NAME)
2630 if (VN_INFO (op1)->has_constants)
2631 op1 = valueize_expr (vn_get_expr_for (op1));
2632 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
2633 op1 = SSA_VAL (op1);
2636 /* Avoid folding if nothing changed. */
2637 if (op0 == gimple_assign_rhs1 (stmt)
2638 && op1 == gimple_assign_rhs2 (stmt))
2639 return NULL_TREE;
2641 fold_defer_overflow_warnings ();
2643 result = fold_binary (gimple_assign_rhs_code (stmt),
2644 gimple_expr_type (stmt), op0, op1);
2645 if (result)
2646 STRIP_USELESS_TYPE_CONVERSION (result);
2648 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
2649 stmt, 0);
2651 /* Make sure the result is not a complex expression consisting
2652 of operators of operators (i.e. (a + b) + (a + c)).
2653 Otherwise, we will end up with unbounded expressions if
2654 fold does anything at all. */
2655 if (result && valid_gimple_rhs_p (result))
2656 return result;
2658 return NULL_TREE;
2661 /* Simplify the unary expression RHS, and return the result if
2662 simplified. */
2664 static tree
2665 simplify_unary_expression (gimple stmt)
2667 tree result = NULL_TREE;
2668 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
2670 /* We handle some tcc_reference codes here that are all
2671 GIMPLE_ASSIGN_SINGLE codes. */
2672 if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
2673 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2674 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2675 op0 = TREE_OPERAND (op0, 0);
2677 if (TREE_CODE (op0) != SSA_NAME)
2678 return NULL_TREE;
2680 orig_op0 = op0;
2681 if (VN_INFO (op0)->has_constants)
2682 op0 = valueize_expr (vn_get_expr_for (op0));
2683 else if (gimple_assign_cast_p (stmt)
2684 || gimple_assign_rhs_code (stmt) == REALPART_EXPR
2685 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2686 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2688 /* We want to do tree-combining on conversion-like expressions.
2689 Make sure we feed only SSA_NAMEs or constants to fold though. */
2690 tree tem = valueize_expr (vn_get_expr_for (op0));
2691 if (UNARY_CLASS_P (tem)
2692 || BINARY_CLASS_P (tem)
2693 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
2694 || TREE_CODE (tem) == SSA_NAME
2695 || is_gimple_min_invariant (tem))
2696 op0 = tem;
2699 /* Avoid folding if nothing changed, but remember the expression. */
2700 if (op0 == orig_op0)
2701 return NULL_TREE;
2703 result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
2704 gimple_expr_type (stmt), op0);
2705 if (result)
2707 STRIP_USELESS_TYPE_CONVERSION (result);
2708 if (valid_gimple_rhs_p (result))
2709 return result;
2712 return NULL_TREE;
2715 /* Try to simplify RHS using equivalences and constant folding. */
2717 static tree
2718 try_to_simplify (gimple stmt)
2720 tree tem;
2722 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
2723 in this case; there is no point in doing extra work. */
2724 if (gimple_assign_copy_p (stmt)
2725 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2726 return NULL_TREE;
2728 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2730 case tcc_declaration:
2731 tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
2732 if (tem)
2733 return tem;
2734 break;
2736 case tcc_reference:
2737 /* Do not do full-blown reference lookup here, but simplify
2738 reads from constant aggregates. */
2739 tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
2740 if (tem)
2741 return tem;
2743 /* Fallthrough for some codes that can operate on registers. */
2744 if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
2745 || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
2746 || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
2747 break;
2748 /* We could do a little more with unary ops, if they expand
2749 into binary ops, but it's debatable whether it is worth it. */
2750 case tcc_unary:
2751 return simplify_unary_expression (stmt);
2752 break;
2753 case tcc_comparison:
2754 case tcc_binary:
2755 return simplify_binary_expression (stmt);
2756 break;
2757 default:
2758 break;
2761 return NULL_TREE;
2764 /* Visit and value number USE, return true if the value number
2765 changed. */
2767 static bool
2768 visit_use (tree use)
2770 bool changed = false;
2771 gimple stmt = SSA_NAME_DEF_STMT (use);
2773 VN_INFO (use)->use_processed = true;
2775 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
2776 if (dump_file && (dump_flags & TDF_DETAILS)
2777 && !SSA_NAME_IS_DEFAULT_DEF (use))
2779 fprintf (dump_file, "Value numbering ");
2780 print_generic_expr (dump_file, use, 0);
2781 fprintf (dump_file, " stmt = ");
2782 print_gimple_stmt (dump_file, stmt, 0, 0);
2785 /* Handle uninitialized uses. */
2786 if (SSA_NAME_IS_DEFAULT_DEF (use))
2787 changed = set_ssa_val_to (use, use);
2788 else
2790 if (gimple_code (stmt) == GIMPLE_PHI)
2791 changed = visit_phi (stmt);
2792 else if (!gimple_has_lhs (stmt)
2793 || gimple_has_volatile_ops (stmt)
2794 || stmt_could_throw_p (stmt))
2795 changed = defs_to_varying (stmt);
2796 else if (is_gimple_assign (stmt))
2798 tree lhs = gimple_assign_lhs (stmt);
2799 tree simplified;
2801 /* Shortcut for copies. Simplifying copies is pointless,
2802 since we copy the expression and value they represent. */
2803 if (gimple_assign_copy_p (stmt)
2804 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2805 && TREE_CODE (lhs) == SSA_NAME)
2807 changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
2808 goto done;
2810 simplified = try_to_simplify (stmt);
2811 if (simplified)
2813 if (dump_file && (dump_flags & TDF_DETAILS))
2815 fprintf (dump_file, "RHS ");
2816 print_gimple_expr (dump_file, stmt, 0, 0);
2817 fprintf (dump_file, " simplified to ");
2818 print_generic_expr (dump_file, simplified, 0);
2819 if (TREE_CODE (lhs) == SSA_NAME)
2820 fprintf (dump_file, " has constants %d\n",
2821 expr_has_constants (simplified));
2822 else
2823 fprintf (dump_file, "\n");
2826 /* Setting value numbers to constants will occasionally
2827 screw up phi congruence because constants are not
2828 uniquely associated with a single ssa name that can be
2829 looked up. */
2830 if (simplified
2831 && is_gimple_min_invariant (simplified)
2832 && TREE_CODE (lhs) == SSA_NAME)
2834 VN_INFO (lhs)->expr = simplified;
2835 VN_INFO (lhs)->has_constants = true;
2836 changed = set_ssa_val_to (lhs, simplified);
2837 goto done;
2839 else if (simplified
2840 && TREE_CODE (simplified) == SSA_NAME
2841 && TREE_CODE (lhs) == SSA_NAME)
2843 changed = visit_copy (lhs, simplified);
2844 goto done;
2846 else if (simplified)
2848 if (TREE_CODE (lhs) == SSA_NAME)
2850 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
2851 /* We have to unshare the expression or else
2852 valueizing may change the IL stream. */
2853 VN_INFO (lhs)->expr = unshare_expr (simplified);
2856 else if (stmt_has_constants (stmt)
2857 && TREE_CODE (lhs) == SSA_NAME)
2858 VN_INFO (lhs)->has_constants = true;
2859 else if (TREE_CODE (lhs) == SSA_NAME)
2861 /* We reset expr and constantness here because we may
2862 have been value numbering optimistically while
2863 iterating; they may become non-constant in this case,
2864 even if they were optimistically constant. */
2866 VN_INFO (lhs)->has_constants = false;
2867 VN_INFO (lhs)->expr = NULL_TREE;
2870 if ((TREE_CODE (lhs) == SSA_NAME
2871 /* We can substitute SSA_NAMEs that are live over
2872 abnormal edges with their constant value. */
2873 && !(gimple_assign_copy_p (stmt)
2874 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2875 && !(simplified
2876 && is_gimple_min_invariant (simplified))
2877 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2878 /* Stores or copies from SSA_NAMEs that are live over
2879 abnormal edges are a problem. */
2880 || (gimple_assign_single_p (stmt)
2881 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2882 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
2883 changed = defs_to_varying (stmt);
2884 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
2886 changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
2888 else if (TREE_CODE (lhs) == SSA_NAME)
2890 if ((gimple_assign_copy_p (stmt)
2891 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2892 || (simplified
2893 && is_gimple_min_invariant (simplified)))
2895 VN_INFO (lhs)->has_constants = true;
2896 if (simplified)
2897 changed = set_ssa_val_to (lhs, simplified);
2898 else
2899 changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
2901 else
2903 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2905 case GIMPLE_UNARY_RHS:
2906 case GIMPLE_BINARY_RHS:
2907 case GIMPLE_TERNARY_RHS:
2908 changed = visit_nary_op (lhs, stmt);
2909 break;
2910 case GIMPLE_SINGLE_RHS:
2911 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2913 case tcc_reference:
2914 /* VOP-less references can go through the unary case. */
2915 if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
2916 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2917 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2918 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
2920 changed = visit_nary_op (lhs, stmt);
2921 break;
2923 /* Fallthrough. */
2924 case tcc_declaration:
2925 changed = visit_reference_op_load
2926 (lhs, gimple_assign_rhs1 (stmt), stmt);
2927 break;
2928 case tcc_expression:
2929 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
2931 changed = visit_nary_op (lhs, stmt);
2932 break;
2934 /* Fallthrough. */
2935 default:
2936 changed = defs_to_varying (stmt);
2938 break;
2939 default:
2940 changed = defs_to_varying (stmt);
2941 break;
2945 else
2946 changed = defs_to_varying (stmt);
2948 else if (is_gimple_call (stmt))
2950 tree lhs = gimple_call_lhs (stmt);
2952 /* ??? We could try to simplify calls. */
2954 if (stmt_has_constants (stmt)
2955 && TREE_CODE (lhs) == SSA_NAME)
2956 VN_INFO (lhs)->has_constants = true;
2957 else if (TREE_CODE (lhs) == SSA_NAME)
2959 /* We reset expr and constantness here because we may
2960 have been value numbering optimistically while
2961 iterating; they may become non-constant in this case,
2962 even if they were optimistically constant. */
2963 VN_INFO (lhs)->has_constants = false;
2964 VN_INFO (lhs)->expr = NULL_TREE;
2967 if (TREE_CODE (lhs) == SSA_NAME
2968 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2969 changed = defs_to_varying (stmt);
2970 /* ??? We should handle stores from calls. */
2971 else if (TREE_CODE (lhs) == SSA_NAME)
2973 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
2974 changed = visit_reference_op_call (lhs, stmt);
2975 else
2976 changed = defs_to_varying (stmt);
2978 else
2979 changed = defs_to_varying (stmt);
2982 done:
2983 return changed;
2986 /* Compare two operands by reverse postorder index. */
2988 static int
2989 compare_ops (const void *pa, const void *pb)
2991 const tree opa = *((const tree *)pa);
2992 const tree opb = *((const tree *)pb);
2993 gimple opstmta = SSA_NAME_DEF_STMT (opa);
2994 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
2995 basic_block bba;
2996 basic_block bbb;
2998 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
2999 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3000 else if (gimple_nop_p (opstmta))
3001 return -1;
3002 else if (gimple_nop_p (opstmtb))
3003 return 1;
3005 bba = gimple_bb (opstmta);
3006 bbb = gimple_bb (opstmtb);
3008 if (!bba && !bbb)
3009 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3010 else if (!bba)
3011 return -1;
3012 else if (!bbb)
3013 return 1;
3015 if (bba == bbb)
3017 if (gimple_code (opstmta) == GIMPLE_PHI
3018 && gimple_code (opstmtb) == GIMPLE_PHI)
3019 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3020 else if (gimple_code (opstmta) == GIMPLE_PHI)
3021 return -1;
3022 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3023 return 1;
3024 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3025 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3026 else
3027 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3029 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3032 /* Sort an array containing members of a strongly connected component
3033 SCC so that the members are ordered by RPO number.
3034 This means that when the sort is complete, iterating through the
3035 array will give you the members in RPO order. */
3037 static void
3038 sort_scc (VEC (tree, heap) *scc)
3040 VEC_qsort (tree, scc, compare_ops);
3043 /* Insert the no longer used nary ONARY into the hash table of INFO. */
3045 static void
3046 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3048 size_t size = sizeof_vn_nary_op (onary->length);
3049 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3050 &info->nary_obstack);
3051 memcpy (nary, onary, size);
3052 vn_nary_op_insert_into (nary, info->nary, false);
3055 /* Insert the no longer used phi OPHI into the hash table of INFO. */
3057 static void
3058 copy_phi (vn_phi_t ophi, vn_tables_t info)
3060 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3061 void **slot;
3062 memcpy (phi, ophi, sizeof (*phi));
3063 ophi->phiargs = NULL;
3064 slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
3065 gcc_assert (!*slot);
3066 *slot = phi;
3069 /* Insert the no longer used reference OREF into the hash table of INFO. */
3071 static void
3072 copy_reference (vn_reference_t oref, vn_tables_t info)
3074 vn_reference_t ref;
3075 void **slot;
3076 ref = (vn_reference_t) pool_alloc (info->references_pool);
3077 memcpy (ref, oref, sizeof (*ref));
3078 oref->operands = NULL;
3079 slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
3080 INSERT);
3081 if (*slot)
3082 free_reference (*slot);
3083 *slot = ref;
3086 /* Process a strongly connected component in the SSA graph. */
3088 static void
3089 process_scc (VEC (tree, heap) *scc)
3091 tree var;
3092 unsigned int i;
3093 unsigned int iterations = 0;
3094 bool changed = true;
3095 htab_iterator hi;
3096 vn_nary_op_t nary;
3097 vn_phi_t phi;
3098 vn_reference_t ref;
3100 /* If the SCC has a single member, just visit it. */
3101 if (VEC_length (tree, scc) == 1)
3103 tree use = VEC_index (tree, scc, 0);
3104 if (VN_INFO (use)->use_processed)
3105 return;
3106 /* We need to make sure it doesn't form a cycle itself, which can
3107 happen for self-referential PHI nodes. In that case we would
3108 end up inserting an expression with VN_TOP operands into the
3109 valid table which makes us derive bogus equivalences later.
3110 The cheapest way to check this is to assume it for all PHI nodes. */
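/* E.g. (illustrative) x_1 = PHI <x_2(entry), x_1(latch)> forms a
   one-element cycle; visiting it just once could record an
   expression with a VN_TOP operand in the valid table.  */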
3111 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3112 /* Fallthru to iteration. */ ;
3113 else
3115 visit_use (use);
3116 return;
3120 /* Iterate over the SCC with the optimistic table until it stops
3121 changing. */
3122 current_info = optimistic_info;
3123 while (changed)
3125 changed = false;
3126 iterations++;
3127 /* As we are value-numbering optimistically we have to
3128 clear the expression tables and the simplified expressions
3129 in each iteration until we converge. */
3130 htab_empty (optimistic_info->nary);
3131 htab_empty (optimistic_info->phis);
3132 htab_empty (optimistic_info->references);
3133 obstack_free (&optimistic_info->nary_obstack, NULL);
3134 gcc_obstack_init (&optimistic_info->nary_obstack);
3135 empty_alloc_pool (optimistic_info->phis_pool);
3136 empty_alloc_pool (optimistic_info->references_pool);
3137 FOR_EACH_VEC_ELT (tree, scc, i, var)
3138 VN_INFO (var)->expr = NULL_TREE;
3139 FOR_EACH_VEC_ELT (tree, scc, i, var)
3140 changed |= visit_use (var);
3143 statistics_histogram_event (cfun, "SCC iterations", iterations);
3145 /* Finally, copy the contents of the no longer used optimistic
3146 table to the valid table. */
3147 FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
3148 copy_nary (nary, valid_info);
3149 FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
3150 copy_phi (phi, valid_info);
3151 FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
3152 copy_reference (ref, valid_info);
3154 current_info = valid_info;
3157 DEF_VEC_O(ssa_op_iter);
3158 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
3160 /* Pop the components of the found SCC for NAME off the SCC stack
3161 and process them. Returns true if all went well, false if
3162 we run into resource limits. */
3164 static bool
3165 extract_and_process_scc_for_name (tree name)
3167 VEC (tree, heap) *scc = NULL;
3168 tree x;
3170 /* Found an SCC, pop the components off the SCC stack and
3171 process them. */
3174 x = VEC_pop (tree, sccstack);
3176 VN_INFO (x)->on_sccstack = false;
3177 VEC_safe_push (tree, heap, scc, x);
3178 } while (x != name);
3180 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
3181 if (VEC_length (tree, scc)
3182 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3184 if (dump_file)
3185 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3186 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
3187 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3188 return false;
3191 if (VEC_length (tree, scc) > 1)
3192 sort_scc (scc);
3194 if (dump_file && (dump_flags & TDF_DETAILS))
3195 print_scc (dump_file, scc);
3197 process_scc (scc);
3199 VEC_free (tree, heap, scc);
3201 return true;
3204 /* Depth-first search on NAME to discover and process SCCs in the SSA
3205 graph.
3206 Execution of this algorithm relies on the fact that the SCCs are
3207 popped off the stack in topological order.
3208 Returns true if successful, false if we stopped processing SCCs due
3209 to resource constraints. */
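/* This is essentially Tarjan's SCC algorithm made iterative: instead
   of recursing on a use, the current NAME and use-iterator are pushed
   onto NAMEVEC and ITERVEC and the walk restarts at start_over.  */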
3211 static bool
3212 DFS (tree name)
3214 VEC(ssa_op_iter, heap) *itervec = NULL;
3215 VEC(tree, heap) *namevec = NULL;
3216 use_operand_p usep = NULL;
3217 gimple defstmt;
3218 tree use;
3219 ssa_op_iter iter;
3221 start_over:
3222 /* SCC info */
3223 VN_INFO (name)->dfsnum = next_dfs_num++;
3224 VN_INFO (name)->visited = true;
3225 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3227 VEC_safe_push (tree, heap, sccstack, name);
3228 VN_INFO (name)->on_sccstack = true;
3229 defstmt = SSA_NAME_DEF_STMT (name);
3231 /* Recursively DFS on our operands, looking for SCCs. */
3232 if (!gimple_nop_p (defstmt))
3234 /* Push a new iterator. */
3235 if (gimple_code (defstmt) == GIMPLE_PHI)
3236 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3237 else
3238 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3240 else
3241 clear_and_done_ssa_iter (&iter);
3243 while (1)
3245 /* If we are done processing uses of a name, go up the stack
3246 of iterators and process SCCs as we found them. */
3247 if (op_iter_done (&iter))
3249 /* See if we found an SCC. */
3250 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3251 if (!extract_and_process_scc_for_name (name))
3253 VEC_free (tree, heap, namevec);
3254 VEC_free (ssa_op_iter, heap, itervec);
3255 return false;
3258 /* Check if we are done. */
3259 if (VEC_empty (tree, namevec))
3261 VEC_free (tree, heap, namevec);
3262 VEC_free (ssa_op_iter, heap, itervec);
3263 return true;
3266 /* Restore the last use walker and continue walking there. */
3267 use = name;
3268 name = VEC_pop (tree, namevec);
3269 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
3270 sizeof (ssa_op_iter));
3271 VEC_pop (ssa_op_iter, itervec);
3272 goto continue_walking;
3275 use = USE_FROM_PTR (usep);
3277 /* Since we handle phi nodes, we will sometimes get
3278 invariants in the use expression. */
3279 if (TREE_CODE (use) == SSA_NAME)
3281 if (! (VN_INFO (use)->visited))
3283 /* Recurse by pushing the current use walking state on
3284 the stack and starting over. */
3285 VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
3286 VEC_safe_push(tree, heap, namevec, name);
3287 name = use;
3288 goto start_over;
3290 continue_walking:
3291 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3292 VN_INFO (use)->low);
3294 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3295 && VN_INFO (use)->on_sccstack)
3297 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3298 VN_INFO (name)->low);
3302 usep = op_iter_next_use (&iter);
3306 /* Allocate a value number table. */
3308 static void
3309 allocate_vn_table (vn_tables_t table)
3311 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3312 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3313 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3314 free_reference);
3316 gcc_obstack_init (&table->nary_obstack);
3317 table->phis_pool = create_alloc_pool ("VN phis",
3318 sizeof (struct vn_phi_s),
3319 30);
3320 table->references_pool = create_alloc_pool ("VN references",
3321 sizeof (struct vn_reference_s),
3322 30);
3325 /* Free a value number table. */
3327 static void
3328 free_vn_table (vn_tables_t table)
3330 htab_delete (table->phis);
3331 htab_delete (table->nary);
3332 htab_delete (table->references);
3333 obstack_free (&table->nary_obstack, NULL);
3334 free_alloc_pool (table->phis_pool);
3335 free_alloc_pool (table->references_pool);
3338 static void
3339 init_scc_vn (void)
3341 size_t i;
3342 int j;
3343 int *rpo_numbers_temp;
3345 calculate_dominance_info (CDI_DOMINATORS);
3346 sccstack = NULL;
3347 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3348 free);
3350 constant_value_ids = BITMAP_ALLOC (NULL);
3352 next_dfs_num = 1;
3353 next_value_id = 1;
3355 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
3356 /* VEC_alloc doesn't actually grow it to the right size; it just
3357 preallocates the space to do so. */
3358 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
3359 gcc_obstack_init (&vn_ssa_aux_obstack);
3361 shared_lookup_phiargs = NULL;
3362 shared_lookup_references = NULL;
3363 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3364 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3365 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3367 /* RPO_NUMBERS_TEMP is the RPO ordering itself: rpo_numbers_temp[i]
3368 is the index of the i'th block in RPO order. We want to map
3369 block indices to RPO numbers, so we need to invert this array. */
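/* E.g. (illustrative) if rpo_numbers_temp = { 5, 3, 4 }, then block 5
   gets rpo_numbers[5] = 0, block 3 gets rpo_numbers[3] = 1, and
   block 4 gets rpo_numbers[4] = 2.  */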
3370 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3371 rpo_numbers[rpo_numbers_temp[j]] = j;
3373 XDELETE (rpo_numbers_temp);
3375 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3377 /* Create the VN_INFO structures, and initialize value numbers to
3378 TOP. */
3379 for (i = 0; i < num_ssa_names; i++)
3381 tree name = ssa_name (i);
3382 if (name)
3384 VN_INFO_GET (name)->valnum = VN_TOP;
3385 VN_INFO (name)->expr = NULL_TREE;
3386 VN_INFO (name)->value_id = 0;
3390 renumber_gimple_stmt_uids ();
3392 /* Create the valid and optimistic value numbering tables. */
3393 valid_info = XCNEW (struct vn_tables_s);
3394 allocate_vn_table (valid_info);
3395 optimistic_info = XCNEW (struct vn_tables_s);
3396 allocate_vn_table (optimistic_info);
3399 void
3400 free_scc_vn (void)
3402 size_t i;
3404 htab_delete (constant_to_value_id);
3405 BITMAP_FREE (constant_value_ids);
3406 VEC_free (tree, heap, shared_lookup_phiargs);
3407 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
3408 XDELETEVEC (rpo_numbers);
3410 for (i = 0; i < num_ssa_names; i++)
3412 tree name = ssa_name (i);
3413 if (name
3414 && VN_INFO (name)->needs_insertion)
3415 release_ssa_name (name);
3417 obstack_free (&vn_ssa_aux_obstack, NULL);
3418 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
3420 VEC_free (tree, heap, sccstack);
3421 free_vn_table (valid_info);
3422 XDELETE (valid_info);
3423 free_vn_table (optimistic_info);
3424 XDELETE (optimistic_info);
3427 /* Set *ID if we computed something useful in RESULT. */
3429 static void
3430 set_value_id_for_result (tree result, unsigned int *id)
3432 if (result)
3434 if (TREE_CODE (result) == SSA_NAME)
3435 *id = VN_INFO (result)->value_id;
3436 else if (is_gimple_min_invariant (result))
3437 *id = get_or_alloc_constant_value_id (result);
3441 /* Set the value ids in the valid hash tables. */
3443 static void
3444 set_hashtable_value_ids (void)
3446 htab_iterator hi;
3447 vn_nary_op_t vno;
3448 vn_reference_t vr;
3449 vn_phi_t vp;
3451 /* Now set the value ids of the things we had put in the hash
3452 table. */
3454 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
3455 vno, vn_nary_op_t, hi)
3456 set_value_id_for_result (vno->result, &vno->value_id);
3458 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
3459 vp, vn_phi_t, hi)
3460 set_value_id_for_result (vp->result, &vp->value_id);
3462 FOR_EACH_HTAB_ELEMENT (valid_info->references,
3463 vr, vn_reference_t, hi)
3464 set_value_id_for_result (vr->result, &vr->value_id);
3467 /* Do SCCVN. Returns true if it finished, false if we bailed out
3468 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
3469 how we use the alias oracle walking during the VN process. */
3471 bool
3472 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
3474 size_t i;
3475 tree param;
3476 bool changed = true;
3478 default_vn_walk_kind = default_vn_walk_kind_;
3480 init_scc_vn ();
3481 current_info = valid_info;
3483 for (param = DECL_ARGUMENTS (current_function_decl);
3484 param;
3485 param = DECL_CHAIN (param))
3487 if (gimple_default_def (cfun, param) != NULL)
3489 tree def = gimple_default_def (cfun, param);
3490 VN_INFO (def)->valnum = def;
3494 for (i = 1; i < num_ssa_names; ++i)
3496 tree name = ssa_name (i);
3497 if (name
3498 && VN_INFO (name)->visited == false
3499 && !has_zero_uses (name))
3500 if (!DFS (name))
3502 free_scc_vn ();
3503 return false;
3507 /* Initialize the value ids. */
3509 for (i = 1; i < num_ssa_names; ++i)
3511 tree name = ssa_name (i);
3512 vn_ssa_aux_t info;
3513 if (!name)
3514 continue;
3515 info = VN_INFO (name);
3516 if (info->valnum == name
3517 || info->valnum == VN_TOP)
3518 info->value_id = get_next_value_id ();
3519 else if (is_gimple_min_invariant (info->valnum))
3520 info->value_id = get_or_alloc_constant_value_id (info->valnum);
3523 /* Propagate until they stop changing. */
3524 while (changed)
3526 changed = false;
3527 for (i = 1; i < num_ssa_names; ++i)
3529 tree name = ssa_name (i);
3530 vn_ssa_aux_t info;
3531 if (!name)
3532 continue;
3533 info = VN_INFO (name);
3534 if (TREE_CODE (info->valnum) == SSA_NAME
3535 && info->valnum != name
3536 && info->value_id != VN_INFO (info->valnum)->value_id)
3538 changed = true;
3539 info->value_id = VN_INFO (info->valnum)->value_id;
3544 set_hashtable_value_ids ();
3546 if (dump_file && (dump_flags & TDF_DETAILS))
3548 fprintf (dump_file, "Value numbers:\n");
3549 for (i = 0; i < num_ssa_names; i++)
3551 tree name = ssa_name (i);
3552 if (name
3553 && VN_INFO (name)->visited
3554 && SSA_VAL (name) != name)
3556 print_generic_expr (dump_file, name, 0);
3557 fprintf (dump_file, " = ");
3558 print_generic_expr (dump_file, SSA_VAL (name), 0);
3559 fprintf (dump_file, "\n");
3564 return true;
3567 /* Return the maximum value id we have ever seen. */
3569 unsigned int
3570 get_max_value_id (void)
3572 return next_value_id;
3575 /* Return the next unique value id. */
3577 unsigned int
3578 get_next_value_id (void)
3580 return next_value_id++;
3584 /* Compare two expressions E1 and E2 and return true if they are equal. */
3586 bool
3587 expressions_equal_p (tree e1, tree e2)
3589 /* The obvious case. */
3590 if (e1 == e2)
3591 return true;
3593 /* If only one of them is null, they cannot be equal. */
3594 if (!e1 || !e2)
3595 return false;
3597 /* Now perform the actual comparison. */
3598 if (TREE_CODE (e1) == TREE_CODE (e2)
3599 && operand_equal_p (e1, e2, OEP_PURE_SAME))
3600 return true;
3602 return false;
3606 /* Return true if the nary operation NARY may trap. This is a copy
3607 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
3609 bool
3610 vn_nary_may_trap (vn_nary_op_t nary)
3612 tree type;
3613 tree rhs2 = NULL_TREE;
3614 bool honor_nans = false;
3615 bool honor_snans = false;
3616 bool fp_operation = false;
3617 bool honor_trapv = false;
3618 bool handled, ret;
3619 unsigned i;
3621 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
3622 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
3623 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
3625 type = nary->type;
3626 fp_operation = FLOAT_TYPE_P (type);
3627 if (fp_operation)
3629 honor_nans = flag_trapping_math && !flag_finite_math_only;
3630 honor_snans = flag_signaling_nans != 0;
3632 else if (INTEGRAL_TYPE_P (type)
3633 && TYPE_OVERFLOW_TRAPS (type))
3634 honor_trapv = true;
3636 if (nary->length >= 2)
3637 rhs2 = nary->op[1];
3638 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
3639 honor_trapv,
3640 honor_nans, honor_snans, rhs2,
3641 &handled);
3642 if (handled
3643 && ret)
3644 return true;
3646 for (i = 0; i < nary->length; ++i)
3647 if (tree_could_trap_p (nary->op[i]))
3648 return true;
3650 return false;