fix pr/45972
[official-gcc.git] / gcc / tree-ssa-sccvn.c
blob 557c393037ad6f54522eb4099a161d1d6b34c2b4
1 /* SCC value numbering for trees
2 Copyright (C) 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "basic-block.h"
28 #include "tree-pretty-print.h"
29 #include "gimple-pretty-print.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
32 #include "gimple.h"
33 #include "tree-dump.h"
34 #include "timevar.h"
35 #include "fibheap.h"
36 #include "hashtab.h"
37 #include "tree-iterator.h"
38 #include "alloc-pool.h"
39 #include "tree-pass.h"
40 #include "flags.h"
41 #include "bitmap.h"
42 #include "langhooks.h"
43 #include "cfgloop.h"
44 #include "params.h"
45 #include "tree-ssa-propagate.h"
46 #include "tree-ssa-sccvn.h"
48 /* This algorithm is based on the SCC algorithm presented by Keith
49 Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
50 (http://citeseer.ist.psu.edu/41805.html). In
51 straight-line code, it is equivalent to a regular hash-based value
52 numbering that is performed in reverse postorder.
54 For code with cycles, there are two alternatives, both of which
55 require keeping the hashtables separate from the actual list of
56 value numbers for SSA names.
58 1. Iterate value numbering in an RPO walk of the blocks, removing
59 all the entries from the hashtable after each iteration (but
60 keeping the SSA name->value number mapping between iterations).
61 Iterate until it does not change.
63 2. Perform value numbering as part of an SCC walk on the SSA graph,
64 iterating only the cycles in the SSA graph until they do not change
65 (using a separate, optimistic hashtable for value numbering the SCC
66 operands).
68 The second is not just faster in practice (because most SSA graph
69 cycles do not involve all the variables in the graph), it also has
70 some nice properties.
72 One of these nice properties is that when we pop an SCC off the
73 stack, we are guaranteed to have processed all the operands coming from
74 *outside of that SCC*, so we do not need to do anything special to
75 ensure they have value numbers.
77 Another nice property is that the SCC walk is done as part of a DFS
78 of the SSA graph, which makes it easy to perform combining and
79 simplifying operations at the same time.
81 The code below is deliberately written in a way that makes it easy
82 to separate the SCC walk from the other work it does.
84 In order to propagate constants through the code, we track which
85 expressions contain constants, and use those while folding. In
86 theory, we could also track expressions whose value numbers are
87 replaced, in case we end up folding based on expression
88 identities.
90 In order to value number memory, we assign value numbers to vuses.
91 This enables us to note that, for example, stores to the same
92 address of the same value from the same starting memory states are
93 equivalent.
94 TODO:
96 1. We can iterate only the changing portions of the SCCs, but
97 I have not seen an SCC big enough for this to be a win.
98 2. If you differentiate between phi nodes for loops and phi nodes
99 for if-then-else, you can properly consider phi nodes in different
100 blocks for equivalence.
101 3. We could value number vuses in more cases, particularly whole
102 structure copies.  */
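/* Illustrative sketch (editor's addition, not part of the original
   sources).  In straight-line code such as

     x_1 = a_2 + b_3;
     y_4 = a_2 + b_3;

   both right-hand sides hash to the same expression, so x_1 and y_4
   receive the same value number.  For a cycle such as

     # i_1 = PHI <0(entry), i_2(latch)>
     i_2 = i_1 + 1;

   the SCC {i_1, i_2} is iterated using the optimistic table until
   the value numbers stabilize.  */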
105 /* The set of hashtables and alloc_pool's for their items. */
107 typedef struct vn_tables_s
109 htab_t nary;
110 htab_t phis;
111 htab_t references;
112 struct obstack nary_obstack;
113 alloc_pool phis_pool;
114 alloc_pool references_pool;
115 } *vn_tables_t;
117 static htab_t constant_to_value_id;
118 static bitmap constant_value_ids;
121 /* Valid hashtables storing information we have proven to be
122 correct. */
124 static vn_tables_t valid_info;
126 /* Optimistic hashtables storing information we are making assumptions about
127 during iterations. */
129 static vn_tables_t optimistic_info;
131 /* Pointer to the set of hashtables that is currently being used.
132 Should always point to either the optimistic_info, or the
133 valid_info. */
135 static vn_tables_t current_info;
138 /* Reverse post order index for each basic block. */
140 static int *rpo_numbers;
142 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
144 /* This represents the top of the VN lattice, which is the universal
145 value. */
147 tree VN_TOP;
149 /* Unique counter for our value ids. */
151 static unsigned int next_value_id;
153 /* Next DFS number and the stack for strongly connected component
154 detection. */
156 static unsigned int next_dfs_num;
157 static VEC (tree, heap) *sccstack;
160 DEF_VEC_P(vn_ssa_aux_t);
161 DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
163 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
164 are allocated on an obstack for locality reasons, and to free them
165 without looping over the VEC. */
167 static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
168 static struct obstack vn_ssa_aux_obstack;
170 /* Return the value numbering information for a given SSA name. */
172 vn_ssa_aux_t
173 VN_INFO (tree name)
175 vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
176 SSA_NAME_VERSION (name));
177 gcc_checking_assert (res);
178 return res;
181 /* Set the value numbering info for a given SSA name to a given
182 value. */
184 static inline void
185 VN_INFO_SET (tree name, vn_ssa_aux_t value)
187 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
188 SSA_NAME_VERSION (name), value);
191 /* Initialize the value numbering info for a given SSA name.
192 This should be called just once for every SSA name. */
194 vn_ssa_aux_t
195 VN_INFO_GET (tree name)
197 vn_ssa_aux_t newinfo;
199 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
200 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
201 if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
202 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
203 SSA_NAME_VERSION (name) + 1);
204 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
205 SSA_NAME_VERSION (name), newinfo);
206 return newinfo;
210 /* Get the representative expression for the SSA_NAME NAME. Returns
211 the representative SSA_NAME if there is no expression associated with it. */
213 tree
214 vn_get_expr_for (tree name)
216 vn_ssa_aux_t vn = VN_INFO (name);
217 gimple def_stmt;
218 tree expr = NULL_TREE;
220 if (vn->valnum == VN_TOP)
221 return name;
223 /* If the value-number is a constant it is the representative
224 expression. */
225 if (TREE_CODE (vn->valnum) != SSA_NAME)
226 return vn->valnum;
228 /* Get to the information of the value of this SSA_NAME. */
229 vn = VN_INFO (vn->valnum);
231 /* If the value-number is a constant it is the representative
232 expression. */
233 if (TREE_CODE (vn->valnum) != SSA_NAME)
234 return vn->valnum;
236 /* Else if we have an expression, return it. */
237 if (vn->expr != NULL_TREE)
238 return vn->expr;
240 /* Otherwise use the defining statement to build the expression. */
241 def_stmt = SSA_NAME_DEF_STMT (vn->valnum);
243 /* If the value number is a default-definition or a PHI result
244 use it directly. */
245 if (gimple_nop_p (def_stmt)
246 || gimple_code (def_stmt) == GIMPLE_PHI)
247 return vn->valnum;
249 if (!is_gimple_assign (def_stmt))
250 return vn->valnum;
252 /* FIXME tuples. This is incomplete and likely will miss some
253 simplifications. */
254 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
256 case tcc_reference:
257 if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
258 || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
259 || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
260 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
261 expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
262 gimple_expr_type (def_stmt),
263 TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
264 break;
266 case tcc_unary:
267 expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
268 gimple_expr_type (def_stmt),
269 gimple_assign_rhs1 (def_stmt));
270 break;
272 case tcc_binary:
273 expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
274 gimple_expr_type (def_stmt),
275 gimple_assign_rhs1 (def_stmt),
276 gimple_assign_rhs2 (def_stmt));
277 break;
279 default:;
281 if (expr == NULL_TREE)
282 return vn->valnum;
284 /* Cache the expression. */
285 vn->expr = expr;
287 return expr;
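/* Illustrative sketch (editor's addition).  If SSA_VAL (a_1) is b_2
   and b_2 is defined by

     b_2 = -c_3;

   then vn_get_expr_for (a_1) hits the tcc_unary case above, builds
   the expression -c_3, caches it in b_2's vn_ssa_aux, and returns it
   as the representative for a_1's value.  */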
291 /* Free a phi operation structure VP. */
293 static void
294 free_phi (void *vp)
296 vn_phi_t phi = (vn_phi_t) vp;
297 VEC_free (tree, heap, phi->phiargs);
300 /* Free a reference operation structure VP. */
302 static void
303 free_reference (void *vp)
305 vn_reference_t vr = (vn_reference_t) vp;
306 VEC_free (vn_reference_op_s, heap, vr->operands);
309 /* Hash table equality function for vn_constant_t. */
311 static int
312 vn_constant_eq (const void *p1, const void *p2)
314 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
315 const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;
317 if (vc1->hashcode != vc2->hashcode)
318 return false;
320 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
323 /* Hash table hash function for vn_constant_t. */
325 static hashval_t
326 vn_constant_hash (const void *p1)
328 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
329 return vc1->hashcode;
332 /* Lookup a value id for CONSTANT and return it.  Return 0 if it
333 does not exist.  */
335 unsigned int
336 get_constant_value_id (tree constant)
338 void **slot;
339 struct vn_constant_s vc;
341 vc.hashcode = vn_hash_constant_with_type (constant);
342 vc.constant = constant;
343 slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
344 vc.hashcode, NO_INSERT);
345 if (slot)
346 return ((vn_constant_t)*slot)->value_id;
347 return 0;
350 /* Lookup a value id for CONSTANT; if none exists, create and record
351 a new one. Return the value id in either case. */
353 unsigned int
354 get_or_alloc_constant_value_id (tree constant)
356 void **slot;
357 struct vn_constant_s vc;
358 vn_constant_t vcp;
360 vc.hashcode = vn_hash_constant_with_type (constant);
361 vc.constant = constant;
362 slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
363 vc.hashcode, INSERT);
364 if (*slot)
365 return ((vn_constant_t)*slot)->value_id;
367 vcp = XNEW (struct vn_constant_s);
368 vcp->hashcode = vc.hashcode;
369 vcp->constant = constant;
370 vcp->value_id = get_next_value_id ();
371 *slot = (void *) vcp;
372 bitmap_set_bit (constant_value_ids, vcp->value_id);
373 return vcp->value_id;
376 /* Return true if V is a value id for a constant. */
378 bool
379 value_id_constant_p (unsigned int v)
381 return bitmap_bit_p (constant_value_ids, v);
384 /* Compare two reference operands P1 and P2 for equality. Return true if
385 they are equal, and false otherwise. */
387 static int
388 vn_reference_op_eq (const void *p1, const void *p2)
390 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
391 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
393 return vro1->opcode == vro2->opcode
394 && types_compatible_p (vro1->type, vro2->type)
395 && expressions_equal_p (vro1->op0, vro2->op0)
396 && expressions_equal_p (vro1->op1, vro2->op1)
397 && expressions_equal_p (vro1->op2, vro2->op2);
400 /* Compute the hash for a reference operand VRO1. */
402 static hashval_t
403 vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
405 result = iterative_hash_hashval_t (vro1->opcode, result);
406 if (vro1->op0)
407 result = iterative_hash_expr (vro1->op0, result);
408 if (vro1->op1)
409 result = iterative_hash_expr (vro1->op1, result);
410 if (vro1->op2)
411 result = iterative_hash_expr (vro1->op2, result);
412 return result;
415 /* Return the hashcode for a given reference operation P1. */
417 static hashval_t
418 vn_reference_hash (const void *p1)
420 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
421 return vr1->hashcode;
424 /* Compute a hash for the reference operation VR1 and return it. */
426 hashval_t
427 vn_reference_compute_hash (const vn_reference_t vr1)
429 hashval_t result = 0;
430 int i;
431 vn_reference_op_t vro;
432 HOST_WIDE_INT off = -1;
433 bool deref = false;
435 FOR_EACH_VEC_ELT (vn_reference_op_s, vr1->operands, i, vro)
437 if (vro->opcode == MEM_REF)
438 deref = true;
439 else if (vro->opcode != ADDR_EXPR)
440 deref = false;
441 if (vro->off != -1)
443 if (off == -1)
444 off = 0;
445 off += vro->off;
447 else
449 if (off != -1
450 && off != 0)
451 result = iterative_hash_hashval_t (off, result);
452 off = -1;
453 if (deref
454 && vro->opcode == ADDR_EXPR)
456 if (vro->op0)
458 tree op = TREE_OPERAND (vro->op0, 0);
459 result = iterative_hash_hashval_t (TREE_CODE (op), result);
460 result = iterative_hash_expr (op, result);
463 else
464 result = vn_reference_op_compute_hash (vro, result);
467 if (vr1->vuse)
468 result += SSA_NAME_VERSION (vr1->vuse);
470 return result;
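/* Illustrative sketch (editor's addition).  Runs of operands with
   known constant offsets are hashed as one accumulated offset, so a
   component reference such as s.f and the equivalent MEM[&s + 8]
   (assuming, for the example only, that .f lives at byte offset 8)
   compute the same hash value; the deref/ADDR_EXPR case hashes the
   pointed-to decl s rather than the address expression.  */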
473 /* Return true if reference operations P1 and P2 are equivalent. This
474 means they have the same set of operands and vuses. */
477 vn_reference_eq (const void *p1, const void *p2)
479 unsigned i, j;
481 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
482 const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
486 /* Early out if this is not a hash collision. */
487 if (vr1->hashcode != vr2->hashcode)
488 return false;
490 /* The VOP needs to be the same. */
491 if (vr1->vuse != vr2->vuse)
492 return false;
494 /* If the operands are the same we are done. */
495 if (vr1->operands == vr2->operands)
496 return true;
498 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
499 return false;
501 if (INTEGRAL_TYPE_P (vr1->type)
502 && INTEGRAL_TYPE_P (vr2->type))
504 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
505 return false;
507 else if (INTEGRAL_TYPE_P (vr1->type)
508 && (TYPE_PRECISION (vr1->type)
509 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
510 return false;
511 else if (INTEGRAL_TYPE_P (vr2->type)
512 && (TYPE_PRECISION (vr2->type)
513 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
514 return false;
516 i = 0;
517 j = 0;
520 HOST_WIDE_INT off1 = 0, off2 = 0;
521 vn_reference_op_t vro1, vro2;
522 vn_reference_op_s tem1, tem2;
523 bool deref1 = false, deref2 = false;
524 for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
526 if (vro1->opcode == MEM_REF)
527 deref1 = true;
528 if (vro1->off == -1)
529 break;
530 off1 += vro1->off;
532 for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
534 if (vro2->opcode == MEM_REF)
535 deref2 = true;
536 if (vro2->off == -1)
537 break;
538 off2 += vro2->off;
540 if (off1 != off2)
541 return false;
542 if (deref1 && vro1->opcode == ADDR_EXPR)
544 memset (&tem1, 0, sizeof (tem1));
545 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
546 tem1.type = TREE_TYPE (tem1.op0);
547 tem1.opcode = TREE_CODE (tem1.op0);
548 vro1 = &tem1;
550 if (deref2 && vro2->opcode == ADDR_EXPR)
552 memset (&tem2, 0, sizeof (tem2));
553 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
554 tem2.type = TREE_TYPE (tem2.op0);
555 tem2.opcode = TREE_CODE (tem2.op0);
556 vro2 = &tem2;
558 if (!vn_reference_op_eq (vro1, vro2))
559 return false;
560 ++j;
561 ++i;
563 while (VEC_length (vn_reference_op_s, vr1->operands) != i
564 || VEC_length (vn_reference_op_s, vr2->operands) != j);
566 return true;
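/* Illustrative sketch (editor's addition).  The walk above advances
   through both operand vectors summing constant offsets, so with

     struct S { int x; int y; } s;

   the reference s.y and MEM[(int *)&s + 4] (assuming a 4-byte int)
   compare equal: off1 == off2 == 4, and the ADDR_EXPR operand is
   rewritten via tem2 to the underlying VAR_DECL s before the final
   operand comparison.  */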
569 /* Copy the operations present in load/store REF into RESULT, a vector of
570 vn_reference_op_s's. */
572 void
573 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
575 if (TREE_CODE (ref) == TARGET_MEM_REF)
577 vn_reference_op_s temp;
579 memset (&temp, 0, sizeof (temp));
580 /* We do not care for spurious type qualifications. */
581 temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
582 temp.opcode = TREE_CODE (ref);
583 temp.op0 = TMR_INDEX (ref);
584 temp.op1 = TMR_STEP (ref);
585 temp.op2 = TMR_OFFSET (ref);
586 temp.off = -1;
587 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
589 memset (&temp, 0, sizeof (temp));
590 temp.type = NULL_TREE;
591 temp.opcode = ERROR_MARK;
592 temp.op0 = TMR_INDEX2 (ref);
593 temp.off = -1;
594 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
596 memset (&temp, 0, sizeof (temp));
597 temp.type = NULL_TREE;
598 temp.opcode = TREE_CODE (TMR_BASE (ref));
599 temp.op0 = TMR_BASE (ref);
600 temp.off = -1;
601 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
602 return;
605 /* For non-calls, store the information that makes up the address. */
607 while (ref)
609 vn_reference_op_s temp;
611 memset (&temp, 0, sizeof (temp));
612 /* We do not care for spurious type qualifications. */
613 temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
614 temp.opcode = TREE_CODE (ref);
615 temp.off = -1;
617 switch (temp.opcode)
619 case MEM_REF:
620 /* The base address gets its own vn_reference_op_s structure. */
621 temp.op0 = TREE_OPERAND (ref, 1);
622 if (host_integerp (TREE_OPERAND (ref, 1), 0))
623 temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
624 break;
625 case BIT_FIELD_REF:
626 /* Record bits and position. */
627 temp.op0 = TREE_OPERAND (ref, 1);
628 temp.op1 = TREE_OPERAND (ref, 2);
629 break;
630 case COMPONENT_REF:
631 /* The field decl is enough to unambiguously specify the field,
632 a matching type is not necessary and a mismatching type
633 is always a spurious difference. */
634 temp.type = NULL_TREE;
635 temp.op0 = TREE_OPERAND (ref, 1);
636 temp.op1 = TREE_OPERAND (ref, 2);
638 tree this_offset = component_ref_field_offset (ref);
639 if (this_offset
640 && TREE_CODE (this_offset) == INTEGER_CST)
642 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
643 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
645 double_int off
646 = double_int_add (tree_to_double_int (this_offset),
647 double_int_sdiv
648 (tree_to_double_int (bit_offset),
649 uhwi_to_double_int (BITS_PER_UNIT),
650 TRUNC_DIV_EXPR));
651 if (double_int_fits_in_shwi_p (off))
652 temp.off = off.low;
656 break;
657 case ARRAY_RANGE_REF:
658 case ARRAY_REF:
659 /* Record index as operand. */
660 temp.op0 = TREE_OPERAND (ref, 1);
661 /* Always record lower bounds and element size. */
662 temp.op1 = array_ref_low_bound (ref);
663 temp.op2 = array_ref_element_size (ref);
664 if (TREE_CODE (temp.op0) == INTEGER_CST
665 && TREE_CODE (temp.op1) == INTEGER_CST
666 && TREE_CODE (temp.op2) == INTEGER_CST)
668 double_int off = tree_to_double_int (temp.op0);
669 off = double_int_add (off,
670 double_int_neg
671 (tree_to_double_int (temp.op1)));
672 off = double_int_mul (off, tree_to_double_int (temp.op2));
673 if (double_int_fits_in_shwi_p (off))
674 temp.off = off.low;
676 break;
677 case STRING_CST:
678 case INTEGER_CST:
679 case COMPLEX_CST:
680 case VECTOR_CST:
681 case REAL_CST:
682 case CONSTRUCTOR:
683 case VAR_DECL:
684 case PARM_DECL:
685 case CONST_DECL:
686 case RESULT_DECL:
687 case SSA_NAME:
688 temp.op0 = ref;
689 break;
690 case ADDR_EXPR:
691 if (is_gimple_min_invariant (ref))
693 temp.op0 = ref;
694 break;
696 /* Fallthrough. */
697 /* These are only interesting for their operands, their
698 existence, and their type. They will never be the last
699 ref in the chain of references (i.e. they require an
700 operand), so we don't have to put anything
701 for op* as it will be handled by the iteration.  */
702 case REALPART_EXPR:
703 case VIEW_CONVERT_EXPR:
704 temp.off = 0;
705 break;
706 case IMAGPART_EXPR:
707 /* This is only interesting for its constant offset. */
708 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
709 break;
710 default:
711 gcc_unreachable ();
713 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
715 if (REFERENCE_CLASS_P (ref)
716 || (TREE_CODE (ref) == ADDR_EXPR
717 && !is_gimple_min_invariant (ref)))
718 ref = TREE_OPERAND (ref, 0);
719 else
720 ref = NULL_TREE;
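/* Illustrative sketch (editor's addition).  A reference such as
   s.a[i_1] is decomposed outermost-first into roughly

     { ARRAY_REF      op0 = i_1, op1 = low bound, op2 = elt size }
     { COMPONENT_REF  op0 = FIELD_DECL for a }
     { VAR_DECL       op0 = s }

   with the .off fields filled in wherever the operands are
   constant.  */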
724 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
725 operands in *OPS, the reference alias set SET and the reference type TYPE.
726 Return true if something useful was produced. */
728 bool
729 ao_ref_init_from_vn_reference (ao_ref *ref,
730 alias_set_type set, tree type,
731 VEC (vn_reference_op_s, heap) *ops)
733 vn_reference_op_t op;
734 unsigned i;
735 tree base = NULL_TREE;
736 tree *op0_p = &base;
737 HOST_WIDE_INT offset = 0;
738 HOST_WIDE_INT max_size;
739 HOST_WIDE_INT size = -1;
740 tree size_tree = NULL_TREE;
741 alias_set_type base_alias_set = -1;
743 /* First get the final access size from just the outermost expression. */
744 op = VEC_index (vn_reference_op_s, ops, 0);
745 if (op->opcode == COMPONENT_REF)
746 size_tree = DECL_SIZE (op->op0);
747 else if (op->opcode == BIT_FIELD_REF)
748 size_tree = op->op0;
749 else
751 enum machine_mode mode = TYPE_MODE (type);
752 if (mode == BLKmode)
753 size_tree = TYPE_SIZE (type);
754 else
755 size = GET_MODE_BITSIZE (mode);
757 if (size_tree != NULL_TREE)
759 if (!host_integerp (size_tree, 1))
760 size = -1;
761 else
762 size = TREE_INT_CST_LOW (size_tree);
765 /* Initially, maxsize is the same as the accessed element size.
766 In the following it will only grow (or become -1). */
767 max_size = size;
769 /* Compute cumulative bit-offset for nested component-refs and array-refs,
770 and find the ultimate containing object. */
771 FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
773 switch (op->opcode)
775 /* These may be in the reference ops, but we cannot do anything
776 sensible with them here. */
777 case ADDR_EXPR:
778 /* Apart from ADDR_EXPR arguments to MEM_REF. */
779 if (base != NULL_TREE
780 && TREE_CODE (base) == MEM_REF
781 && op->op0
782 && DECL_P (TREE_OPERAND (op->op0, 0)))
784 vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
785 base = TREE_OPERAND (op->op0, 0);
786 if (pop->off == -1)
788 max_size = -1;
789 offset = 0;
791 else
792 offset += pop->off * BITS_PER_UNIT;
793 op0_p = NULL;
794 break;
796 /* Fallthru. */
797 case CALL_EXPR:
798 return false;
800 /* Record the base objects. */
801 case MEM_REF:
802 base_alias_set = get_deref_alias_set (op->op0);
803 *op0_p = build2 (MEM_REF, op->type,
804 NULL_TREE, op->op0);
805 op0_p = &TREE_OPERAND (*op0_p, 0);
806 break;
808 case VAR_DECL:
809 case PARM_DECL:
810 case RESULT_DECL:
811 case SSA_NAME:
812 *op0_p = op->op0;
813 op0_p = NULL;
814 break;
816 /* And now the usual component-reference style ops. */
817 case BIT_FIELD_REF:
818 offset += tree_low_cst (op->op1, 0);
819 break;
821 case COMPONENT_REF:
823 tree field = op->op0;
824 /* We do not have a complete COMPONENT_REF tree here so we
825 cannot use component_ref_field_offset. Do the interesting
826 parts manually. */
828 if (op->op1
829 || !host_integerp (DECL_FIELD_OFFSET (field), 1))
830 max_size = -1;
831 else
833 offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
834 * BITS_PER_UNIT);
835 offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
837 break;
840 case ARRAY_RANGE_REF:
841 case ARRAY_REF:
842 /* We recorded the lower bound and the element size. */
843 if (!host_integerp (op->op0, 0)
844 || !host_integerp (op->op1, 0)
845 || !host_integerp (op->op2, 0))
846 max_size = -1;
847 else
849 HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
850 hindex -= TREE_INT_CST_LOW (op->op1);
851 hindex *= TREE_INT_CST_LOW (op->op2);
852 hindex *= BITS_PER_UNIT;
853 offset += hindex;
855 break;
857 case REALPART_EXPR:
858 break;
860 case IMAGPART_EXPR:
861 offset += size;
862 break;
864 case VIEW_CONVERT_EXPR:
865 break;
867 case STRING_CST:
868 case INTEGER_CST:
869 case COMPLEX_CST:
870 case VECTOR_CST:
871 case REAL_CST:
872 case CONSTRUCTOR:
873 case CONST_DECL:
874 return false;
876 default:
877 return false;
881 if (base == NULL_TREE)
882 return false;
884 ref->ref = NULL_TREE;
885 ref->base = base;
886 ref->offset = offset;
887 ref->size = size;
888 ref->max_size = max_size;
889 ref->ref_alias_set = set;
890 if (base_alias_set != -1)
891 ref->base_alias_set = base_alias_set;
892 else
893 ref->base_alias_set = get_alias_set (base);
895 return true;
898 /* Copy the operations present in load/store/call REF into RESULT, a vector of
899 vn_reference_op_s's. */
901 void
902 copy_reference_ops_from_call (gimple call,
903 VEC(vn_reference_op_s, heap) **result)
905 vn_reference_op_s temp;
906 unsigned i;
908 /* Copy the type, opcode, function being called and static chain. */
909 memset (&temp, 0, sizeof (temp));
910 temp.type = gimple_call_return_type (call);
911 temp.opcode = CALL_EXPR;
912 temp.op0 = gimple_call_fn (call);
913 temp.op1 = gimple_call_chain (call);
914 temp.off = -1;
915 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
917 /* Copy the call arguments. As they can be references as well,
918 just chain them together. */
919 for (i = 0; i < gimple_call_num_args (call); ++i)
921 tree callarg = gimple_call_arg (call, i);
922 copy_reference_ops_from_ref (callarg, result);
926 /* Create a vector of vn_reference_op_s structures from REF, a
927 REFERENCE_CLASS_P tree. The vector is not shared. */
929 static VEC(vn_reference_op_s, heap) *
930 create_reference_ops_from_ref (tree ref)
932 VEC (vn_reference_op_s, heap) *result = NULL;
934 copy_reference_ops_from_ref (ref, &result);
935 return result;
938 /* Create a vector of vn_reference_op_s structures from CALL, a
939 call statement. The vector is not shared. */
941 static VEC(vn_reference_op_s, heap) *
942 create_reference_ops_from_call (gimple call)
944 VEC (vn_reference_op_s, heap) *result = NULL;
946 copy_reference_ops_from_call (call, &result);
947 return result;
950 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
951 *I_P to point to the last element of the replacement. */
952 void
953 vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
954 unsigned int *i_p)
956 unsigned int i = *i_p;
957 vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
958 vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
959 tree addr_base;
960 HOST_WIDE_INT addr_offset;
962 /* The only thing we have to do is from &OBJ.foo.bar add the offset
963 from .foo.bar to the preceding MEM_REF offset and replace the
964 address with &OBJ. */
965 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
966 &addr_offset);
967 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
968 if (addr_base != op->op0)
970 double_int off = tree_to_double_int (mem_op->op0);
971 off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
972 off = double_int_add (off, shwi_to_double_int (addr_offset));
973 mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
974 op->op0 = build_fold_addr_expr (addr_base);
975 if (host_integerp (mem_op->op0, 0))
976 mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
977 else
978 mem_op->off = -1;
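/* Illustrative sketch (editor's addition).  Assuming, for the
   example only, that field f sits at byte offset 8, the pair of
   operands for MEM[&s.f, 4] is rewritten so that the address becomes
   &s and the MEM_REF offset becomes 12, i.e. the access is
   canonicalized to MEM[&s, 12].  */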
982 /* As above, but look through the defining statement of the address
983 SSA name.  Updates *I_P to point to the last element of the replacement. */
984 static void
985 vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
986 unsigned int *i_p)
988 unsigned int i = *i_p;
989 vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
990 vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
991 gimple def_stmt;
992 enum tree_code code;
993 double_int off;
995 def_stmt = SSA_NAME_DEF_STMT (op->op0);
996 if (!is_gimple_assign (def_stmt))
997 return;
999 code = gimple_assign_rhs_code (def_stmt);
1000 if (code != ADDR_EXPR
1001 && code != POINTER_PLUS_EXPR)
1002 return;
1004 off = tree_to_double_int (mem_op->op0);
1005 off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
1007 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1008 from .foo.bar to the preceding MEM_REF offset and replace the
1009 address with &OBJ. */
1010 if (code == ADDR_EXPR)
1012 tree addr, addr_base;
1013 HOST_WIDE_INT addr_offset;
1015 addr = gimple_assign_rhs1 (def_stmt);
1016 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1017 &addr_offset);
1018 if (!addr_base
1019 || TREE_CODE (addr_base) != MEM_REF)
1020 return;
1022 off = double_int_add (off, shwi_to_double_int (addr_offset));
1023 off = double_int_add (off, mem_ref_offset (addr_base));
1024 op->op0 = TREE_OPERAND (addr_base, 0);
1026 else
1028 tree ptr, ptroff;
1029 ptr = gimple_assign_rhs1 (def_stmt);
1030 ptroff = gimple_assign_rhs2 (def_stmt);
1031 if (TREE_CODE (ptr) != SSA_NAME
1032 || TREE_CODE (ptroff) != INTEGER_CST)
1033 return;
1035 off = double_int_add (off, tree_to_double_int (ptroff));
1036 op->op0 = ptr;
1039 mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
1040 if (host_integerp (mem_op->op0, 0))
1041 mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
1042 else
1043 mem_op->off = -1;
1044 if (TREE_CODE (op->op0) == SSA_NAME)
1045 op->op0 = SSA_VAL (op->op0);
1046 if (TREE_CODE (op->op0) != SSA_NAME)
1047 op->opcode = TREE_CODE (op->op0);
1049 /* And recurse. */
1050 if (TREE_CODE (op->op0) == SSA_NAME)
1051 vn_reference_maybe_forwprop_address (ops, i_p);
1052 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1053 vn_reference_fold_indirect (ops, i_p);
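/* Illustrative sketch (editor's addition).  If the address operand
   of MEM[p_1, 4] is defined by the pointer arithmetic

     p_1 = q_2 + 8;

   the POINTER_PLUS_EXPR case folds the dereference to MEM[q_2, 12]
   and then recurses in case q_2's definition can be looked through
   as well.  */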
1056 /* Optimize the reference REF to a constant if possible or return
1057 NULL_TREE if not. */
1059 tree
1060 fully_constant_vn_reference_p (vn_reference_t ref)
1062 VEC (vn_reference_op_s, heap) *operands = ref->operands;
1063 vn_reference_op_t op;
1065 /* Try to simplify the translated expression if it is
1066 a call to a builtin function with at most two arguments. */
1067 op = VEC_index (vn_reference_op_s, operands, 0);
1068 if (op->opcode == CALL_EXPR
1069 && TREE_CODE (op->op0) == ADDR_EXPR
1070 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1071 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1072 && VEC_length (vn_reference_op_s, operands) >= 2
1073 && VEC_length (vn_reference_op_s, operands) <= 3)
1075 vn_reference_op_t arg0, arg1 = NULL;
1076 bool anyconst = false;
1077 arg0 = VEC_index (vn_reference_op_s, operands, 1);
1078 if (VEC_length (vn_reference_op_s, operands) > 2)
1079 arg1 = VEC_index (vn_reference_op_s, operands, 2);
1080 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1081 || (arg0->opcode == ADDR_EXPR
1082 && is_gimple_min_invariant (arg0->op0)))
1083 anyconst = true;
1084 if (arg1
1085 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1086 || (arg1->opcode == ADDR_EXPR
1087 && is_gimple_min_invariant (arg1->op0))))
1088 anyconst = true;
1089 if (anyconst)
1091 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1092 arg1 ? 2 : 1,
1093 arg0->op0,
1094 arg1 ? arg1->op0 : NULL);
1095 if (folded
1096 && TREE_CODE (folded) == NOP_EXPR)
1097 folded = TREE_OPERAND (folded, 0);
1098 if (folded
1099 && is_gimple_min_invariant (folded))
1100 return folded;
1104 /* Simplify reads from constant strings. */
1105 else if (op->opcode == ARRAY_REF
1106 && TREE_CODE (op->op0) == INTEGER_CST
1107 && integer_zerop (op->op1)
1108 && VEC_length (vn_reference_op_s, operands) == 2)
1110 vn_reference_op_t arg0;
1111 arg0 = VEC_index (vn_reference_op_s, operands, 1);
1112 if (arg0->opcode == STRING_CST
1113 && (TYPE_MODE (op->type)
1114 == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
1115 && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
1116 && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
1117 && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
1118 return build_int_cst_type (op->type,
1119 (TREE_STRING_POINTER (arg0->op0)
1120 [TREE_INT_CST_LOW (op->op0)]));
1123 return NULL_TREE;
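/* Illustrative sketch (editor's addition).  A read such as "foo"[2]
   matches the ARRAY_REF-of-STRING_CST pattern above and folds to the
   character constant 'o'; likewise a call like
   __builtin_strlen ("foo"), whose only argument is constant, can
   fold to 3 through build_call_expr.  */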
1126 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1127 structures into their value numbers. This is done in-place, and
1128 the vector passed in is returned. */
1130 static VEC (vn_reference_op_s, heap) *
1131 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
1133 vn_reference_op_t vro;
1134 unsigned int i;
1136 FOR_EACH_VEC_ELT (vn_reference_op_s, orig, i, vro)
1138 if (vro->opcode == SSA_NAME
1139 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1141 vro->op0 = SSA_VAL (vro->op0);
1142 /* If it transforms from an SSA_NAME to a constant, update
1143 the opcode. */
1144 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1145 vro->opcode = TREE_CODE (vro->op0);
1147 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1148 vro->op1 = SSA_VAL (vro->op1);
1149 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1150 vro->op2 = SSA_VAL (vro->op2);
1151 /* If it transforms from an SSA_NAME to an address, fold with
1152 a preceding indirect reference. */
1153 if (i > 0
1154 && vro->op0
1155 && TREE_CODE (vro->op0) == ADDR_EXPR
1156 && VEC_index (vn_reference_op_s,
1157 orig, i - 1)->opcode == MEM_REF)
1158 vn_reference_fold_indirect (&orig, &i);
1159 else if (i > 0
1160 && vro->opcode == SSA_NAME
1161 && VEC_index (vn_reference_op_s,
1162 orig, i - 1)->opcode == MEM_REF)
1163 vn_reference_maybe_forwprop_address (&orig, &i);
1164 /* If it transforms a non-constant ARRAY_REF into a constant
1165 one, adjust the constant offset. */
1166 else if (vro->opcode == ARRAY_REF
1167 && vro->off == -1
1168 && TREE_CODE (vro->op0) == INTEGER_CST
1169 && TREE_CODE (vro->op1) == INTEGER_CST
1170 && TREE_CODE (vro->op2) == INTEGER_CST)
1172 double_int off = tree_to_double_int (vro->op0);
1173 off = double_int_add (off,
1174 double_int_neg
1175 (tree_to_double_int (vro->op1)));
1176 off = double_int_mul (off, tree_to_double_int (vro->op2));
1177 if (double_int_fits_in_shwi_p (off))
1178 vro->off = off.low;
1182 return orig;
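/* Illustrative sketch (editor's addition).  If SSA_VAL (i_1) is the
   constant 3, valueizing the operands of a[i_1] replaces the
   ARRAY_REF's op0 with 3; the lower bound and element size being
   constant already, the last branch above then fills in the
   now-constant .off field.  A bare SSA_NAME operand whose value is a
   constant additionally gets its opcode updated.  */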
1185 static VEC(vn_reference_op_s, heap) *shared_lookup_references;
1187 /* Create a vector of vn_reference_op_s structures from REF, a
1188 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1189 this function. */
1191 static VEC(vn_reference_op_s, heap) *
1192 valueize_shared_reference_ops_from_ref (tree ref)
1194 if (!ref)
1195 return NULL;
1196 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
1197 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1198 shared_lookup_references = valueize_refs (shared_lookup_references);
1199 return shared_lookup_references;
1202 /* Create a vector of vn_reference_op_s structures from CALL, a
1203 call statement. The vector is shared among all callers of
1204 this function. */
1206 static VEC(vn_reference_op_s, heap) *
1207 valueize_shared_reference_ops_from_call (gimple call)
1209 if (!call)
1210 return NULL;
1211 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
1212 copy_reference_ops_from_call (call, &shared_lookup_references);
1213 shared_lookup_references = valueize_refs (shared_lookup_references);
1214 return shared_lookup_references;
1217 /* Lookup a SCCVN reference operation VR in the current hash table.
1218 Returns the resulting value number if it exists in the hash table,
1219 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1220 vn_reference_t stored in the hashtable if something is found. */
1222 static tree
1223 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1225 void **slot;
1226 hashval_t hash;
1228 hash = vr->hashcode;
1229 slot = htab_find_slot_with_hash (current_info->references, vr,
1230 hash, NO_INSERT);
1231 if (!slot && current_info == optimistic_info)
1232 slot = htab_find_slot_with_hash (valid_info->references, vr,
1233 hash, NO_INSERT);
1234 if (slot)
1236 if (vnresult)
1237 *vnresult = (vn_reference_t)*slot;
1238 return ((vn_reference_t)*slot)->result;
1241 return NULL_TREE;
1244 static tree *last_vuse_ptr;
1246 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1247 with the current VUSE and performs the expression lookup. */
1249 static void *
1250 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
1252 vn_reference_t vr = (vn_reference_t)vr_;
1253 void **slot;
1254 hashval_t hash;
1256 if (last_vuse_ptr)
1257 *last_vuse_ptr = vuse;
1259 /* Fixup vuse and hash. */
1260 if (vr->vuse)
1261 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1262 vr->vuse = SSA_VAL (vuse);
1263 if (vr->vuse)
1264 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1266 hash = vr->hashcode;
1267 slot = htab_find_slot_with_hash (current_info->references, vr,
1268 hash, NO_INSERT);
1269 if (!slot && current_info == optimistic_info)
1270 slot = htab_find_slot_with_hash (valid_info->references, vr,
1271 hash, NO_INSERT);
1272 if (slot)
1273 return *slot;
1275 return NULL;
1278 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1279 from the statement defining VUSE and if not successful tries to
1280 translate *REFP and VR_ through an aggregate copy at the definition
1281 of VUSE. */
1283 static void *
1284 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
1286 vn_reference_t vr = (vn_reference_t)vr_;
1287 gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
1288 tree fndecl;
1289 tree base;
1290 HOST_WIDE_INT offset, maxsize;
1292 /* First try to disambiguate after value-replacing in the definition's LHS. */
1293 if (is_gimple_assign (def_stmt))
1295 tree lhs = gimple_assign_lhs (def_stmt);
1296 ao_ref ref1;
1297 VEC (vn_reference_op_s, heap) *operands = NULL;
1298 bool res = true;
1299 copy_reference_ops_from_ref (lhs, &operands);
1300 operands = valueize_refs (operands);
1301 if (ao_ref_init_from_vn_reference (&ref1, get_alias_set (lhs),
1302 TREE_TYPE (lhs), operands))
1303 res = refs_may_alias_p_1 (ref, &ref1, true);
1304 VEC_free (vn_reference_op_s, heap, operands);
1305 if (!res)
1306 return NULL;
1309 base = ao_ref_base (ref);
1310 offset = ref->offset;
1311 maxsize = ref->max_size;
1313 /* If we cannot constrain the size of the reference we cannot
1314 test if anything kills it. */
1315 if (maxsize == -1)
1316 return (void *)-1;
1318 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1319 from that definition.
1320 1) Memset. */
1321 if (is_gimple_reg_type (vr->type)
1322 && is_gimple_call (def_stmt)
1323 && (fndecl = gimple_call_fndecl (def_stmt))
1324 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1325 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
1326 && integer_zerop (gimple_call_arg (def_stmt, 1))
1327 && host_integerp (gimple_call_arg (def_stmt, 2), 1)
1328 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1330 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1331 tree base2;
1332 HOST_WIDE_INT offset2, size2, maxsize2;
1333 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
1334 size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
1335 if ((unsigned HOST_WIDE_INT)size2 / 8
1336 == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
1337 && operand_equal_p (base, base2, 0)
1338 && offset2 <= offset
1339 && offset2 + size2 >= offset + maxsize)
1341 tree val = fold_convert (vr->type, integer_zero_node);
1342 unsigned int value_id = get_or_alloc_constant_value_id (val);
1343 return vn_reference_insert_pieces (vuse, vr->set, vr->type,
1344 VEC_copy (vn_reference_op_s,
1345 heap, vr->operands),
1346 val, value_id);
1350 /* 2) Assignment from an empty CONSTRUCTOR. */
1351 else if (is_gimple_reg_type (vr->type)
1352 && gimple_assign_single_p (def_stmt)
1353 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1354 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1356 tree base2;
1357 HOST_WIDE_INT offset2, size2, maxsize2;
1358 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1359 &offset2, &size2, &maxsize2);
1360 if (operand_equal_p (base, base2, 0)
1361 && offset2 <= offset
1362 && offset2 + size2 >= offset + maxsize)
1364 tree val = fold_convert (vr->type, integer_zero_node);
1365 unsigned int value_id = get_or_alloc_constant_value_id (val);
1366 return vn_reference_insert_pieces (vuse, vr->set, vr->type,
1367 VEC_copy (vn_reference_op_s,
1368 heap, vr->operands),
1369 val, value_id);
1373 /* For aggregate copies translate the reference through them if
1374 the copy kills ref. */
1375 else if (gimple_assign_single_p (def_stmt)
1376 && (DECL_P (gimple_assign_rhs1 (def_stmt))
1377 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
1378 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1380 tree base2;
1381 HOST_WIDE_INT offset2, size2, maxsize2;
1382 int i, j;
1383 VEC (vn_reference_op_s, heap) *lhs = NULL, *rhs = NULL;
1384 vn_reference_op_t vro;
1385 ao_ref r;
1387 /* See if the assignment kills REF. */
1388 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1389 &offset2, &size2, &maxsize2);
1390 if (!operand_equal_p (base, base2, 0)
1391 || offset2 > offset
1392 || offset2 + size2 < offset + maxsize)
1393 return (void *)-1;
1395 /* Find the common base of ref and the lhs. */
1396 copy_reference_ops_from_ref (gimple_assign_lhs (def_stmt), &lhs);
1397 i = VEC_length (vn_reference_op_s, vr->operands) - 1;
1398 j = VEC_length (vn_reference_op_s, lhs) - 1;
1399 while (j >= 0 && i >= 0
1400 && vn_reference_op_eq (VEC_index (vn_reference_op_s,
1401 vr->operands, i),
1402 VEC_index (vn_reference_op_s, lhs, j)))
1404 i--;
1405 j--;
1408 VEC_free (vn_reference_op_s, heap, lhs);
1409 /* i now points to the first additional op.
1410 ??? LHS may not be completely contained in VR, one or more
1411 VIEW_CONVERT_EXPRs could be in its way. We could at least
1412 try handling outermost VIEW_CONVERT_EXPRs. */
1413 if (j != -1)
1414 return (void *)-1;
1416 /* Now re-write REF to be based on the rhs of the assignment. */
1417 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1418 /* We need to prepend vr->operands[0..i] to rhs. */
1419 if (i + 1 + VEC_length (vn_reference_op_s, rhs)
1420 > VEC_length (vn_reference_op_s, vr->operands))
1422 VEC (vn_reference_op_s, heap) *old = vr->operands;
1423 VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
1424 i + 1 + VEC_length (vn_reference_op_s, rhs));
1425 if (old == shared_lookup_references
1426 && vr->operands != old)
1427 shared_lookup_references = NULL;
1429 else
1430 VEC_truncate (vn_reference_op_s, vr->operands,
1431 i + 1 + VEC_length (vn_reference_op_s, rhs));
1432 FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
1433 VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
1434 VEC_free (vn_reference_op_s, heap, rhs);
1435 vr->hashcode = vn_reference_compute_hash (vr);
1437 /* Adjust *ref from the new operands. */
1438 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1439 return (void *)-1;
1440 /* This can happen with bitfields. */
1441 if (ref->size != r.size)
1442 return (void *)-1;
1443 *ref = r;
1445 /* Do not update last seen VUSE after translating. */
1446 last_vuse_ptr = NULL;
1448 /* Keep looking for the adjusted *REF / VR pair. */
1449 return NULL;
1452 /* Bail out and stop walking. */
1453 return (void *)-1;
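/* Illustrative sketch (editor's addition).  For

     memset (&a, 0, sizeof (a));
     ... = a.x;

   case 1) above notices that the zeroing store covers the load and
   records the constant 0, converted to a.x's type, as the value of
   the reference.  */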
1456 /* Lookup a reference operation by its parts, in the current hash table.
1457 Returns the resulting value number if it exists in the hash table,
1458 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1459 vn_reference_t stored in the hashtable if something is found. */
1461 tree
1462 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
1463 VEC (vn_reference_op_s, heap) *operands,
1464 vn_reference_t *vnresult, bool maywalk)
1466 struct vn_reference_s vr1;
1467 vn_reference_t tmp;
1468 tree cst;
1470 if (!vnresult)
1471 vnresult = &tmp;
1472 *vnresult = NULL;
1474 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1475 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
1476 VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
1477 VEC_length (vn_reference_op_s, operands));
1478 memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
1479 VEC_address (vn_reference_op_s, operands),
1480 sizeof (vn_reference_op_s)
1481 * VEC_length (vn_reference_op_s, operands));
1482 vr1.operands = operands = shared_lookup_references
1483 = valueize_refs (shared_lookup_references);
1484 vr1.type = type;
1485 vr1.set = set;
1486 vr1.hashcode = vn_reference_compute_hash (&vr1);
1487 if ((cst = fully_constant_vn_reference_p (&vr1)))
1488 return cst;
1490 vn_reference_lookup_1 (&vr1, vnresult);
1491 if (!*vnresult
1492 && maywalk
1493 && vr1.vuse)
1495 ao_ref r;
1496 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
1497 *vnresult =
1498 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1499 vn_reference_lookup_2,
1500 vn_reference_lookup_3, &vr1);
1501 if (vr1.operands != operands)
1502 VEC_free (vn_reference_op_s, heap, vr1.operands);
1505 if (*vnresult)
1506 return (*vnresult)->result;
1508 return NULL_TREE;
1511 /* Lookup OP in the current hash table, and return the resulting value
1512 number if it exists in the hash table. Return NULL_TREE if it does
1513 not exist in the hash table or if the result field of the structure
1514 was NULL. VNRESULT will be filled in with the vn_reference_t
1515 stored in the hashtable if one exists. */
1517 tree
1518 vn_reference_lookup (tree op, tree vuse, bool maywalk,
1519 vn_reference_t *vnresult)
1521 VEC (vn_reference_op_s, heap) *operands;
1522 struct vn_reference_s vr1;
1523 tree cst;
1525 if (vnresult)
1526 *vnresult = NULL;
1528 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1529 vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
1530 vr1.type = TREE_TYPE (op);
1531 vr1.set = get_alias_set (op);
1532 vr1.hashcode = vn_reference_compute_hash (&vr1);
1533 if ((cst = fully_constant_vn_reference_p (&vr1)))
1534 return cst;
1536 if (maywalk
1537 && vr1.vuse)
1539 vn_reference_t wvnresult;
1540 ao_ref r;
1541 ao_ref_init (&r, op);
1542 wvnresult =
1543 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1544 vn_reference_lookup_2,
1545 vn_reference_lookup_3, &vr1);
1546 if (vr1.operands != operands)
1547 VEC_free (vn_reference_op_s, heap, vr1.operands);
1548 if (wvnresult)
1550 if (vnresult)
1551 *vnresult = wvnresult;
1552 return wvnresult->result;
1555 return NULL_TREE;
1558 return vn_reference_lookup_1 (&vr1, vnresult);
1562 /* Insert OP into the current hash table with a value number of
1563 RESULT, and return the resulting reference structure we created. */
1565 vn_reference_t
1566 vn_reference_insert (tree op, tree result, tree vuse)
1568 void **slot;
1569 vn_reference_t vr1;
1571 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1572 if (TREE_CODE (result) == SSA_NAME)
1573 vr1->value_id = VN_INFO (result)->value_id;
1574 else
1575 vr1->value_id = get_or_alloc_constant_value_id (result);
1576 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1577 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
1578 vr1->type = TREE_TYPE (op);
1579 vr1->set = get_alias_set (op);
1580 vr1->hashcode = vn_reference_compute_hash (vr1);
1581 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
1583 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1584 INSERT);
1586 /* Because we lookup stores using vuses, and value number failures
1587 using the vdefs (see visit_reference_op_store for how and why),
1588 it's possible that on failure we may try to insert an already
1589 inserted store. This is not wrong, there is no ssa name for a
1590 store that we could use as a differentiator anyway. Thus, unlike
1591 the other lookup functions, you cannot gcc_assert (!*slot)
1592 here. */
1594 /* But free the old slot in case of a collision. */
1595 if (*slot)
1596 free_reference (*slot);
1598 *slot = vr1;
1599 return vr1;
1602 /* Insert a reference by its pieces into the current hash table with
1603 a value number of RESULT. Return the resulting reference
1604 structure we created. */
1606 vn_reference_t
1607 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
1608 VEC (vn_reference_op_s, heap) *operands,
1609 tree result, unsigned int value_id)
1612 void **slot;
1613 vn_reference_t vr1;
1615 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1616 vr1->value_id = value_id;
1617 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1618 vr1->operands = valueize_refs (operands);
1619 vr1->type = type;
1620 vr1->set = set;
1621 vr1->hashcode = vn_reference_compute_hash (vr1);
1622 if (result && TREE_CODE (result) == SSA_NAME)
1623 result = SSA_VAL (result);
1624 vr1->result = result;
1626 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1627 INSERT);
1629 /* At this point we should have all the things inserted that we have
1630 seen before, and we should never try inserting something that
1631 already exists. */
1632 gcc_assert (!*slot);
1633 if (*slot)
1634 free_reference (*slot);
1636 *slot = vr1;
1637 return vr1;
1640 /* Compute and return the hash value for nary operation VNO1. */
1642 hashval_t
1643 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
1645 hashval_t hash;
1646 unsigned i;
1648 for (i = 0; i < vno1->length; ++i)
1649 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
1650 vno1->op[i] = SSA_VAL (vno1->op[i]);
1652 if (vno1->length == 2
1653 && commutative_tree_code (vno1->opcode)
1654 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
1656 tree temp = vno1->op[0];
1657 vno1->op[0] = vno1->op[1];
1658 vno1->op[1] = temp;
1661 hash = iterative_hash_hashval_t (vno1->opcode, 0);
1662 for (i = 0; i < vno1->length; ++i)
1663 hash = iterative_hash_expr (vno1->op[i], hash);
1665 return hash;
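/* Illustrative sketch (editor's addition).  Because commutative
   operands are put into canonical order before hashing, b_2 + a_1
   and a_1 + b_2 produce the same hash value and compare equal, so
   both orders share a single entry in the nary table.  */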
1668 /* Return the computed hashcode for nary operation P1. */
1670 static hashval_t
1671 vn_nary_op_hash (const void *p1)
1673 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
1674 return vno1->hashcode;
1677 /* Compare nary operations P1 and P2 and return true if they are
1678 equivalent. */
1681 vn_nary_op_eq (const void *p1, const void *p2)
1683 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
1684 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
1685 unsigned i;
1687 if (vno1->hashcode != vno2->hashcode)
1688 return false;
1690 if (vno1->opcode != vno2->opcode
1691 || !types_compatible_p (vno1->type, vno2->type))
1692 return false;
1694 for (i = 0; i < vno1->length; ++i)
1695 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
1696 return false;
1698 return true;
1701 /* Initialize VNO from the pieces provided. */
1703 static void
1704 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
1705 enum tree_code code, tree type, tree op0,
1706 tree op1, tree op2, tree op3)
1708 vno->opcode = code;
1709 vno->length = length;
1710 vno->type = type;
1711 switch (length)
1713 /* The fallthrus here are deliberate. */
1714 case 4: vno->op[3] = op3;
1715 case 3: vno->op[2] = op2;
1716 case 2: vno->op[1] = op1;
1717 case 1: vno->op[0] = op0;
1718 default:
1719 break;
1723 /* Initialize VNO from OP. */
1725 static void
1726 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
1728 unsigned i;
1730 vno->opcode = TREE_CODE (op);
1731 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
1732 vno->type = TREE_TYPE (op);
1733 for (i = 0; i < vno->length; ++i)
1734 vno->op[i] = TREE_OPERAND (op, i);
1737 /* Initialize VNO from STMT. */
1739 static void
1740 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
1742 unsigned i;
1744 vno->opcode = gimple_assign_rhs_code (stmt);
1745 vno->length = gimple_num_ops (stmt) - 1;
1746 vno->type = gimple_expr_type (stmt);
1747 for (i = 0; i < vno->length; ++i)
1748 vno->op[i] = gimple_op (stmt, i + 1);
1749 if (vno->opcode == REALPART_EXPR
1750 || vno->opcode == IMAGPART_EXPR
1751 || vno->opcode == VIEW_CONVERT_EXPR)
1752 vno->op[0] = TREE_OPERAND (vno->op[0], 0);
1755 /* Compute the hashcode for VNO and look for it in the hash table;
1756 return the resulting value number if it exists in the hash table.
1757 Return NULL_TREE if it does not exist in the hash table or if the
1758 result field of the operation is NULL. VNRESULT will contain the
1759 vn_nary_op_t from the hashtable if it exists. */
1761 static tree
1762 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
1764 void **slot;
1766 if (vnresult)
1767 *vnresult = NULL;
1769 vno->hashcode = vn_nary_op_compute_hash (vno);
1770 slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
1771 NO_INSERT);
1772 if (!slot && current_info == optimistic_info)
1773 slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
1774 NO_INSERT);
1775 if (!slot)
1776 return NULL_TREE;
1777 if (vnresult)
1778 *vnresult = (vn_nary_op_t)*slot;
1779 return ((vn_nary_op_t)*slot)->result;
1782 /* Lookup an n-ary operation by its pieces and return the resulting value
1783 number if it exists in the hash table. Return NULL_TREE if it does
1784 not exist in the hash table or if the result field of the operation
1785 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
1786 if it exists. */
1788 tree
1789 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
1790 tree type, tree op0, tree op1, tree op2,
1791 tree op3, vn_nary_op_t *vnresult)
1793 struct vn_nary_op_s vno1;
1794 init_vn_nary_op_from_pieces (&vno1, length, code, type, op0, op1, op2, op3);
1795 return vn_nary_op_lookup_1 (&vno1, vnresult);
1798 /* Lookup OP in the current hash table, and return the resulting value
1799 number if it exists in the hash table. Return NULL_TREE if it does
1800 not exist in the hash table or if the result field of the operation
1801 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
1802 if it exists. */
1804 tree
1805 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
1807 struct vn_nary_op_s vno1;
1808 init_vn_nary_op_from_op (&vno1, op);
1809 return vn_nary_op_lookup_1 (&vno1, vnresult);
1812 /* Lookup the rhs of STMT in the current hash table, and return the resulting
1813 value number if it exists in the hash table. Return NULL_TREE if
1814 it does not exist in the hash table. VNRESULT will contain the
1815 vn_nary_op_t from the hashtable if it exists. */
1817 tree
1818 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
1820 struct vn_nary_op_s vno1;
1821 init_vn_nary_op_from_stmt (&vno1, stmt);
1822 return vn_nary_op_lookup_1 (&vno1, vnresult);
1825 /* Return the size of a vn_nary_op_t with LENGTH operands. */
1827 static size_t
1828 sizeof_vn_nary_op (unsigned int length)
1830 return sizeof (struct vn_nary_op_s) - sizeof (tree) * (4 - length);
1833 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
1835 static vn_nary_op_t
1836 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
1838 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
1841 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
1842 obstack. */
1844 static vn_nary_op_t
1845 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
1847 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
1848 &current_info->nary_obstack);
1850 vno1->value_id = value_id;
1851 vno1->length = length;
1852 vno1->result = result;
1854 return vno1;
1857 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
1858 VNO->HASHCODE first. */
1860 static vn_nary_op_t
1861 vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash)
1863 void **slot;
1865 if (compute_hash)
1866 vno->hashcode = vn_nary_op_compute_hash (vno);
1868 slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT);
1869 gcc_assert (!*slot);
1871 *slot = vno;
1872 return vno;
1875 /* Insert an n-ary operation into the current hash table using its
1876 pieces. Return the vn_nary_op_t structure we created and put in
1877 the hashtable. */
1879 vn_nary_op_t
1880 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
1881 tree type, tree op0,
1882 tree op1, tree op2, tree op3,
1883 tree result,
1884 unsigned int value_id)
1886 vn_nary_op_t vno1;
1888 vno1 = alloc_vn_nary_op (length, result, value_id);
1889 init_vn_nary_op_from_pieces (vno1, length, code, type, op0, op1, op2, op3);
1890 return vn_nary_op_insert_into (vno1, current_info->nary, true);
1893 /* Insert OP into the current hash table with a value number of
1894 RESULT. Return the vn_nary_op_t structure we created and put in
1895 the hashtable. */
1897 vn_nary_op_t
1898 vn_nary_op_insert (tree op, tree result)
1900 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
1901 vn_nary_op_t vno1;
1903 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
1904 init_vn_nary_op_from_op (vno1, op);
1905 return vn_nary_op_insert_into (vno1, current_info->nary, true);
1908 /* Insert the rhs of STMT into the current hash table with a value number of
1909 RESULT. */
1911 vn_nary_op_t
1912 vn_nary_op_insert_stmt (gimple stmt, tree result)
1914 unsigned length = gimple_num_ops (stmt) - 1;
1915 vn_nary_op_t vno1;
1917 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
1918 init_vn_nary_op_from_stmt (vno1, stmt);
1919 return vn_nary_op_insert_into (vno1, current_info->nary, true);
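/* Sketch of the insertion above (hypothetical SSA names): for

     x_1 = a_2 + b_3;

   gimple_num_ops is 3 (the lhs plus two rhs operands), so LENGTH
   becomes 2 and the table records <PLUS_EXPR, a_2, b_3> -> x_1; a
   later lookup of the same valueized operands then yields x_1.  */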
1922 /* Compute a hashcode for PHI operation VP1 and return it. */
1924 static inline hashval_t
1925 vn_phi_compute_hash (vn_phi_t vp1)
1927 hashval_t result;
1928 int i;
1929 tree phi1op;
1930 tree type;
1932 result = vp1->block->index;
1934 /* If all PHI arguments are constants we need to distinguish
1935 the PHI node via its type. */
1936 type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
1937 result += (INTEGRAL_TYPE_P (type)
1938 + (INTEGRAL_TYPE_P (type)
1939 ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
1941 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
1943 if (phi1op == VN_TOP)
1944 continue;
1945 result = iterative_hash_expr (phi1op, result);
1948 return result;
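/* Why the type feeds the hash above (a sketch; names and block
   numbers are hypothetical): the two all-constant PHIs

     i_1 = PHI <1(bb2), 2(bb3)>    (int)
     u_2 = PHI <1(bb2), 2(bb3)>    (unsigned int)

   agree on block and argument values, so only the precision and
   signedness summands keep their hash values apart.  */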
1951 /* Return the computed hashcode for phi operation P1. */
1953 static hashval_t
1954 vn_phi_hash (const void *p1)
1956 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
1957 return vp1->hashcode;
1960 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
1962 static int
1963 vn_phi_eq (const void *p1, const void *p2)
1965 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
1966 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
1968 if (vp1->hashcode != vp2->hashcode)
1969 return false;
1971 if (vp1->block == vp2->block)
1973 int i;
1974 tree phi1op;
1976 /* If the PHI nodes do not have compatible types
1977 they are not the same. */
1978 if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
1979 TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
1980 return false;
1982 /* Any phi in the same block will have its arguments in the
1983 same edge order, because of how we store phi nodes. */
1984 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
1986 tree phi2op = VEC_index (tree, vp2->phiargs, i);
1987 if (phi1op == VN_TOP || phi2op == VN_TOP)
1988 continue;
1989 if (!expressions_equal_p (phi1op, phi2op))
1990 return false;
1992 return true;
1994 return false;
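/* Skipping VN_TOP arguments above lets an optimistically incomplete
   PHI unify with a completed one.  Sketch (hypothetical names):

     x_4 = PHI <a_1(bb2), VN_TOP(bb3)>
     y_5 = PHI <a_1(bb2), b_2(bb3)>

   compare equal here, since VN_TOP is equivalent to everything and
   may still resolve to b_2 during SCC iteration.  */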
1997 static VEC(tree, heap) *shared_lookup_phiargs;
1999 /* Lookup PHI in the current hash table, and return the resulting
2000 value number if it exists in the hash table. Return NULL_TREE if
2001 it does not exist in the hash table. */
2003 static tree
2004 vn_phi_lookup (gimple phi)
2006 void **slot;
2007 struct vn_phi_s vp1;
2008 unsigned i;
2010 VEC_truncate (tree, shared_lookup_phiargs, 0);
2012 /* Canonicalize the SSA_NAMEs to their value numbers. */
2013 for (i = 0; i < gimple_phi_num_args (phi); i++)
2015 tree def = PHI_ARG_DEF (phi, i);
2016 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2017 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
2019 vp1.phiargs = shared_lookup_phiargs;
2020 vp1.block = gimple_bb (phi);
2021 vp1.hashcode = vn_phi_compute_hash (&vp1);
2022 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
2023 NO_INSERT);
2024 if (!slot && current_info == optimistic_info)
2025 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
2026 NO_INSERT);
2027 if (!slot)
2028 return NULL_TREE;
2029 return ((vn_phi_t)*slot)->result;
2032 /* Insert PHI into the current hash table with a value number of
2033 RESULT. */
2035 static vn_phi_t
2036 vn_phi_insert (gimple phi, tree result)
2038 void **slot;
2039 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2040 unsigned i;
2041 VEC (tree, heap) *args = NULL;
2043 /* Canonicalize the SSA_NAMEs to their value numbers. */
2044 for (i = 0; i < gimple_phi_num_args (phi); i++)
2046 tree def = PHI_ARG_DEF (phi, i);
2047 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2048 VEC_safe_push (tree, heap, args, def);
2050 vp1->value_id = VN_INFO (result)->value_id;
2051 vp1->phiargs = args;
2052 vp1->block = gimple_bb (phi);
2053 vp1->result = result;
2054 vp1->hashcode = vn_phi_compute_hash (vp1);
2056 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
2057 INSERT);
2059 /* Because we iterate over phi operations more than once, it's
2060 possible the slot might already exist here, hence no assert. */
2061 *slot = vp1;
2062 return vp1;
2066 /* Print the components of the strongly connected component SCC to OUT. */
2068 static void
2069 print_scc (FILE *out, VEC (tree, heap) *scc)
2071 tree var;
2072 unsigned int i;
2074 fprintf (out, "SCC consists of: ");
2075 FOR_EACH_VEC_ELT (tree, scc, i, var)
2077 print_generic_expr (out, var, 0);
2078 fprintf (out, " ");
2080 fprintf (out, "\n");
2083 /* Set the value number of FROM to TO, return true if it has changed
2084 as a result. */
2086 static inline bool
2087 set_ssa_val_to (tree from, tree to)
2089 tree currval;
2091 if (from != to
2092 && TREE_CODE (to) == SSA_NAME
2093 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2094 to = from;
2096 /* The only things we allow as value numbers are VN_TOP, SSA names
2097 and invariants. So assert that here. */
2098 gcc_assert (to != NULL_TREE
2099 && (to == VN_TOP
2100 || TREE_CODE (to) == SSA_NAME
2101 || is_gimple_min_invariant (to)));
2103 if (dump_file && (dump_flags & TDF_DETAILS))
2105 fprintf (dump_file, "Setting value number of ");
2106 print_generic_expr (dump_file, from, 0);
2107 fprintf (dump_file, " to ");
2108 print_generic_expr (dump_file, to, 0);
2111 currval = SSA_VAL (from);
2113 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
2115 VN_INFO (from)->valnum = to;
2116 if (dump_file && (dump_flags & TDF_DETAILS))
2117 fprintf (dump_file, " (changed)\n");
2118 return true;
2120 if (dump_file && (dump_flags & TDF_DETAILS))
2121 fprintf (dump_file, "\n");
2122 return false;
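/* A usage sketch of the change detection above (hypothetical names):
   the first visit of an SCC member typically moves its value from
   VN_TOP to something concrete, e.g.

     set_ssa_val_to (x_1, integer_zero_node)   returns true

   while revisiting it with the same value returns false; process_scc
   keeps iterating exactly as long as some call reports a change.  */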
2125 /* Value number all definitions in STMT to themselves.
2126 Return true if a value number changed. */
2128 static bool
2129 defs_to_varying (gimple stmt)
2131 bool changed = false;
2132 ssa_op_iter iter;
2133 def_operand_p defp;
2135 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2137 tree def = DEF_FROM_PTR (defp);
2139 VN_INFO (def)->use_processed = true;
2140 changed |= set_ssa_val_to (def, def);
2142 return changed;
2145 static bool expr_has_constants (tree expr);
2146 static tree valueize_expr (tree expr);
2148 /* Visit a copy between LHS and RHS, return true if the value number
2149 changed. */
2151 static bool
2152 visit_copy (tree lhs, tree rhs)
2154 /* Follow chains of copies to their destination. */
2155 while (TREE_CODE (rhs) == SSA_NAME
2156 && SSA_VAL (rhs) != rhs)
2157 rhs = SSA_VAL (rhs);
2159 /* The copy may have a more interesting constant-filled expression
2160 (we don't, since we know our RHS is just an SSA name). */
2161 if (TREE_CODE (rhs) == SSA_NAME)
2163 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2164 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2167 return set_ssa_val_to (lhs, rhs);
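/* Copy-chain sketch (hypothetical names): given

     b_2 = c_3;
     a_1 = b_2;

   visiting a_1 follows SSA_VAL (b_2) to c_3, so a_1 is value numbered
   straight to c_3 instead of to the intermediate copy b_2.  */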
2170 /* Visit a unary operator RHS, value number it, and return true if the
2171 value number of LHS has changed as a result. */
2173 static bool
2174 visit_unary_op (tree lhs, gimple stmt)
2176 bool changed = false;
2177 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2179 if (result)
2181 changed = set_ssa_val_to (lhs, result);
2183 else
2185 changed = set_ssa_val_to (lhs, lhs);
2186 vn_nary_op_insert_stmt (stmt, lhs);
2189 return changed;
2192 /* Visit a binary operator RHS, value number it, and return true if the
2193 value number of LHS has changed as a result. */
2195 static bool
2196 visit_binary_op (tree lhs, gimple stmt)
2198 bool changed = false;
2199 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2201 if (result)
2203 changed = set_ssa_val_to (lhs, result);
2205 else
2207 changed = set_ssa_val_to (lhs, lhs);
2208 vn_nary_op_insert_stmt (stmt, lhs);
2211 return changed;
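/* The two visitors above implement the usual lookup-or-insert
   discipline of hash-based value numbering.  Sketch (hypothetical
   names):

     x_1 = a_2 + 1;   no entry yet: insert <PLUS_EXPR, a_2, 1> -> x_1
     y_3 = a_2 + 1;   lookup hits, so y_3 is value numbered to x_1

   which exposes the redundancy without modifying the IL itself.  */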
2214 /* Visit a call STMT storing into LHS. Return true if the value number
2215 of the LHS has changed as a result. */
2217 static bool
2218 visit_reference_op_call (tree lhs, gimple stmt)
2220 bool changed = false;
2221 struct vn_reference_s vr1;
2222 tree result;
2223 tree vuse = gimple_vuse (stmt);
2225 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2226 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2227 vr1.type = gimple_expr_type (stmt);
2228 vr1.set = 0;
2229 vr1.hashcode = vn_reference_compute_hash (&vr1);
2230 result = vn_reference_lookup_1 (&vr1, NULL);
2231 if (result)
2233 changed = set_ssa_val_to (lhs, result);
2234 if (TREE_CODE (result) == SSA_NAME
2235 && VN_INFO (result)->has_constants)
2236 VN_INFO (lhs)->has_constants = true;
2238 else
2240 void **slot;
2241 vn_reference_t vr2;
2242 changed = set_ssa_val_to (lhs, lhs);
2243 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2244 vr2->vuse = vr1.vuse;
2245 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2246 vr2->type = vr1.type;
2247 vr2->set = vr1.set;
2248 vr2->hashcode = vr1.hashcode;
2249 vr2->result = lhs;
2250 slot = htab_find_slot_with_hash (current_info->references,
2251 vr2, vr2->hashcode, INSERT);
2252 if (*slot)
2253 free_reference (*slot);
2254 *slot = vr2;
2257 return changed;
2260 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2261 and return true if the value number of the LHS has changed as a result. */
2263 static bool
2264 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2266 bool changed = false;
2267 tree last_vuse;
2268 tree result;
2270 last_vuse = gimple_vuse (stmt);
2271 last_vuse_ptr = &last_vuse;
2272 result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);
2273 last_vuse_ptr = NULL;
2275 /* If we have a VCE, try looking up its operand as it might be stored in
2276 a different type. */
2277 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2278 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2279 true, NULL);
2281 /* We handle type-punning through unions by value-numbering based
2282 on offset and size of the access. Be prepared to handle a
2283 type-mismatch here by creating a VIEW_CONVERT_EXPR. */
2284 if (result
2285 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2287 /* We will be setting the value number of lhs to the value number
2288 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2289 So first simplify and lookup this expression to see if it
2290 is already available. */
2291 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2292 if ((CONVERT_EXPR_P (val)
2293 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2294 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2296 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2297 if ((CONVERT_EXPR_P (tem)
2298 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2299 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2300 TREE_TYPE (val), tem)))
2301 val = tem;
2303 result = val;
2304 if (!is_gimple_min_invariant (val)
2305 && TREE_CODE (val) != SSA_NAME)
2306 result = vn_nary_op_lookup (val, NULL);
2307 /* If the expression is not yet available, value-number lhs to
2308 a new SSA_NAME we create. */
2309 if (!result)
2311 result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
2312 /* Initialize value-number information properly. */
2313 VN_INFO_GET (result)->valnum = result;
2314 VN_INFO (result)->value_id = get_next_value_id ();
2315 VN_INFO (result)->expr = val;
2316 VN_INFO (result)->has_constants = expr_has_constants (val);
2317 VN_INFO (result)->needs_insertion = true;
2318 /* As all "inserted" statements are singleton SCCs, insert
2319 to the valid table. This is strictly needed to
2320 avoid re-generating new value SSA_NAMEs for the same
2321 expression during SCC iteration over and over (the
2322 optimistic table gets cleared after each iteration).
2323 We do not need to insert into the optimistic table, as
2324 lookups there will fall back to the valid table. */
2325 if (current_info == optimistic_info)
2327 current_info = valid_info;
2328 vn_nary_op_insert (val, result);
2329 current_info = optimistic_info;
2331 else
2332 vn_nary_op_insert (val, result);
2333 if (dump_file && (dump_flags & TDF_DETAILS))
2335 fprintf (dump_file, "Inserting name ");
2336 print_generic_expr (dump_file, result, 0);
2337 fprintf (dump_file, " for expression ");
2338 print_generic_expr (dump_file, val, 0);
2339 fprintf (dump_file, "\n");
2344 if (result)
2346 changed = set_ssa_val_to (lhs, result);
2347 if (TREE_CODE (result) == SSA_NAME
2348 && VN_INFO (result)->has_constants)
2350 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2351 VN_INFO (lhs)->has_constants = true;
2354 else
2356 changed = set_ssa_val_to (lhs, lhs);
2357 vn_reference_insert (op, lhs, last_vuse);
2360 return changed;
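/* A type-punning sketch for the VIEW_CONVERT_EXPR path above
   (hypothetical types and names): for

     union { int i; float f; } u;
     u.i = i_1;
     f_2 = u.f;

   the load of u.f matches the stored value i_1 by offset and size but
   not by type, so f_2 is value numbered to
   VIEW_CONVERT_EXPR <float> (i_1), creating a fresh inserted name if
   that expression is not available yet.  */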
2364 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2365 and return true if the value number of the LHS has changed as a result. */
2367 static bool
2368 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2370 bool changed = false;
2371 tree result;
2372 bool resultsame = false;
2374 /* First we want to look up using the *vuses* from the store and see
2375 whether the last store to this location with the same address
2376 had the same value.
2378 The vuses represent the memory state before the store. If the
2379 memory state, address, and value of the store is the same as the
2380 last store to this location, then this store will produce the
2381 same memory state as that store.
2383 In this case the vdef versions for this store are value numbered to those
2384 vuse versions, since they represent the same memory state after
2385 this store.
2387 Otherwise, the vdefs for the store are used when inserting into
2388 the table, since the store generates a new memory state. */
2390 result = vn_reference_lookup (lhs, gimple_vuse (stmt), false, NULL);
2392 if (result)
2394 if (TREE_CODE (result) == SSA_NAME)
2395 result = SSA_VAL (result);
2396 if (TREE_CODE (op) == SSA_NAME)
2397 op = SSA_VAL (op);
2398 resultsame = expressions_equal_p (result, op);
2401 if (!result || !resultsame)
2403 tree vdef;
2405 if (dump_file && (dump_flags & TDF_DETAILS))
2407 fprintf (dump_file, "No store match\n");
2408 fprintf (dump_file, "Value numbering store ");
2409 print_generic_expr (dump_file, lhs, 0);
2410 fprintf (dump_file, " to ");
2411 print_generic_expr (dump_file, op, 0);
2412 fprintf (dump_file, "\n");
2414 /* Have to set value numbers before insert, since insert is
2415 going to valueize the references in-place. */
2416 if ((vdef = gimple_vdef (stmt)))
2418 VN_INFO (vdef)->use_processed = true;
2419 changed |= set_ssa_val_to (vdef, vdef);
2422 /* Do not insert structure copies into the tables. */
2423 if (is_gimple_min_invariant (op)
2424 || is_gimple_reg (op))
2425 vn_reference_insert (lhs, op, vdef);
2427 else
2429 /* We had a match, so value number the vdef to have the value
2430 number of the vuse it came from. */
2431 tree def, use;
2433 if (dump_file && (dump_flags & TDF_DETAILS))
2434 fprintf (dump_file, "Store matched earlier value, "
2435 "value numbering store vdefs to matching vuses.\n");
2437 def = gimple_vdef (stmt);
2438 use = gimple_vuse (stmt);
2440 VN_INFO (def)->use_processed = true;
2441 changed |= set_ssa_val_to (def, SSA_VAL (use));
2444 return changed;
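/* Store-match sketch (hypothetical names and virtual operands): in

     # .MEM_3 = VDEF <.MEM_2>
     *p_1 = x_4;
     ...
     # .MEM_5 = VDEF <.MEM_3>
     *p_1 = x_4;

   the second store finds that the memory state .MEM_3 already holds
   x_4 at *p_1, so .MEM_5 is value numbered to SSA_VAL (.MEM_3) and
   the store is exposed as redundant.  */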
2447 /* Visit and value number PHI, return true if the value number
2448 changed. */
2450 static bool
2451 visit_phi (gimple phi)
2453 bool changed = false;
2454 tree result;
2455 tree sameval = VN_TOP;
2456 bool allsame = true;
2457 unsigned i;
2459 /* TODO: We could check for this in init_sccvn, and replace this
2460 with a gcc_assert. */
2461 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2462 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2464 /* See if all non-TOP arguments have the same value. TOP is
2465 equivalent to everything, so we can ignore it. */
2466 for (i = 0; i < gimple_phi_num_args (phi); i++)
2468 tree def = PHI_ARG_DEF (phi, i);
2470 if (TREE_CODE (def) == SSA_NAME)
2471 def = SSA_VAL (def);
2472 if (def == VN_TOP)
2473 continue;
2474 if (sameval == VN_TOP)
2476 sameval = def;
2478 else
2480 if (!expressions_equal_p (def, sameval))
2482 allsame = false;
2483 break;
2488 /* If all arguments value numbered to the same value, the phi node has that
2489 value. */
2490 if (allsame)
2492 if (is_gimple_min_invariant (sameval))
2494 VN_INFO (PHI_RESULT (phi))->has_constants = true;
2495 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2497 else
2499 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2500 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2503 if (TREE_CODE (sameval) == SSA_NAME)
2504 return visit_copy (PHI_RESULT (phi), sameval);
2506 return set_ssa_val_to (PHI_RESULT (phi), sameval);
2509 /* Otherwise, see if it is equivalent to a phi node in this block. */
2510 result = vn_phi_lookup (phi);
2511 if (result)
2513 if (TREE_CODE (result) == SSA_NAME)
2514 changed = visit_copy (PHI_RESULT (phi), result);
2515 else
2516 changed = set_ssa_val_to (PHI_RESULT (phi), result);
2518 else
2520 vn_phi_insert (phi, PHI_RESULT (phi));
2521 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2522 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2523 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2526 return changed;
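/* PHI-collapsing sketch (hypothetical names): for

     x_3 = PHI <a_1(bb2), a_1(bb3)>

   every non-TOP argument value numbers to a_1, so x_3 is treated as a
   copy of a_1; a PHI with genuinely different arguments can still
   unify with an equivalent PHI already hashed for the same block.  */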
2529 /* Return true if EXPR contains constants. */
2531 static bool
2532 expr_has_constants (tree expr)
2534 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2536 case tcc_unary:
2537 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2539 case tcc_binary:
2540 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2541 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2542 /* Constants inside reference ops are rarely interesting, but
2543 it can take a lot of looking to find them. */
2544 case tcc_reference:
2545 case tcc_declaration:
2546 return false;
2547 default:
2548 return is_gimple_min_invariant (expr);
2550 return false;
2553 /* Return true if STMT contains constants. */
2555 static bool
2556 stmt_has_constants (gimple stmt)
2558 if (gimple_code (stmt) != GIMPLE_ASSIGN)
2559 return false;
2561 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2563 case GIMPLE_UNARY_RHS:
2564 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2566 case GIMPLE_BINARY_RHS:
2567 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2568 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
2569 case GIMPLE_TERNARY_RHS:
2570 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2571 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
2572 || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
2573 case GIMPLE_SINGLE_RHS:
2574 /* Constants inside reference ops are rarely interesting, but
2575 it can take a lot of looking to find them. */
2576 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2577 default:
2578 gcc_unreachable ();
2580 return false;
2583 /* Replace SSA_NAMEs in EXPR with their value numbers, and return the
2584 result.
2585 This is performed in place. */
2587 static tree
2588 valueize_expr (tree expr)
2590 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2592 case tcc_unary:
2593 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2594 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2595 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2596 break;
2597 case tcc_binary:
2598 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2599 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2600 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2601 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
2602 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
2603 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
2604 break;
2605 default:
2606 break;
2608 return expr;
2611 /* Simplify the binary expression RHS, and return the result if
2612 simplified. */
2614 static tree
2615 simplify_binary_expression (gimple stmt)
2617 tree result = NULL_TREE;
2618 tree op0 = gimple_assign_rhs1 (stmt);
2619 tree op1 = gimple_assign_rhs2 (stmt);
2621 /* This will not catch every single case we could combine, but will
2622 catch those with constants. The goal here is to simultaneously
2623 combine constants between expressions, but avoid infinite
2624 expansion of expressions during simplification. */
2625 if (TREE_CODE (op0) == SSA_NAME)
2627 if (VN_INFO (op0)->has_constants
2628 || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
2629 op0 = valueize_expr (vn_get_expr_for (op0));
2630 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
2631 op0 = SSA_VAL (op0);
2634 if (TREE_CODE (op1) == SSA_NAME)
2636 if (VN_INFO (op1)->has_constants)
2637 op1 = valueize_expr (vn_get_expr_for (op1));
2638 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
2639 op1 = SSA_VAL (op1);
2642 /* Avoid folding if nothing changed. */
2643 if (op0 == gimple_assign_rhs1 (stmt)
2644 && op1 == gimple_assign_rhs2 (stmt))
2645 return NULL_TREE;
2647 fold_defer_overflow_warnings ();
2649 result = fold_binary (gimple_assign_rhs_code (stmt),
2650 gimple_expr_type (stmt), op0, op1);
2651 if (result)
2652 STRIP_USELESS_TYPE_CONVERSION (result);
2654 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
2655 stmt, 0);
2657 /* Make sure result is not a complex expression consisting
2658 of operators of operators (i.e. (a + b) + (a + c)).
2659 Otherwise, we will end up with unbounded expressions if
2660 fold does anything at all. */
2661 if (result && valid_gimple_rhs_p (result))
2662 return result;
2664 return NULL_TREE;
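/* Constant-combining sketch for the simplification above
   (hypothetical names): given

     x_1 = a_2 + 1;
     y_3 = x_1 + 2;

   visiting y_3 substitutes the recorded expression a_2 + 1 for x_1
   and folds to a_2 + 3, which is a valid GIMPLE rhs, so y_3 is value
   numbered as <PLUS_EXPR, a_2, 3> without nesting operators.  */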
2667 /* Simplify the unary expression RHS, and return the result if
2668 simplified. */
2670 static tree
2671 simplify_unary_expression (gimple stmt)
2673 tree result = NULL_TREE;
2674 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
2676 /* We handle some tcc_reference codes here that are all
2677 GIMPLE_ASSIGN_SINGLE codes. */
2678 if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
2679 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2680 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2681 op0 = TREE_OPERAND (op0, 0);
2683 if (TREE_CODE (op0) != SSA_NAME)
2684 return NULL_TREE;
2686 orig_op0 = op0;
2687 if (VN_INFO (op0)->has_constants)
2688 op0 = valueize_expr (vn_get_expr_for (op0));
2689 else if (gimple_assign_cast_p (stmt)
2690 || gimple_assign_rhs_code (stmt) == REALPART_EXPR
2691 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2692 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2694 /* We want to do tree-combining on conversion-like expressions.
2695 Make sure we feed only SSA_NAMEs or constants to fold though. */
2696 tree tem = valueize_expr (vn_get_expr_for (op0));
2697 if (UNARY_CLASS_P (tem)
2698 || BINARY_CLASS_P (tem)
2699 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
2700 || TREE_CODE (tem) == SSA_NAME
2701 || is_gimple_min_invariant (tem))
2702 op0 = tem;
2705 /* Avoid folding if nothing changed, but remember the expression. */
2706 if (op0 == orig_op0)
2707 return NULL_TREE;
2709 result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
2710 gimple_expr_type (stmt), op0);
2711 if (result)
2713 STRIP_USELESS_TYPE_CONVERSION (result);
2714 if (valid_gimple_rhs_p (result))
2715 return result;
2718 return NULL_TREE;
2721 /* Try to simplify RHS using equivalences and constant folding. */
2723 static tree
2724 try_to_simplify (gimple stmt)
2726 tree tem;
2728 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
2729 in this case; there is no point in doing extra work. */
2730 if (gimple_assign_copy_p (stmt)
2731 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2732 return NULL_TREE;
2734 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2736 case tcc_declaration:
2737 tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
2738 if (tem)
2739 return tem;
2740 break;
2742 case tcc_reference:
2743 /* Do not do full-blown reference lookup here, but simplify
2744 reads from constant aggregates. */
2745 tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
2746 if (tem)
2747 return tem;
2749 /* Fallthrough for some codes that can operate on registers. */
2750 if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
2751 || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
2752 || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
2753 break;
2754 /* We could do a little more with unary ops, if they expand
2755 into binary ops, but it's debatable whether it is worth it. */
2756 case tcc_unary:
2757 return simplify_unary_expression (stmt);
2758 break;
2759 case tcc_comparison:
2760 case tcc_binary:
2761 return simplify_binary_expression (stmt);
2762 break;
2763 default:
2764 break;
2767 return NULL_TREE;
2770 /* Visit and value number USE, return true if the value number
2771 changed. */
2773 static bool
2774 visit_use (tree use)
2776 bool changed = false;
2777 gimple stmt = SSA_NAME_DEF_STMT (use);
2779 VN_INFO (use)->use_processed = true;
2781 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
2782 if (dump_file && (dump_flags & TDF_DETAILS)
2783 && !SSA_NAME_IS_DEFAULT_DEF (use))
2785 fprintf (dump_file, "Value numbering ");
2786 print_generic_expr (dump_file, use, 0);
2787 fprintf (dump_file, " stmt = ");
2788 print_gimple_stmt (dump_file, stmt, 0, 0);
2791 /* Handle uninitialized uses. */
2792 if (SSA_NAME_IS_DEFAULT_DEF (use))
2793 changed = set_ssa_val_to (use, use);
2794 else
2796 if (gimple_code (stmt) == GIMPLE_PHI)
2797 changed = visit_phi (stmt);
2798 else if (!gimple_has_lhs (stmt)
2799 || gimple_has_volatile_ops (stmt)
2800 || stmt_could_throw_p (stmt))
2801 changed = defs_to_varying (stmt);
2802 else if (is_gimple_assign (stmt))
2804 tree lhs = gimple_assign_lhs (stmt);
2805 tree simplified;
2807 /* Shortcut for copies. Simplifying copies is pointless,
2808 since we copy the expression and value they represent. */
2809 if (gimple_assign_copy_p (stmt)
2810 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2811 && TREE_CODE (lhs) == SSA_NAME)
2813 changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
2814 goto done;
2816 simplified = try_to_simplify (stmt);
2817 if (simplified)
2819 if (dump_file && (dump_flags & TDF_DETAILS))
2821 fprintf (dump_file, "RHS ");
2822 print_gimple_expr (dump_file, stmt, 0, 0);
2823 fprintf (dump_file, " simplified to ");
2824 print_generic_expr (dump_file, simplified, 0);
2825 if (TREE_CODE (lhs) == SSA_NAME)
2826 fprintf (dump_file, " has constants %d\n",
2827 expr_has_constants (simplified));
2828 else
2829 fprintf (dump_file, "\n");
2832 /* Setting value numbers to constants will occasionally
2833 screw up phi congruence because constants are not
2834 uniquely associated with a single ssa name that can be
2835 looked up. */
2836 if (simplified
2837 && is_gimple_min_invariant (simplified)
2838 && TREE_CODE (lhs) == SSA_NAME)
2840 VN_INFO (lhs)->expr = simplified;
2841 VN_INFO (lhs)->has_constants = true;
2842 changed = set_ssa_val_to (lhs, simplified);
2843 goto done;
2845 else if (simplified
2846 && TREE_CODE (simplified) == SSA_NAME
2847 && TREE_CODE (lhs) == SSA_NAME)
2849 changed = visit_copy (lhs, simplified);
2850 goto done;
2852 else if (simplified)
2854 if (TREE_CODE (lhs) == SSA_NAME)
2856 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
2857 /* We have to unshare the expression or else
2858 valueizing may change the IL stream. */
2859 VN_INFO (lhs)->expr = unshare_expr (simplified);
2862 else if (stmt_has_constants (stmt)
2863 && TREE_CODE (lhs) == SSA_NAME)
2864 VN_INFO (lhs)->has_constants = true;
2865 else if (TREE_CODE (lhs) == SSA_NAME)
2867 /* We reset expr and constantness here because we may
2868 have been value numbering optimistically and
2869 iterating. They may become non-constant in this case,
2870 even if they were optimistically constant. */
2872 VN_INFO (lhs)->has_constants = false;
2873 VN_INFO (lhs)->expr = NULL_TREE;
2876 if ((TREE_CODE (lhs) == SSA_NAME
2877 /* We can substitute SSA_NAMEs that are live over
2878 abnormal edges with their constant value. */
2879 && !(gimple_assign_copy_p (stmt)
2880 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2881 && !(simplified
2882 && is_gimple_min_invariant (simplified))
2883 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2884 /* Stores or copies from SSA_NAMEs that are live over
2885 abnormal edges are a problem. */
2886 || (gimple_assign_single_p (stmt)
2887 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2888 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
2889 changed = defs_to_varying (stmt);
2890 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
2892 changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
2894 else if (TREE_CODE (lhs) == SSA_NAME)
2896 if ((gimple_assign_copy_p (stmt)
2897 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2898 || (simplified
2899 && is_gimple_min_invariant (simplified)))
2901 VN_INFO (lhs)->has_constants = true;
2902 if (simplified)
2903 changed = set_ssa_val_to (lhs, simplified);
2904 else
2905 changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
2907 else
2909 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2911 case GIMPLE_UNARY_RHS:
2912 changed = visit_unary_op (lhs, stmt);
2913 break;
2914 case GIMPLE_BINARY_RHS:
2915 changed = visit_binary_op (lhs, stmt);
2916 break;
2917 case GIMPLE_SINGLE_RHS:
2918 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2920 case tcc_reference:
2921 /* VOP-less references can go through the unary case. */
2922 if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
2923 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2924 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR )
2925 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
2927 changed = visit_unary_op (lhs, stmt);
2928 break;
2930 /* Fallthrough. */
2931 case tcc_declaration:
2932 changed = visit_reference_op_load
2933 (lhs, gimple_assign_rhs1 (stmt), stmt);
2934 break;
2935 case tcc_expression:
2936 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
2938 changed = visit_unary_op (lhs, stmt);
2939 break;
2941 /* Fallthrough. */
2942 default:
2943 changed = defs_to_varying (stmt);
2945 break;
2946 default:
2947 changed = defs_to_varying (stmt);
2948 break;
2952 else
2953 changed = defs_to_varying (stmt);
2955 else if (is_gimple_call (stmt))
2957 tree lhs = gimple_call_lhs (stmt);
2959 /* ??? We could try to simplify calls. */
2961 if (stmt_has_constants (stmt)
2962 && TREE_CODE (lhs) == SSA_NAME)
2963 VN_INFO (lhs)->has_constants = true;
2964 else if (TREE_CODE (lhs) == SSA_NAME)
2966 /* We reset expr and constantness here because we may
2967 have been value numbering optimistically and
2968 iterating. They may become non-constant in this case,
2969 even if they were optimistically constant. */
2970 VN_INFO (lhs)->has_constants = false;
2971 VN_INFO (lhs)->expr = NULL_TREE;
2974 if (TREE_CODE (lhs) == SSA_NAME
2975 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2976 changed = defs_to_varying (stmt);
2977 /* ??? We should handle stores from calls. */
2978 else if (TREE_CODE (lhs) == SSA_NAME)
2980 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
2981 changed = visit_reference_op_call (lhs, stmt);
2982 else
2983 changed = defs_to_varying (stmt);
2985 else
2986 changed = defs_to_varying (stmt);
2989 done:
2990 return changed;
2993 /* Compare two operands by reverse postorder index. */
2995 static int
2996 compare_ops (const void *pa, const void *pb)
2998 const tree opa = *((const tree *)pa);
2999 const tree opb = *((const tree *)pb);
3000 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3001 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3002 basic_block bba;
3003 basic_block bbb;
3005 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3006 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3007 else if (gimple_nop_p (opstmta))
3008 return -1;
3009 else if (gimple_nop_p (opstmtb))
3010 return 1;
3012 bba = gimple_bb (opstmta);
3013 bbb = gimple_bb (opstmtb);
3015 if (!bba && !bbb)
3016 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3017 else if (!bba)
3018 return -1;
3019 else if (!bbb)
3020 return 1;
3022 if (bba == bbb)
3024 if (gimple_code (opstmta) == GIMPLE_PHI
3025 && gimple_code (opstmtb) == GIMPLE_PHI)
3026 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3027 else if (gimple_code (opstmta) == GIMPLE_PHI)
3028 return -1;
3029 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3030 return 1;
3031 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3032 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3033 else
3034 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3036 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3039 /* Sort an array containing members of a strongly connected component
3040 SCC so that the members are ordered by RPO number.
3041 This means that when the sort is complete, iterating through the
3042 array will give you the members in RPO order. */
3044 static void
3045 sort_scc (VEC (tree, heap) *scc)
3047 VEC_qsort (tree, scc, compare_ops);
3050 /* Insert the no-longer-used nary ONARY into the nary hash table of INFO. */
3052 static void
3053 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3055 size_t size = sizeof_vn_nary_op (onary->length);
3056 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3057 &info->nary_obstack);
3058 memcpy (nary, onary, size);
3059 vn_nary_op_insert_into (nary, info->nary, false);
3062 /* Insert the no-longer-used phi OPHI into the phi hash table of INFO. */
3064 static void
3065 copy_phi (vn_phi_t ophi, vn_tables_t info)
3067 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3068 void **slot;
3069 memcpy (phi, ophi, sizeof (*phi));
3070 ophi->phiargs = NULL;
3071 slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
3072 gcc_assert (!*slot);
3073 *slot = phi;
3076 /* Insert the no-longer-used reference OREF into the reference hash table of INFO. */
3078 static void
3079 copy_reference (vn_reference_t oref, vn_tables_t info)
3081 vn_reference_t ref;
3082 void **slot;
3083 ref = (vn_reference_t) pool_alloc (info->references_pool);
3084 memcpy (ref, oref, sizeof (*ref));
3085 oref->operands = NULL;
3086 slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
3087 INSERT);
3088 if (*slot)
3089 free_reference (*slot);
3090 *slot = ref;
3093 /* Process a strongly connected component in the SSA graph. */
3095 static void
3096 process_scc (VEC (tree, heap) *scc)
3098 tree var;
3099 unsigned int i;
3100 unsigned int iterations = 0;
3101 bool changed = true;
3102 htab_iterator hi;
3103 vn_nary_op_t nary;
3104 vn_phi_t phi;
3105 vn_reference_t ref;
3107 /* If the SCC has a single member, just visit it. */
3108 if (VEC_length (tree, scc) == 1)
3110 tree use = VEC_index (tree, scc, 0);
3111 if (!VN_INFO (use)->use_processed)
3112 visit_use (use);
3113 return;
3116 /* Iterate over the SCC with the optimistic table until it stops
3117 changing. */
3118 current_info = optimistic_info;
3119 while (changed)
3121 changed = false;
3122 iterations++;
3123 /* As we are value-numbering optimistically we have to
3124 clear the expression tables and the simplified expressions
3125 in each iteration until we converge. */
3126 htab_empty (optimistic_info->nary);
3127 htab_empty (optimistic_info->phis);
3128 htab_empty (optimistic_info->references);
3129 obstack_free (&optimistic_info->nary_obstack, NULL);
3130 gcc_obstack_init (&optimistic_info->nary_obstack);
3131 empty_alloc_pool (optimistic_info->phis_pool);
3132 empty_alloc_pool (optimistic_info->references_pool);
3133 FOR_EACH_VEC_ELT (tree, scc, i, var)
3134 VN_INFO (var)->expr = NULL_TREE;
3135 FOR_EACH_VEC_ELT (tree, scc, i, var)
3136 changed |= visit_use (var);
3139 statistics_histogram_event (cfun, "SCC iterations", iterations);
3141 /* Finally, copy the contents of the no longer used optimistic
3142 table to the valid table. */
3143 FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
3144 copy_nary (nary, valid_info);
3145 FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
3146 copy_phi (phi, valid_info);
3147 FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
3148 copy_reference (ref, valid_info);
3150 current_info = valid_info;
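/* Optimistic-iteration sketch (hypothetical names): for the cycle

     i_1 = PHI <0(entry), i_2(latch)>
     i_2 = i_1 + 0;

   the first pass treats the not-yet-visited i_2 as VN_TOP, giving i_1
   the value 0 and folding i_2 to 0 as well; the second pass re-checks
   both under that assignment, nothing changes, and the converged
   entries are copied into the valid table above.  */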
3153 DEF_VEC_O(ssa_op_iter);
3154 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
3156 /* Pop the components of the found SCC for NAME off the SCC stack
3157 and process them. Returns true if all went well, false if
3158 we run into resource limits. */
3160 static bool
3161 extract_and_process_scc_for_name (tree name)
3163 VEC (tree, heap) *scc = NULL;
3164 tree x;
3166 /* Found an SCC, pop the components off the SCC stack and
3167 process them. */
3170 x = VEC_pop (tree, sccstack);
3172 VN_INFO (x)->on_sccstack = false;
3173 VEC_safe_push (tree, heap, scc, x);
3174 } while (x != name);
3176 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
3177 if (VEC_length (tree, scc)
3178 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3180 if (dump_file)
3181 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3182 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
3183 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3184 return false;
3187 if (VEC_length (tree, scc) > 1)
3188 sort_scc (scc);
3190 if (dump_file && (dump_flags & TDF_DETAILS))
3191 print_scc (dump_file, scc);
3193 process_scc (scc);
3195 VEC_free (tree, heap, scc);
3197 return true;
3200 /* Depth first search on NAME to discover and process SCCs in the SSA
3201 graph.
3202 Execution of this algorithm relies on the fact that the SCCs are
3203 popped off the stack in topological order.
3204 Returns true if successful, false if we stopped processing SCCs due
3205 to resource constraints. */
3207 static bool
3208 DFS (tree name)
3210 VEC(ssa_op_iter, heap) *itervec = NULL;
3211 VEC(tree, heap) *namevec = NULL;
3212 use_operand_p usep = NULL;
3213 gimple defstmt;
3214 tree use;
3215 ssa_op_iter iter;
3217 start_over:
3218 /* SCC info */
3219 VN_INFO (name)->dfsnum = next_dfs_num++;
3220 VN_INFO (name)->visited = true;
3221 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3223 VEC_safe_push (tree, heap, sccstack, name);
3224 VN_INFO (name)->on_sccstack = true;
3225 defstmt = SSA_NAME_DEF_STMT (name);
3227 /* Recursively DFS on our operands, looking for SCCs. */
3228 if (!gimple_nop_p (defstmt))
3230 /* Push a new iterator. */
3231 if (gimple_code (defstmt) == GIMPLE_PHI)
3232 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3233 else
3234 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3236 else
3237 clear_and_done_ssa_iter (&iter);
3239 while (1)
3241 /* If we are done processing uses of a name, go up the stack
3242 of iterators and process SCCs as we found them. */
3243 if (op_iter_done (&iter))
3245 /* See if we found an SCC. */
3246 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3247 if (!extract_and_process_scc_for_name (name))
3249 VEC_free (tree, heap, namevec);
3250 VEC_free (ssa_op_iter, heap, itervec);
3251 return false;
3254 /* Check if we are done. */
3255 if (VEC_empty (tree, namevec))
3257 VEC_free (tree, heap, namevec);
3258 VEC_free (ssa_op_iter, heap, itervec);
3259 return true;
3262 /* Restore the last use walker and continue walking there. */
3263 use = name;
3264 name = VEC_pop (tree, namevec);
3265 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
3266 sizeof (ssa_op_iter));
3267 VEC_pop (ssa_op_iter, itervec);
3268 goto continue_walking;
3271 use = USE_FROM_PTR (usep);
3273 /* Since we handle phi nodes, we will sometimes get
3274 invariants in the use expression. */
3275 if (TREE_CODE (use) == SSA_NAME)
3277 if (! (VN_INFO (use)->visited))
3279 /* Recurse by pushing the current use walking state on
3280 the stack and starting over. */
3281 VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
3282 VEC_safe_push(tree, heap, namevec, name);
3283 name = use;
3284 goto start_over;
3286 continue_walking:
3287 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3288 VN_INFO (use)->low);
3290 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3291 && VN_INFO (use)->on_sccstack)
3293 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3294 VN_INFO (name)->low);
3298 usep = op_iter_next_use (&iter);
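/* The function above is Tarjan's SCC algorithm made iterative by
   keeping the recursion state in NAMEVEC/ITERVEC.  Low-link sketch
   (hypothetical names): for

     x_1 = PHI <0(bb2), y_2(bb4)>
     y_2 = x_1 + 1;

   the walk from x_1 reaches y_2, whose use of x_1 is still on the
   stack, so y_2's low drops to x_1's dfsnum; back at x_1, low equals
   dfsnum again and {x_1, y_2} is popped and processed as one SCC.  */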
3302 /* Allocate a value number table. */
3304 static void
3305 allocate_vn_table (vn_tables_t table)
3307 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3308 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3309 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3310 free_reference);
3312 gcc_obstack_init (&table->nary_obstack);
3313 table->phis_pool = create_alloc_pool ("VN phis",
3314 sizeof (struct vn_phi_s),
3315 30);
3316 table->references_pool = create_alloc_pool ("VN references",
3317 sizeof (struct vn_reference_s),
3318 30);
3321 /* Free a value number table. */
3323 static void
3324 free_vn_table (vn_tables_t table)
3326 htab_delete (table->phis);
3327 htab_delete (table->nary);
3328 htab_delete (table->references);
3329 obstack_free (&table->nary_obstack, NULL);
3330 free_alloc_pool (table->phis_pool);
3331 free_alloc_pool (table->references_pool);
3334 static void
3335 init_scc_vn (void)
3337 size_t i;
3338 int j;
3339 int *rpo_numbers_temp;
3341 calculate_dominance_info (CDI_DOMINATORS);
3342 sccstack = NULL;
3343 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3344 free);
3346 constant_value_ids = BITMAP_ALLOC (NULL);
3348 next_dfs_num = 1;
3349 next_value_id = 1;
3351 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
3352 /* VEC_alloc doesn't actually grow it to the right size; it just
3353 preallocates the space to do so. */
3354 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
3355 gcc_obstack_init (&vn_ssa_aux_obstack);
3357 shared_lookup_phiargs = NULL;
3358 shared_lookup_references = NULL;
3359 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3360 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3361 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3363 /* rpo_numbers_temp is an array in RPO order: rpo[i] = bb means that
3364 bb is the i'th block in RPO order. We want to map each bb index to its
3365 RPO number instead, so we need to invert this array. */
3366 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3367 rpo_numbers[rpo_numbers_temp[j]] = j;
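/* Inversion sketch: if pre_and_rev_post_order_compute produced
   rpo_numbers_temp = { 2, 5, 3, ... }, then block 2 is 0th in RPO,
   block 5 is 1st, and so on, so after this loop rpo_numbers[2] == 0,
   rpo_numbers[5] == 1, etc.  */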
3369 XDELETE (rpo_numbers_temp);
3371 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3373 /* Create the VN_INFO structures, and initialize value numbers to
3374 TOP. */
3375 for (i = 0; i < num_ssa_names; i++)
3377 tree name = ssa_name (i);
3378 if (name)
3380 VN_INFO_GET (name)->valnum = VN_TOP;
3381 VN_INFO (name)->expr = NULL_TREE;
3382 VN_INFO (name)->value_id = 0;
3386 renumber_gimple_stmt_uids ();
3388 /* Create the valid and optimistic value numbering tables. */
3389 valid_info = XCNEW (struct vn_tables_s);
3390 allocate_vn_table (valid_info);
3391 optimistic_info = XCNEW (struct vn_tables_s);
3392 allocate_vn_table (optimistic_info);
3395 void
3396 free_scc_vn (void)
3398 size_t i;
3400 htab_delete (constant_to_value_id);
3401 BITMAP_FREE (constant_value_ids);
3402 VEC_free (tree, heap, shared_lookup_phiargs);
3403 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
3404 XDELETEVEC (rpo_numbers);
3406 for (i = 0; i < num_ssa_names; i++)
3408 tree name = ssa_name (i);
3409 if (name
3410 && VN_INFO (name)->needs_insertion)
3411 release_ssa_name (name);
3413 obstack_free (&vn_ssa_aux_obstack, NULL);
3414 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
3416 VEC_free (tree, heap, sccstack);
3417 free_vn_table (valid_info);
3418 XDELETE (valid_info);
3419 free_vn_table (optimistic_info);
3420 XDELETE (optimistic_info);
3423 /* Set *ID if we computed something useful in RESULT. */
3425 static void
3426 set_value_id_for_result (tree result, unsigned int *id)
3428 if (result)
3430 if (TREE_CODE (result) == SSA_NAME)
3431 *id = VN_INFO (result)->value_id;
3432 else if (is_gimple_min_invariant (result))
3433 *id = get_or_alloc_constant_value_id (result);
3437 /* Set the value ids in the valid hash tables. */
3439 static void
3440 set_hashtable_value_ids (void)
3442 htab_iterator hi;
3443 vn_nary_op_t vno;
3444 vn_reference_t vr;
3445 vn_phi_t vp;
3447 /* Now set the value ids of the things we had put in the hash
3448 table. */
3450 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
3451 vno, vn_nary_op_t, hi)
3452 set_value_id_for_result (vno->result, &vno->value_id);
3454 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
3455 vp, vn_phi_t, hi)
3456 set_value_id_for_result (vp->result, &vp->value_id);
3458 FOR_EACH_HTAB_ELEMENT (valid_info->references,
3459 vr, vn_reference_t, hi)
3460 set_value_id_for_result (vr->result, &vr->value_id);
3463 /* Do SCCVN. Returns true if it finished, false if we bailed out
3464 due to resource constraints. */
3466 bool
3467 run_scc_vn (void)
3469 size_t i;
3470 tree param;
3471 bool changed = true;
3473 init_scc_vn ();
3474 current_info = valid_info;
3476 for (param = DECL_ARGUMENTS (current_function_decl);
3477 param;
3478 param = DECL_CHAIN (param))
3480 if (gimple_default_def (cfun, param) != NULL)
3482 tree def = gimple_default_def (cfun, param);
3483 VN_INFO (def)->valnum = def;
3487 for (i = 1; i < num_ssa_names; ++i)
3489 tree name = ssa_name (i);
3490 if (name
3491 && VN_INFO (name)->visited == false
3492 && !has_zero_uses (name))
3493 if (!DFS (name))
3495 free_scc_vn ();
3496 return false;
3500 /* Initialize the value ids. */
3502 for (i = 1; i < num_ssa_names; ++i)
3504 tree name = ssa_name (i);
3505 vn_ssa_aux_t info;
3506 if (!name)
3507 continue;
3508 info = VN_INFO (name);
3509 if (info->valnum == name
3510 || info->valnum == VN_TOP)
3511 info->value_id = get_next_value_id ();
3512 else if (is_gimple_min_invariant (info->valnum))
3513 info->value_id = get_or_alloc_constant_value_id (info->valnum);
3516 /* Propagate value ids until they stop changing. */
3517 while (changed)
3519 changed = false;
3520 for (i = 1; i < num_ssa_names; ++i)
3522 tree name = ssa_name (i);
3523 vn_ssa_aux_t info;
3524 if (!name)
3525 continue;
3526 info = VN_INFO (name);
3527 if (TREE_CODE (info->valnum) == SSA_NAME
3528 && info->valnum != name
3529 && info->value_id != VN_INFO (info->valnum)->value_id)
3531 changed = true;
3532 info->value_id = VN_INFO (info->valnum)->value_id;
3537 set_hashtable_value_ids ();
3539 if (dump_file && (dump_flags & TDF_DETAILS))
3541 fprintf (dump_file, "Value numbers:\n");
3542 for (i = 0; i < num_ssa_names; i++)
3544 tree name = ssa_name (i);
3545 if (name
3546 && VN_INFO (name)->visited
3547 && SSA_VAL (name) != name)
3549 print_generic_expr (dump_file, name, 0);
3550 fprintf (dump_file, " = ");
3551 print_generic_expr (dump_file, SSA_VAL (name), 0);
3552 fprintf (dump_file, "\n");
3557 return true;
3560 /* Return the maximum value id we have ever seen. */
3562 unsigned int
3563 get_max_value_id (void)
3565 return next_value_id;
3568 /* Return the next unique value id. */
3570 unsigned int
3571 get_next_value_id (void)
3573 return next_value_id++;
3577 /* Compare two expressions E1 and E2 and return true if they are equal. */
3579 bool
3580 expressions_equal_p (tree e1, tree e2)
3582 /* The obvious case. */
3583 if (e1 == e2)
3584 return true;
3586 /* If only one of them is null, they cannot be equal. */
3587 if (!e1 || !e2)
3588 return false;
3590 /* Now perform the actual comparison. */
3591 if (TREE_CODE (e1) == TREE_CODE (e2)
3592 && operand_equal_p (e1, e2, OEP_PURE_SAME))
3593 return true;
3595 return false;
3599 /* Return true if the nary operation NARY may trap. This is a copy
3600 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
3602 bool
3603 vn_nary_may_trap (vn_nary_op_t nary)
3605 tree type;
3606 tree rhs2 = NULL_TREE;
3607 bool honor_nans = false;
3608 bool honor_snans = false;
3609 bool fp_operation = false;
3610 bool honor_trapv = false;
3611 bool handled, ret;
3612 unsigned i;
3614 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
3615 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
3616 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
3618 type = nary->type;
3619 fp_operation = FLOAT_TYPE_P (type);
3620 if (fp_operation)
3622 honor_nans = flag_trapping_math && !flag_finite_math_only;
3623 honor_snans = flag_signaling_nans != 0;
3625 else if (INTEGRAL_TYPE_P (type)
3626 && TYPE_OVERFLOW_TRAPS (type))
3627 honor_trapv = true;
3629 if (nary->length >= 2)
3630 rhs2 = nary->op[1];
3631 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
3632 honor_trapv,
3633 honor_nans, honor_snans, rhs2,
3634 &handled);
3635 if (handled
3636 && ret)
3637 return true;
3639 for (i = 0; i < nary->length; ++i)
3640 if (tree_could_trap_p (nary->op[i]))
3641 return true;
3643 return false;