/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.
   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
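
/* Illustrative example (a sketch, not part of the implementation):
   for two parallel inductions

     i_1 = PHI <0(entry), i_2(latch)>
     i_2 = i_1 + 1;
     j_1 = PHI <0(entry), j_2(latch)>
     j_2 = j_1 + 1;

   the optimistic iteration over the SCCs initially hashes both PHIs
   as PHI <0, VN_TOP>, so i_1 and j_1 receive the same value number.
   That in turn makes i_1 + 1 and j_1 + 1 hash equal, so i_2 == j_2,
   and re-iteration confirms the assumption is self-consistent.  An
   elimination pass can then remove one of the two inductions.  */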
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;
/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;
DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
                                SSA_NAME_VERSION (name));
  gcc_checking_assert (res);
  return res;
}
/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), value);
}
/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}
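
/* Usage sketch (illustrative, not part of this file): a consumer
   initializes info once per SSA name it visits and later queries the
   computed value number through the SSA_VAL macro:

     vn_ssa_aux_t info = VN_INFO_GET (name);   -- once per SSA name
     ...
     tree val = SSA_VAL (name);                -- VN_INFO (name)->valnum  */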
/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is a default-definition or a PHI result
     use it directly.  */
  if (gimple_nop_p (def_stmt)
      || gimple_code (def_stmt) == GIMPLE_PHI)
    return vn->valnum;

  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
    {
    case tcc_reference:
      if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
           || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
           || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
          && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
        expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}
/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}
/* Lookup a value id for CONSTANT, creating a fresh one if none
   exists yet, and return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, INSERT);
  if (*slot)
    return ((vn_constant_t)*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = (void *) vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}
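
/* Illustrative sketch: value ids for constants are interned, so two
   lookups of equal constants yield the same id, and that id tests
   positive with value_id_constant_p below:

     unsigned int id1 = get_or_alloc_constant_value_id (integer_zero_node);
     unsigned int id2 = get_or_alloc_constant_value_id (integer_zero_node);
     gcc_assert (id1 == id2 && value_id_constant_p (id1));  */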
/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return vro1->opcode == vro2->opcode
    && types_compatible_p (vro1->type, vro2->type)
    && expressions_equal_p (vro1->op0, vro2->op0)
    && expressions_equal_p (vro1->op1, vro2->op1)
    && expressions_equal_p (vro1->op2, vro2->op2);
}
/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}
/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}
/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    {
      if (vro->opcode == MEM_REF)
        deref = true;
      else if (vro->opcode != ADDR_EXPR)
        deref = false;
      if (vro->off != -1)
        {
          if (off == -1)
            off = 0;
          off += vro->off;
        }
      else
        {
          if (off != -1
              && off != 0)
            result = iterative_hash_hashval_t (off, result);
          off = -1;
          if (deref
              && vro->opcode == ADDR_EXPR)
            {
              if (vro->op0)
                {
                  tree op = TREE_OPERAND (vro->op0, 0);
                  result = iterative_hash_hashval_t (TREE_CODE (op), result);
                  result = iterative_hash_expr (op, result);
                }
            }
          else
            result = vn_reference_op_compute_hash (vro, result);
        }
    }
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
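
/* Illustrative note: runs of operands with known constant offsets are
   folded into a single accumulated OFF before hashing, so two access
   paths through different fields whose offsets add up to the same
   byte position hash identically.  The offset-based comparison in
   vn_reference_eq below mirrors this so hash and equality agree.  */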
/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  unsigned i, j;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
        {
          if (vro1->opcode == MEM_REF)
            deref1 = true;
          if (vro1->off == -1)
            break;
          off1 += vro1->off;
        }
      for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
        {
          if (vro2->opcode == MEM_REF)
            deref2 = true;
          if (vro2->off == -1)
            break;
          off2 += vro2->off;
        }
      if (off1 != off2)
        return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
        {
          memset (&tem1, 0, sizeof (tem1));
          tem1.op0 = TREE_OPERAND (vro1->op0, 0);
          tem1.type = TREE_TYPE (tem1.op0);
          tem1.opcode = TREE_CODE (tem1.op0);
          vro1 = &tem1;
        }
      if (deref2 && vro2->opcode == ADDR_EXPR)
        {
          memset (&tem2, 0, sizeof (tem2));
          tem2.op0 = TREE_OPERAND (vro2->op0, 0);
          tem2.type = TREE_TYPE (tem2.op0);
          tem2.opcode = TREE_CODE (tem2.op0);
          vro2 = &tem2;
        }
      if (!vn_reference_op_eq (vro1, vro2))
        return false;
      ++j;
      ++i;
    }
  while (VEC_length (vn_reference_op_s, vr1->operands) != i
         || VEC_length (vn_reference_op_s, vr2->operands) != j);

  return true;
}
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;
      tree base;

      base = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
      if (!base)
        base = null_pointer_node;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (base);
      temp.op0 = base;
      temp.op1 = TMR_ORIGINAL (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
        {
        case MISALIGNED_INDIRECT_REF:
          temp.op0 = TREE_OPERAND (ref, 1);
          break;
        case MEM_REF:
          /* The base address gets its own vn_reference_op_s structure.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          if (host_integerp (TREE_OPERAND (ref, 1), 0))
            temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          {
            tree this_offset = component_ref_field_offset (ref);
            if (this_offset
                && TREE_CODE (this_offset) == INTEGER_CST)
              {
                tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
                if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
                  {
                    double_int off
                      = double_int_add (tree_to_double_int (this_offset),
                                        double_int_sdiv
                                          (tree_to_double_int (bit_offset),
                                           uhwi_to_double_int (BITS_PER_UNIT),
                                           TRUNC_DIV_EXPR));
                    if (double_int_fits_in_shwi_p (off))
                      temp.off = off.low;
                  }
              }
          }
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
          if (TREE_CODE (temp.op0) == INTEGER_CST
              && TREE_CODE (temp.op1) == INTEGER_CST
              && TREE_CODE (temp.op2) == INTEGER_CST)
            {
              double_int off = tree_to_double_int (temp.op0);
              off = double_int_add (off,
                                    double_int_neg
                                      (tree_to_double_int (temp.op1)));
              off = double_int_mul (off, tree_to_double_int (temp.op2));
              if (double_int_fits_in_shwi_p (off))
                temp.off = off.low;
            }
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case VAR_DECL:
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (i.e. they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration.  */
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          temp.off = 0;
          break;
        case IMAGPART_EXPR:
          /* This is only interesting for its constant offset.  */
          temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
          break;
        default:
          gcc_unreachable ();
        }
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
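
/* Illustrative sketch: a reference such as a.b[i] decomposes
   (outermost first) into roughly

     { ARRAY_REF,     op0 = i, op1 = low bound, op2 = element size }
     { COMPONENT_REF, op0 = FIELD_DECL b }
     { VAR_DECL a }

   so the operand vector fully describes the access path and can be
   hashed and compared piecewise.  */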
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
        size = -1;
      else
        size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case ADDR_EXPR:
          /* Apart from ADDR_EXPR arguments to MEM_REF.  */
          if (base != NULL_TREE
              && TREE_CODE (base) == MEM_REF
              && op->op0
              && DECL_P (TREE_OPERAND (op->op0, 0)))
            {
              vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
              base = TREE_OPERAND (op->op0, 0);
              if (pop->off == -1)
                {
                  max_size = -1;
                  offset = 0;
                }
              else
                offset += pop->off * BITS_PER_UNIT;
              op0_p = NULL;
              break;
            }
          /* Fallthru.  */
        case CALL_EXPR:
          return false;

        /* Record the base objects.  */
        case MISALIGNED_INDIRECT_REF:
          *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case MEM_REF:
          base_alias_set = get_deref_alias_set (op->op0);
          *op0_p = build2 (MEM_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          op0_p = NULL;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += tree_low_cst (op->op1, 0);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */

            if (op->op1
                || !host_integerp (DECL_FIELD_OFFSET (field), 1))
              max_size = -1;
            else
              {
                offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
                           * BITS_PER_UNIT);
                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (!host_integerp (op->op0, 0)
              || !host_integerp (op->op1, 0)
              || !host_integerp (op->op2, 0))
            max_size = -1;
          else
            {
              HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
              hindex -= TREE_INT_CST_LOW (op->op1);
              hindex *= TREE_INT_CST_LOW (op->op2);
              hindex *= BITS_PER_UNIT;
              offset += hindex;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);

  return true;
}
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  temp.off = -1;
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}
/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
                            unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  tree addr_base;
  HOST_WIDE_INT addr_offset;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
                                             &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != op->op0)
    {
      double_int off = tree_to_double_int (mem_op->op0);
      off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
      off = double_int_add (off, shwi_to_double_int (addr_offset));
      mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (host_integerp (mem_op->op0, 0))
        mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
      else
        mem_op->off = -1;
    }
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
                                     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  gimple def_stmt;
  enum tree_code code;
  double_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = tree_to_double_int (mem_op->op0);
  off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
                                                 &addr_offset);
      if (!addr_base
          || TREE_CODE (addr_base) != MEM_REF)
        return;

      off = double_int_add (off, shwi_to_double_int (addr_offset));
      off = double_int_add (off, mem_ref_offset (addr_base));
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
          || TREE_CODE (ptroff) != INTEGER_CST)
        return;

      off = double_int_add (off, tree_to_double_int (ptroff));
      op->op0 = ptr;
    }

  mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (host_integerp (mem_op->op0, 0))
    mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    {
      op->op0 = SSA_VAL (op->op0);
      if (TREE_CODE (op->op0) != SSA_NAME)
        op->opcode = TREE_CODE (op->op0);
    }

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  VEC (vn_reference_op_s, heap) *operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = VEC_index (vn_reference_op_s, operands, 0);
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && VEC_length (vn_reference_op_s, operands) >= 2
      && VEC_length (vn_reference_op_s, operands) <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (VEC_length (vn_reference_op_s, operands) > 2)
        arg1 = VEC_index (vn_reference_op_s, operands, 2);
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
          || (arg0->opcode == ADDR_EXPR
              && is_gimple_min_invariant (arg0->op0)))
        anyconst = true;
      if (arg1
          && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
              || (arg1->opcode == ADDR_EXPR
                  && is_gimple_min_invariant (arg1->op0))))
        anyconst = true;
      if (anyconst)
        {
          tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
                                         arg1 ? 2 : 1,
                                         arg0->op0,
                                         arg1 ? arg1->op0 : NULL);
          if (folded
              && TREE_CODE (folded) == NOP_EXPR)
            folded = TREE_OPERAND (folded, 0);
          if (folded
              && is_gimple_min_invariant (folded))
            return folded;
        }
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
           && TREE_CODE (op->op0) == INTEGER_CST
           && integer_zerop (op->op1)
           && VEC_length (vn_reference_op_s, operands) == 2)
    {
      vn_reference_op_t arg0;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (arg0->opcode == STRING_CST
          && (TYPE_MODE (op->type)
              == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
          && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
          && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
          && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
        return build_int_cst_type (op->type,
                                   (TREE_STRING_POINTER (arg0->op0)
                                    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}
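
/* Illustrative example: a read from a constant string such as
   "abc"[1] arrives here as the operand vector

     { ARRAY_REF, op0 = 1, op1 = 0 } { STRING_CST "abc" }

   and folds to the character constant 'b'.  Similarly, a builtin call
   whose arguments turned out constant after valueization may fold to
   a constant via build_call_expr.  */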
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  vn_reference_op_t vro;
  unsigned int i;

  for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          vro->op0 = SSA_VAL (vro->op0);
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        vro->op1 = SSA_VAL (vro->op1);
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        vro->op2 = SSA_VAL (vro->op2);
      /* If it transforms from an SSA_NAME to an address, fold with
         a preceding indirect reference.  */
      if (i > 0
          && vro->op0
          && TREE_CODE (vro->op0) == ADDR_EXPR
          && VEC_index (vn_reference_op_s,
                        orig, i - 1)->opcode == MEM_REF)
        vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
               && vro->opcode == SSA_NAME
               && VEC_index (vn_reference_op_s,
                             orig, i - 1)->opcode == MEM_REF)
        vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
         one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
               && vro->off == -1
               && TREE_CODE (vro->op0) == INTEGER_CST
               && TREE_CODE (vro->op1) == INTEGER_CST
               && TREE_CODE (vro->op2) == INTEGER_CST)
        {
          double_int off = tree_to_double_int (vro->op0);
          off = double_int_add (off,
                                double_int_neg
                                  (tree_to_double_int (vro->op1)));
          off = double_int_mul (off, tree_to_double_int (vro->op2));
          if (double_int_fits_in_shwi_p (off))
            vro->off = off.low;
        }
    }

  return orig;
}
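
/* Illustrative note: valueization is what lets textually different
   references unify.  If i_2 has value number 4, the ops for a[i_2]
   are rewritten in-place into the ops for a[4], the ARRAY_REF's
   constant offset is filled in, and a later lookup of a[4] (or of any
   other name with value 4 as index) hits the same table entry.  */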
static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}
static tree *last_vuse_ptr;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree fndecl;
  tree base;
  HOST_WIDE_INT offset, maxsize;

  /* First try to disambiguate after value-replacing in the definitions LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      ao_ref ref1;
      VEC (vn_reference_op_s, heap) *operands = NULL;
      bool res = true;
      copy_reference_ops_from_ref (lhs, &operands);
      operands = valueize_refs (operands);
      if (ao_ref_init_from_vn_reference (&ref1, get_alias_set (lhs),
                                         TREE_TYPE (lhs), operands))
        res = refs_may_alias_p_1 (ref, &ref1, true);
      VEC_free (vn_reference_op_s, heap, operands);
      if (!res)
        return NULL;
    }

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && is_gimple_call (def_stmt)
      && (fndecl = gimple_call_fndecl (def_stmt))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
          == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = fold_convert (vr->type, integer_zero_node);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = fold_convert (vr->type, integer_zero_node);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      VEC (vn_reference_op_s, heap) *lhs = NULL, *rhs = NULL;
      vn_reference_op_t vro;
      ao_ref r;

      /* See if the assignment kills REF.  */
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (!operand_equal_p (base, base2, 0)
          || offset2 > offset
          || offset2 + size2 < offset + maxsize)
        return (void *)-1;

      /* Find the common base of ref and the lhs.  */
      copy_reference_ops_from_ref (gimple_assign_lhs (def_stmt), &lhs);
      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
      j = VEC_length (vn_reference_op_s, lhs) - 1;
      while (j >= 0 && i >= 0
             && vn_reference_op_eq (VEC_index (vn_reference_op_s,
                                               vr->operands, i),
                                    VEC_index (vn_reference_op_s, lhs, j)))
        {
          i--;
          j--;
        }

      VEC_free (vn_reference_op_s, heap, lhs);
      /* i now points to the first additional op.
         ??? LHS may not be completely contained in VR, one or more
         VIEW_CONVERT_EXPRs could be in its way.  We could at least
         try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
        return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
          > VEC_length (vn_reference_op_s, vr->operands))
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
                         i + 1 + VEC_length (vn_reference_op_s, rhs));
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands,
                      i + 1 + VEC_length (vn_reference_op_s, rhs));
      for (j = 0; VEC_iterate (vn_reference_op_s, rhs, j, vro); ++j)
        VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
      VEC_free (vn_reference_op_s, heap, rhs);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            vn_reference_t *vnresult, bool maywalk)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
                 VEC_length (vn_reference_op_s, operands));
  memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
          VEC_address (vn_reference_op_s, operands),
          sizeof (vn_reference_op_s)
          * VEC_length (vn_reference_op_s, operands));
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && maywalk
      && vr1.vuse)
    {
      ao_ref r;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
        *vnresult =
          (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, tree vuse, bool maywalk,
                     vn_reference_t *vnresult)
{
  VEC (vn_reference_op_s, heap) *operands;
  struct vn_reference_s vr1;
  tree cst;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
  vr1.type = TREE_TYPE (op);
  vr1.set = get_alias_set (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  if (maywalk
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      ao_ref_init (&r, op);
      wvnresult =
        (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                vn_reference_lookup_2,
                                                vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
      if (wvnresult)
        {
          if (vnresult)
            *vnresult = wvnresult;
          return wvnresult->result;
        }

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}
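
/* Usage sketch (illustrative): this is the main entry point a client
   such as PRE uses to ask "does this load already have a value?":

     vn_reference_t vnresult = NULL;
     tree val = vn_reference_lookup (gimple_assign_rhs1 (stmt),
                                     gimple_vuse (stmt), true, &vnresult);

   VAL is the value number or NULL_TREE; with MAYWALK true the lookup
   may walk the virtual use-def chain past stores that provably do not
   clobber the load.  */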
/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            tree result, unsigned int value_id)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Compute and return the hash value for nary operation VNO1.  */

hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  hash = iterative_hash_hashval_t (vno1->opcode, 0);
  for (i = 0; i < vno1->length; ++i)
    hash = iterative_hash_expr (vno1->op[i], hash);

  return hash;
}
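
/* Illustrative note: because operands of commutative codes are
   swapped into a canonical order before hashing, x_1 + y_2 and
   y_2 + x_1 produce identical vn_nary_op_s entries and therefore
   receive the same value number.  */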
/* Return the computed hashcode for nary operation P1.  */

static hashval_t
vn_nary_op_hash (const void *p1)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  return vno1->hashcode;
}
/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

int
vn_nary_op_eq (const void *p1, const void *p2)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}
/* Lookup a n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0, tree op1, tree op2,
                          tree op3, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = code;
  vno1.length = length;
  vno1.type = type;
  vno1.op[0] = op0;
  vno1.op[1] = op1;
  vno1.op[2] = op2;
  vno1.op[3] = op3;
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = TREE_CODE (op);
  vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno1.type = TREE_TYPE (op);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = TREE_OPERAND (op, i);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = gimple_assign_rhs_code (stmt);
  vno1.length = gimple_num_ops (stmt) - 1;
  vno1.type = gimple_expr_type (stmt);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = gimple_op (stmt, i + 1);
  if (vno1.opcode == REALPART_EXPR
      || vno1.opcode == IMAGPART_EXPR
      || vno1.opcode == VIEW_CONVERT_EXPR)
    vno1.op[0] = TREE_OPERAND (vno1.op[0], 0);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
                                   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
                                     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Insert a n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
                          tree type, tree op0,
                          tree op1, tree op2, tree op3,
                          tree result,
                          unsigned int value_id)
{
  void **slot;
  vn_nary_op_t vno1;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = value_id;
  vno1->opcode = code;
  vno1->length = length;
  vno1->type = type;
  if (length >= 1)
    vno1->op[0] = op0;
  if (length >= 2)
    vno1->op[1] = op1;
  if (length >= 3)
    vno1->op[2] = op2;
  if (length >= 4)
    vno1->op[3] = op3;
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = TREE_CODE (op);
  vno1->length = length;
  vno1->type = TREE_TYPE (op);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = TREE_OPERAND (op, i);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  unsigned length = gimple_num_ops (stmt) - 1;
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
                                       (sizeof (struct vn_nary_op_s)
                                        - sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = gimple_assign_rhs_code (stmt);
  vno1->length = length;
  vno1->type = gimple_expr_type (stmt);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = gimple_op (stmt, i + 1);
  if (vno1->opcode == REALPART_EXPR
      || vno1->opcode == IMAGPART_EXPR
      || vno1->opcode == VIEW_CONVERT_EXPR)
    vno1->op[0] = TREE_OPERAND (vno1->op[0], 0);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
                                   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
1905 /* Compute a hashcode for PHI operation VP1 and return it. */
1907 static inline hashval_t
1908 vn_phi_compute_hash (vn_phi_t vp1)
1910 hashval_t result;
1911 int i;
1912 tree phi1op;
1913 tree type;
1915 result = vp1->block->index;
1917 /* If all PHI arguments are constants we need to distinguish
1918 the PHI node via its type. */
1919 type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
1920 result += (INTEGRAL_TYPE_P (type)
1921 + (INTEGRAL_TYPE_P (type)
1922 ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
1924 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
1926 if (phi1op == VN_TOP)
1927 continue;
1928 result = iterative_hash_expr (phi1op, result);
1931 return result;
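/* For example (hypothetical GIMPLE), for
     x_3 = PHI <a_1(2), b_2(3)>
   in basic block 4 the hash starts from the block index 4, mixes in
   precision and signedness if the argument type is integral, and then
   iteratively hashes the arguments a_1 and b_2, skipping any argument
   that is still VN_TOP.  */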
1934 /* Return the computed hashcode for phi operation P1. */
1936 static hashval_t
1937 vn_phi_hash (const void *p1)
1939 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
1940 return vp1->hashcode;
1943 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
1945 static int
1946 vn_phi_eq (const void *p1, const void *p2)
1948 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
1949 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
1951 if (vp1->hashcode != vp2->hashcode)
1952 return false;
1954 if (vp1->block == vp2->block)
1956 int i;
1957 tree phi1op;
1959 /* If the PHI nodes do not have compatible types
1960 they are not the same. */
1961 if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
1962 TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
1963 return false;
1965 /* Any phi in the same block will have its arguments in the
1966 same edge order, because of how we store phi nodes. */
1967 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
1969 tree phi2op = VEC_index (tree, vp2->phiargs, i);
1970 if (phi1op == VN_TOP || phi2op == VN_TOP)
1971 continue;
1972 if (!expressions_equal_p (phi1op, phi2op))
1973 return false;
1975 return true;
1977 return false;
1980 static VEC(tree, heap) *shared_lookup_phiargs;
1982 /* Lookup PHI in the current hash table, and return the resulting
1983 value number if it exists in the hash table. Return NULL_TREE if
1984 it does not. */
1986 static tree
1987 vn_phi_lookup (gimple phi)
1989 void **slot;
1990 struct vn_phi_s vp1;
1991 unsigned i;
1993 VEC_truncate (tree, shared_lookup_phiargs, 0);
1995 /* Canonicalize the SSA_NAMEs to their value numbers. */
1996 for (i = 0; i < gimple_phi_num_args (phi); i++)
1998 tree def = PHI_ARG_DEF (phi, i);
1999 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2000 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
2002 vp1.phiargs = shared_lookup_phiargs;
2003 vp1.block = gimple_bb (phi);
2004 vp1.hashcode = vn_phi_compute_hash (&vp1);
2005 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
2006 NO_INSERT);
2007 if (!slot && current_info == optimistic_info)
2008 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
2009 NO_INSERT);
2010 if (!slot)
2011 return NULL_TREE;
2012 return ((vn_phi_t)*slot)->result;
2015 /* Insert PHI into the current hash table with a value number of
2016 RESULT. */
2018 static vn_phi_t
2019 vn_phi_insert (gimple phi, tree result)
2021 void **slot;
2022 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2023 unsigned i;
2024 VEC (tree, heap) *args = NULL;
2026 /* Canonicalize the SSA_NAMEs to their value numbers. */
2027 for (i = 0; i < gimple_phi_num_args (phi); i++)
2029 tree def = PHI_ARG_DEF (phi, i);
2030 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2031 VEC_safe_push (tree, heap, args, def);
2033 vp1->value_id = VN_INFO (result)->value_id;
2034 vp1->phiargs = args;
2035 vp1->block = gimple_bb (phi);
2036 vp1->result = result;
2037 vp1->hashcode = vn_phi_compute_hash (vp1);
2039 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
2040 INSERT);
2042 /* Because we iterate over phi operations more than once, it's
2043 possible the slot might already exist here, hence no assert. */
2044 *slot = vp1;
2045 return vp1;
2049 /* Print the set of components in the strongly connected component SCC to OUT. */
2051 static void
2052 print_scc (FILE *out, VEC (tree, heap) *scc)
2054 tree var;
2055 unsigned int i;
2057 fprintf (out, "SCC consists of: ");
2058 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2060 print_generic_expr (out, var, 0);
2061 fprintf (out, " ");
2063 fprintf (out, "\n");
2066 /* Set the value number of FROM to TO, return true if it has changed
2067 as a result. */
2069 static inline bool
2070 set_ssa_val_to (tree from, tree to)
2072 tree currval;
2074 if (from != to
2075 && TREE_CODE (to) == SSA_NAME
2076 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2077 to = from;
2079 /* The only things we allow as value numbers are VN_TOP, SSA_NAMEs
2080 and invariants. So assert that here. */
2081 gcc_assert (to != NULL_TREE
2082 && (to == VN_TOP
2083 || TREE_CODE (to) == SSA_NAME
2084 || is_gimple_min_invariant (to)));
2086 if (dump_file && (dump_flags & TDF_DETAILS))
2088 fprintf (dump_file, "Setting value number of ");
2089 print_generic_expr (dump_file, from, 0);
2090 fprintf (dump_file, " to ");
2091 print_generic_expr (dump_file, to, 0);
2094 currval = SSA_VAL (from);
2096 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
2098 VN_INFO (from)->valnum = to;
2099 if (dump_file && (dump_flags & TDF_DETAILS))
2100 fprintf (dump_file, " (changed)\n");
2101 return true;
2103 if (dump_file && (dump_flags & TDF_DETAILS))
2104 fprintf (dump_file, "\n");
2105 return false;
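/* Hypothetical motivation for the abnormal-PHI check above: if TO
   occurs in a PHI on an abnormal edge (e.g. at a setjmp receiver),
   substituting it for FROM could create overlapping lifetimes that
   cannot be coalesced, so we conservatively keep FROM as its own
   value.  */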
2108 /* Value number all definitions in STMT to themselves.
2109 Return true if a value number changed. */
2111 static bool
2112 defs_to_varying (gimple stmt)
2114 bool changed = false;
2115 ssa_op_iter iter;
2116 def_operand_p defp;
2118 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2120 tree def = DEF_FROM_PTR (defp);
2122 VN_INFO (def)->use_processed = true;
2123 changed |= set_ssa_val_to (def, def);
2125 return changed;
2128 static bool expr_has_constants (tree expr);
2129 static tree valueize_expr (tree expr);
2131 /* Visit a copy between LHS and RHS, return true if the value number
2132 changed. */
2134 static bool
2135 visit_copy (tree lhs, tree rhs)
2137 /* Follow chains of copies to their destination. */
2138 while (TREE_CODE (rhs) == SSA_NAME
2139 && SSA_VAL (rhs) != rhs)
2140 rhs = SSA_VAL (rhs);
2142 /* The copy may have a more interesting constant-filled expression
2143 (we don't, since we know our RHS is just an SSA name). */
2144 if (TREE_CODE (rhs) == SSA_NAME)
2146 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2147 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2150 return set_ssa_val_to (lhs, rhs);
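/* A hypothetical copy chain illustrating the loop above:
     b_2 = a_1;  c_3 = b_2;
   When c_3 is visited, SSA_VAL (b_2) already points to a_1, so the
   walk terminates at a_1 and both copies share a_1's value number,
   expression and has_constants flag.  */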
2153 /* Visit a unary operator RHS, value number it, and return true if the
2154 value number of LHS has changed as a result. */
2156 static bool
2157 visit_unary_op (tree lhs, gimple stmt)
2159 bool changed = false;
2160 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2162 if (result)
2164 changed = set_ssa_val_to (lhs, result);
2166 else
2168 changed = set_ssa_val_to (lhs, lhs);
2169 vn_nary_op_insert_stmt (stmt, lhs);
2172 return changed;
2175 /* Visit a binary operator RHS, value number it, and return true if the
2176 value number of LHS has changed as a result. */
2178 static bool
2179 visit_binary_op (tree lhs, gimple stmt)
2181 bool changed = false;
2182 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2184 if (result)
2186 changed = set_ssa_val_to (lhs, result);
2188 else
2190 changed = set_ssa_val_to (lhs, lhs);
2191 vn_nary_op_insert_stmt (stmt, lhs);
2194 return changed;
2197 /* Visit a call STMT storing into LHS. Return true if the value number
2198 of the LHS has changed as a result. */
2200 static bool
2201 visit_reference_op_call (tree lhs, gimple stmt)
2203 bool changed = false;
2204 struct vn_reference_s vr1;
2205 tree result;
2206 tree vuse = gimple_vuse (stmt);
2208 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2209 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2210 vr1.type = gimple_expr_type (stmt);
2211 vr1.set = 0;
2212 vr1.hashcode = vn_reference_compute_hash (&vr1);
2213 result = vn_reference_lookup_1 (&vr1, NULL);
2214 if (result)
2216 changed = set_ssa_val_to (lhs, result);
2217 if (TREE_CODE (result) == SSA_NAME
2218 && VN_INFO (result)->has_constants)
2219 VN_INFO (lhs)->has_constants = true;
2221 else
2223 void **slot;
2224 vn_reference_t vr2;
2225 changed = set_ssa_val_to (lhs, lhs);
2226 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2227 vr2->vuse = vr1.vuse;
2228 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2229 vr2->type = vr1.type;
2230 vr2->set = vr1.set;
2231 vr2->hashcode = vr1.hashcode;
2232 vr2->result = lhs;
2233 slot = htab_find_slot_with_hash (current_info->references,
2234 vr2, vr2->hashcode, INSERT);
2235 if (*slot)
2236 free_reference (*slot);
2237 *slot = vr2;
2240 return changed;
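/* Illustrative example (hypothetical): two calls to a pure function
     x_1 = foo (a_2);  ...  y_3 = foo (a_2);
   with no intervening store see the same valueized VUSE, produce the
   same operand list and hash, so the second lookup succeeds and y_3
   is value numbered to x_1.  Only ECF_PURE/ECF_CONST calls get here
   (see visit_use below).  */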
2243 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2244 and return true if the value number of the LHS has changed as a result. */
2246 static bool
2247 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2249 bool changed = false;
2250 tree last_vuse;
2251 tree result;
2253 last_vuse = gimple_vuse (stmt);
2254 last_vuse_ptr = &last_vuse;
2255 result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);
2256 last_vuse_ptr = NULL;
2258 /* If we have a VCE, try looking up its operand as it might be stored in
2259 a different type. */
2260 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2261 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2262 true, NULL);
2264 /* We handle type-punning through unions by value-numbering based
2265 on offset and size of the access. Be prepared to handle a
2266 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
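/* For example (illustrative only):
     union { int i; float f; } u;
     u.i = i_1;  x_2 = u.f;
   The load is looked up by offset and size only, so it can find the
   int-typed stored value; the code below then builds
   VIEW_CONVERT_EXPR <float> (...) to repair the type mismatch.  */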
2267 if (result
2268 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2270 /* We will be setting the value number of lhs to the value number
2271 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2272 So first simplify and lookup this expression to see if it
2273 is already available. */
2274 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2275 if ((CONVERT_EXPR_P (val)
2276 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2277 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2279 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2280 if ((CONVERT_EXPR_P (tem)
2281 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2282 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2283 TREE_TYPE (val), tem)))
2284 val = tem;
2286 result = val;
2287 if (!is_gimple_min_invariant (val)
2288 && TREE_CODE (val) != SSA_NAME)
2289 result = vn_nary_op_lookup (val, NULL);
2290 /* If the expression is not yet available, value-number lhs to
2291 a new SSA_NAME we create. */
2292 if (!result)
2294 result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
2295 /* Initialize value-number information properly. */
2296 VN_INFO_GET (result)->valnum = result;
2297 VN_INFO (result)->value_id = get_next_value_id ();
2298 VN_INFO (result)->expr = val;
2299 VN_INFO (result)->has_constants = expr_has_constants (val);
2300 VN_INFO (result)->needs_insertion = true;
2301 /* As all "inserted" statements are singleton SCCs, insert
2302 into the valid table. This is strictly needed to
2303 avoid re-generating new value SSA_NAMEs for the same
2304 expression during SCC iteration over and over (the
2305 optimistic table gets cleared after each iteration).
2306 We do not need to insert into the optimistic table, as
2307 lookups there will fall back to the valid table. */
2308 if (current_info == optimistic_info)
2310 current_info = valid_info;
2311 vn_nary_op_insert (val, result);
2312 current_info = optimistic_info;
2314 else
2315 vn_nary_op_insert (val, result);
2316 if (dump_file && (dump_flags & TDF_DETAILS))
2318 fprintf (dump_file, "Inserting name ");
2319 print_generic_expr (dump_file, result, 0);
2320 fprintf (dump_file, " for expression ");
2321 print_generic_expr (dump_file, val, 0);
2322 fprintf (dump_file, "\n");
2327 if (result)
2329 changed = set_ssa_val_to (lhs, result);
2330 if (TREE_CODE (result) == SSA_NAME
2331 && VN_INFO (result)->has_constants)
2333 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2334 VN_INFO (lhs)->has_constants = true;
2337 else
2339 changed = set_ssa_val_to (lhs, lhs);
2340 vn_reference_insert (op, lhs, last_vuse);
2343 return changed;
2347 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2348 and return true if the value number of the LHS has changed as a result. */
2350 static bool
2351 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2353 bool changed = false;
2354 tree result;
2355 bool resultsame = false;
2357 /* First we want to lookup using the *vuses* from the store and see
2358 if the last store to this location with the same address had
2359 the same value.
2361 The vuses represent the memory state before the store. If the
2362 memory state, address, and value of the store are the same as the
2363 last store to this location, then this store will produce the
2364 same memory state as that store.
2366 In this case the vdef versions for this store are value numbered to those
2367 vuse versions, since they represent the same memory state after
2368 this store.
2370 Otherwise, the vdefs for the store are used when inserting into
2371 the table, since the store generates a new memory state. */
2373 result = vn_reference_lookup (lhs, gimple_vuse (stmt), false, NULL);
2375 if (result)
2377 if (TREE_CODE (result) == SSA_NAME)
2378 result = SSA_VAL (result);
2379 if (TREE_CODE (op) == SSA_NAME)
2380 op = SSA_VAL (op);
2381 resultsame = expressions_equal_p (result, op);
2384 if (!result || !resultsame)
2386 tree vdef;
2388 if (dump_file && (dump_flags & TDF_DETAILS))
2390 fprintf (dump_file, "No store match\n");
2391 fprintf (dump_file, "Value numbering store ");
2392 print_generic_expr (dump_file, lhs, 0);
2393 fprintf (dump_file, " to ");
2394 print_generic_expr (dump_file, op, 0);
2395 fprintf (dump_file, "\n");
2397 /* Have to set value numbers before insert, since insert is
2398 going to valueize the references in-place. */
2399 if ((vdef = gimple_vdef (stmt)))
2401 VN_INFO (vdef)->use_processed = true;
2402 changed |= set_ssa_val_to (vdef, vdef);
2405 /* Do not insert structure copies into the tables. */
2406 if (is_gimple_min_invariant (op)
2407 || is_gimple_reg (op))
2408 vn_reference_insert (lhs, op, vdef);
2410 else
2412 /* We had a match, so value number the vdef to have the value
2413 number of the vuse it came from. */
2414 tree def, use;
2416 if (dump_file && (dump_flags & TDF_DETAILS))
2417 fprintf (dump_file, "Store matched earlier value, "
2418 "value numbering store vdefs to matching vuses.\n");
2420 def = gimple_vdef (stmt);
2421 use = gimple_vuse (stmt);
2423 VN_INFO (def)->use_processed = true;
2424 changed |= set_ssa_val_to (def, SSA_VAL (use));
2427 return changed;
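/* Illustrative example with hypothetical virtual operands:
     # .MEM_3 = VDEF <.MEM_2>
     *p_1 = x_4;
   If the lookup shows *p_1 already contained x_4's value in the
   memory state .MEM_2, the store changes nothing, and .MEM_3 is
   value numbered to SSA_VAL (.MEM_2) instead of to itself.  */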
2430 /* Visit and value number PHI, return true if the value number
2431 changed. */
2433 static bool
2434 visit_phi (gimple phi)
2436 bool changed = false;
2437 tree result;
2438 tree sameval = VN_TOP;
2439 bool allsame = true;
2440 unsigned i;
2442 /* TODO: We could check for this in init_sccvn, and replace this
2443 with a gcc_assert. */
2444 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2445 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2447 /* See if all non-TOP arguments have the same value. TOP is
2448 equivalent to everything, so we can ignore it. */
2449 for (i = 0; i < gimple_phi_num_args (phi); i++)
2451 tree def = PHI_ARG_DEF (phi, i);
2453 if (TREE_CODE (def) == SSA_NAME)
2454 def = SSA_VAL (def);
2455 if (def == VN_TOP)
2456 continue;
2457 if (sameval == VN_TOP)
2459 sameval = def;
2461 else
2463 if (!expressions_equal_p (def, sameval))
2465 allsame = false;
2466 break;
2471 /* If all arguments value numbered to the same value, the phi node
2472 has that value. */
2473 if (allsame)
2475 if (is_gimple_min_invariant (sameval))
2477 VN_INFO (PHI_RESULT (phi))->has_constants = true;
2478 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2480 else
2482 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2483 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2486 if (TREE_CODE (sameval) == SSA_NAME)
2487 return visit_copy (PHI_RESULT (phi), sameval);
2489 return set_ssa_val_to (PHI_RESULT (phi), sameval);
2492 /* Otherwise, see if it is equivalent to a phi node in this block. */
2493 result = vn_phi_lookup (phi);
2494 if (result)
2496 if (TREE_CODE (result) == SSA_NAME)
2497 changed = visit_copy (PHI_RESULT (phi), result);
2498 else
2499 changed = set_ssa_val_to (PHI_RESULT (phi), result);
2501 else
2503 vn_phi_insert (phi, PHI_RESULT (phi));
2504 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2505 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2506 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2509 return changed;
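/* Hypothetical examples: x_3 = PHI <5(2), 5(3)> value numbers x_3 to
   the constant 5; x_4 = PHI <a_1(2), VN_TOP(3)> treats VN_TOP as
   equivalent to everything and value numbers x_4 to a_1.  PHIs whose
   arguments differ can still become congruent to another PHI in the
   same block through vn_phi_lookup.  */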
2512 /* Return true if EXPR contains constants. */
2514 static bool
2515 expr_has_constants (tree expr)
2517 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2519 case tcc_unary:
2520 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2522 case tcc_binary:
2523 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2524 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2525 /* Constants inside reference ops are rarely interesting, but
2526 it can take a lot of looking to find them. */
2527 case tcc_reference:
2528 case tcc_declaration:
2529 return false;
2530 default:
2531 return is_gimple_min_invariant (expr);
2533 return false;
2536 /* Return true if STMT contains constants. */
2538 static bool
2539 stmt_has_constants (gimple stmt)
2541 if (gimple_code (stmt) != GIMPLE_ASSIGN)
2542 return false;
2544 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2546 case GIMPLE_UNARY_RHS:
2547 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2549 case GIMPLE_BINARY_RHS:
2550 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2551 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
2552 case GIMPLE_TERNARY_RHS:
2553 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2554 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
2555 || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
2556 case GIMPLE_SINGLE_RHS:
2557 /* Constants inside reference ops are rarely interesting, but
2558 it can take a lot of looking to find them. */
2559 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2560 default:
2561 gcc_unreachable ();
2563 return false;
2566 /* Replace SSA_NAMEs in EXPR with their value numbers, and return the
2567 result.
2568 This is performed in place. */
2570 static tree
2571 valueize_expr (tree expr)
2573 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2575 case tcc_unary:
2576 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2577 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2578 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2579 break;
2580 case tcc_binary:
2581 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2582 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2583 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2584 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
2585 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
2586 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
2587 break;
2588 default:
2589 break;
2591 return expr;
2594 /* Simplify the binary expression RHS, and return the result if
2595 simplified. */
2597 static tree
2598 simplify_binary_expression (gimple stmt)
2600 tree result = NULL_TREE;
2601 tree op0 = gimple_assign_rhs1 (stmt);
2602 tree op1 = gimple_assign_rhs2 (stmt);
2604 /* This will not catch every single case we could combine, but will
2605 catch those with constants. The goal here is to combine constants
2606 between expressions while avoiding infinite expansion of
2607 expressions during simplification. */
2608 if (TREE_CODE (op0) == SSA_NAME)
2610 if (VN_INFO (op0)->has_constants
2611 || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
2612 op0 = valueize_expr (vn_get_expr_for (op0));
2613 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
2614 op0 = SSA_VAL (op0);
2617 if (TREE_CODE (op1) == SSA_NAME)
2619 if (VN_INFO (op1)->has_constants)
2620 op1 = valueize_expr (vn_get_expr_for (op1));
2621 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
2622 op1 = SSA_VAL (op1);
2625 /* Avoid folding if nothing changed. */
2626 if (op0 == gimple_assign_rhs1 (stmt)
2627 && op1 == gimple_assign_rhs2 (stmt))
2628 return NULL_TREE;
2630 fold_defer_overflow_warnings ();
2632 result = fold_binary (gimple_assign_rhs_code (stmt),
2633 gimple_expr_type (stmt), op0, op1);
2634 if (result)
2635 STRIP_USELESS_TYPE_CONVERSION (result);
2637 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
2638 stmt, 0);
2640 /* Make sure the result is not a complex expression consisting
2641 of operators of operators (i.e. (a + b) + (a + c)).
2642 Otherwise, we will end up with unbounded expressions if
2643 fold does anything at all. */
2644 if (result && valid_gimple_rhs_p (result))
2645 return result;
2647 return NULL_TREE;
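/* A hypothetical simplification enabled above: given
     a_1 = b_2 + 1;  c_3 = a_1 + 2;
   a_1 has constants, so it is valueized to b_2 + 1 and fold_binary
   sees (b_2 + 1) + 2, which it can combine to b_2 + 3.  The
   valid_gimple_rhs_p check then rejects any result that would not be
   a flat GIMPLE rhs, preventing unbounded expression growth.  */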
2650 /* Simplify the unary expression RHS, and return the result if
2651 simplified. */
2653 static tree
2654 simplify_unary_expression (gimple stmt)
2656 tree result = NULL_TREE;
2657 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
2659 /* We handle some tcc_reference codes here that are all
2660 GIMPLE_ASSIGN_SINGLE codes. */
2661 if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
2662 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2663 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2664 op0 = TREE_OPERAND (op0, 0);
2666 if (TREE_CODE (op0) != SSA_NAME)
2667 return NULL_TREE;
2669 orig_op0 = op0;
2670 if (VN_INFO (op0)->has_constants)
2671 op0 = valueize_expr (vn_get_expr_for (op0));
2672 else if (gimple_assign_cast_p (stmt)
2673 || gimple_assign_rhs_code (stmt) == REALPART_EXPR
2674 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2675 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2677 /* We want to do tree-combining on conversion-like expressions.
2678 Make sure we feed only SSA_NAMEs or constants to fold though. */
2679 tree tem = valueize_expr (vn_get_expr_for (op0));
2680 if (UNARY_CLASS_P (tem)
2681 || BINARY_CLASS_P (tem)
2682 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
2683 || TREE_CODE (tem) == SSA_NAME
2684 || is_gimple_min_invariant (tem))
2685 op0 = tem;
2688 /* Avoid folding if nothing changed, but remember the expression. */
2689 if (op0 == orig_op0)
2690 return NULL_TREE;
2692 result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
2693 gimple_expr_type (stmt), op0);
2694 if (result)
2696 STRIP_USELESS_TYPE_CONVERSION (result);
2697 if (valid_gimple_rhs_p (result))
2698 return result;
2701 return NULL_TREE;
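/* Hypothetical example: given
     s_1 = (short int) i_2;  j_3 = (int) s_1;
   s_1 is replaced by its defining conversion, so fold sees
   (int)(short int) i_2 and may combine or drop the intermediate
   conversion where that is value preserving.  */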
2704 /* Try to simplify RHS using equivalences and constant folding. */
2706 static tree
2707 try_to_simplify (gimple stmt)
2709 tree tem;
2711 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
2712 in this case; there is no point in doing extra work. */
2713 if (gimple_assign_copy_p (stmt)
2714 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2715 return NULL_TREE;
2717 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2719 case tcc_declaration:
2720 tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
2721 if (tem)
2722 return tem;
2723 break;
2725 case tcc_reference:
2726 /* Do not do full-blown reference lookup here, but simplify
2727 reads from constant aggregates. */
2728 tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
2729 if (tem)
2730 return tem;
2732 /* Fallthrough for some codes that can operate on registers. */
2733 if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
2734 || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
2735 || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
2736 break;
2737 /* We could do a little more with unary ops, if they expand
2738 into binary ops, but it's debatable whether it is worth it. */
2739 case tcc_unary:
2740 return simplify_unary_expression (stmt);
2741 break;
2742 case tcc_comparison:
2743 case tcc_binary:
2744 return simplify_binary_expression (stmt);
2745 break;
2746 default:
2747 break;
2750 return NULL_TREE;
2753 /* Visit and value number USE, return true if the value number
2754 changed. */
2756 static bool
2757 visit_use (tree use)
2759 bool changed = false;
2760 gimple stmt = SSA_NAME_DEF_STMT (use);
2762 VN_INFO (use)->use_processed = true;
2764 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
2765 if (dump_file && (dump_flags & TDF_DETAILS)
2766 && !SSA_NAME_IS_DEFAULT_DEF (use))
2768 fprintf (dump_file, "Value numbering ");
2769 print_generic_expr (dump_file, use, 0);
2770 fprintf (dump_file, " stmt = ");
2771 print_gimple_stmt (dump_file, stmt, 0, 0);
2774 /* Handle uninitialized uses. */
2775 if (SSA_NAME_IS_DEFAULT_DEF (use))
2776 changed = set_ssa_val_to (use, use);
2777 else
2779 if (gimple_code (stmt) == GIMPLE_PHI)
2780 changed = visit_phi (stmt);
2781 else if (!gimple_has_lhs (stmt)
2782 || gimple_has_volatile_ops (stmt)
2783 || stmt_could_throw_p (stmt))
2784 changed = defs_to_varying (stmt);
2785 else if (is_gimple_assign (stmt))
2787 tree lhs = gimple_assign_lhs (stmt);
2788 tree simplified;
2790 /* Shortcut for copies. Simplifying copies is pointless,
2791 since we copy the expression and value they represent. */
2792 if (gimple_assign_copy_p (stmt)
2793 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2794 && TREE_CODE (lhs) == SSA_NAME)
2796 changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
2797 goto done;
2799 simplified = try_to_simplify (stmt);
2800 if (simplified)
2802 if (dump_file && (dump_flags & TDF_DETAILS))
2804 fprintf (dump_file, "RHS ");
2805 print_gimple_expr (dump_file, stmt, 0, 0);
2806 fprintf (dump_file, " simplified to ");
2807 print_generic_expr (dump_file, simplified, 0);
2808 if (TREE_CODE (lhs) == SSA_NAME)
2809 fprintf (dump_file, " has constants %d\n",
2810 expr_has_constants (simplified));
2811 else
2812 fprintf (dump_file, "\n");
2815 /* Setting value numbers to constants will occasionally
2816 screw up phi congruence because constants are not
2817 uniquely associated with a single ssa name that can be
2818 looked up. */
2819 if (simplified
2820 && is_gimple_min_invariant (simplified)
2821 && TREE_CODE (lhs) == SSA_NAME)
2823 VN_INFO (lhs)->expr = simplified;
2824 VN_INFO (lhs)->has_constants = true;
2825 changed = set_ssa_val_to (lhs, simplified);
2826 goto done;
2828 else if (simplified
2829 && TREE_CODE (simplified) == SSA_NAME
2830 && TREE_CODE (lhs) == SSA_NAME)
2832 changed = visit_copy (lhs, simplified);
2833 goto done;
2835 else if (simplified)
2837 if (TREE_CODE (lhs) == SSA_NAME)
2839 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
2840 /* We have to unshare the expression or else
2841 valueizing may change the IL stream. */
2842 VN_INFO (lhs)->expr = unshare_expr (simplified);
2845 else if (stmt_has_constants (stmt)
2846 && TREE_CODE (lhs) == SSA_NAME)
2847 VN_INFO (lhs)->has_constants = true;
2848 else if (TREE_CODE (lhs) == SSA_NAME)
2850 /* We reset expr and constantness here because we may
2851 have been value numbering optimistically, and
2852 iterating. They may become non-constant in this case,
2853 even if they were optimistically constant. */
2855 VN_INFO (lhs)->has_constants = false;
2856 VN_INFO (lhs)->expr = NULL_TREE;
2859 if ((TREE_CODE (lhs) == SSA_NAME
2860 /* We can substitute SSA_NAMEs that are live over
2861 abnormal edges with their constant value. */
2862 && !(gimple_assign_copy_p (stmt)
2863 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2864 && !(simplified
2865 && is_gimple_min_invariant (simplified))
2866 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2867 /* Stores or copies from SSA_NAMEs that are live over
2868 abnormal edges are a problem. */
2869 || (gimple_assign_single_p (stmt)
2870 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2871 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
2872 changed = defs_to_varying (stmt);
2873 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
2875 changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
2877 else if (TREE_CODE (lhs) == SSA_NAME)
2879 if ((gimple_assign_copy_p (stmt)
2880 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2881 || (simplified
2882 && is_gimple_min_invariant (simplified)))
2884 VN_INFO (lhs)->has_constants = true;
2885 if (simplified)
2886 changed = set_ssa_val_to (lhs, simplified);
2887 else
2888 changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
2890 else
2892 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2894 case GIMPLE_UNARY_RHS:
2895 changed = visit_unary_op (lhs, stmt);
2896 break;
2897 case GIMPLE_BINARY_RHS:
2898 changed = visit_binary_op (lhs, stmt);
2899 break;
2900 case GIMPLE_SINGLE_RHS:
2901 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2903 case tcc_reference:
2904 /* VOP-less references can go through the unary case. */
2905 if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
2906 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2907 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR )
2908 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
2910 changed = visit_unary_op (lhs, stmt);
2911 break;
2913 /* Fallthrough. */
2914 case tcc_declaration:
2915 changed = visit_reference_op_load
2916 (lhs, gimple_assign_rhs1 (stmt), stmt);
2917 break;
2918 case tcc_expression:
2919 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
2921 changed = visit_unary_op (lhs, stmt);
2922 break;
2924 /* Fallthrough. */
2925 default:
2926 changed = defs_to_varying (stmt);
2928 break;
2929 default:
2930 changed = defs_to_varying (stmt);
2931 break;
2935 else
2936 changed = defs_to_varying (stmt);
2938 else if (is_gimple_call (stmt))
2940 tree lhs = gimple_call_lhs (stmt);
2942 /* ??? We could try to simplify calls. */
2944 if (stmt_has_constants (stmt)
2945 && TREE_CODE (lhs) == SSA_NAME)
2946 VN_INFO (lhs)->has_constants = true;
2947 else if (TREE_CODE (lhs) == SSA_NAME)
2949 /* We reset expr and constantness here because we may
2950 have been value numbering optimistically, and
2951 iterating. They may become non-constant in this case,
2952 even if they were optimistically constant. */
2953 VN_INFO (lhs)->has_constants = false;
2954 VN_INFO (lhs)->expr = NULL_TREE;
2957 if (TREE_CODE (lhs) == SSA_NAME
2958 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2959 changed = defs_to_varying (stmt);
2960 /* ??? We should handle stores from calls. */
2961 else if (TREE_CODE (lhs) == SSA_NAME)
2963 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
2964 changed = visit_reference_op_call (lhs, stmt);
2965 else
2966 changed = defs_to_varying (stmt);
2968 else
2969 changed = defs_to_varying (stmt);
2972 done:
2973 return changed;
2976 /* Compare two operands by reverse postorder index. */
2978 static int
2979 compare_ops (const void *pa, const void *pb)
2981 const tree opa = *((const tree *)pa);
2982 const tree opb = *((const tree *)pb);
2983 gimple opstmta = SSA_NAME_DEF_STMT (opa);
2984 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
2985 basic_block bba;
2986 basic_block bbb;
2988 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
2989 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2990 else if (gimple_nop_p (opstmta))
2991 return -1;
2992 else if (gimple_nop_p (opstmtb))
2993 return 1;
2995 bba = gimple_bb (opstmta);
2996 bbb = gimple_bb (opstmtb);
2998 if (!bba && !bbb)
2999 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3000 else if (!bba)
3001 return -1;
3002 else if (!bbb)
3003 return 1;
3005 if (bba == bbb)
3007 if (gimple_code (opstmta) == GIMPLE_PHI
3008 && gimple_code (opstmtb) == GIMPLE_PHI)
3009 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3010 else if (gimple_code (opstmta) == GIMPLE_PHI)
3011 return -1;
3012 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3013 return 1;
3014 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3015 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3016 else
3017 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3019 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
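/* Illustrative ordering (hypothetical): names with nop def stmts
   (default definitions) sort first, then names by the RPO number of
   their defining block; within one block PHIs precede ordinary
   statements, which compare by their UIDs.  Sorting an SCC this way
   lets each iteration visit members roughly in execution order,
   which helps convergence.  */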
3022 /* Sort an array containing members of a strongly connected component
3023 SCC so that the members are ordered by RPO number.
3024 This means that when the sort is complete, iterating through the
3025 array will give you the members in RPO order. */
3027 static void
3028 sort_scc (VEC (tree, heap) *scc)
3030 qsort (VEC_address (tree, scc),
3031 VEC_length (tree, scc),
3032 sizeof (tree),
3033 compare_ops);
3036 /* Insert the no longer used nary ONARY into the hash table of INFO. */
3038 static void
3039 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3041 size_t size = (sizeof (struct vn_nary_op_s)
3042 - sizeof (tree) * (4 - onary->length));
3043 vn_nary_op_t nary = (vn_nary_op_t) obstack_alloc (&info->nary_obstack, size);
3044 void **slot;
3045 memcpy (nary, onary, size);
3046 slot = htab_find_slot_with_hash (info->nary, nary, nary->hashcode, INSERT);
3047 gcc_assert (!*slot);
3048 *slot = nary;
3051 /* Insert the no longer used phi OPHI into the hash table of INFO. */
3053 static void
3054 copy_phi (vn_phi_t ophi, vn_tables_t info)
3056 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3057 void **slot;
3058 memcpy (phi, ophi, sizeof (*phi));
3059 ophi->phiargs = NULL;
3060 slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
3061 gcc_assert (!*slot);
3062 *slot = phi;
3065 /* Insert the no longer used reference OREF into the hash table of INFO. */
3067 static void
3068 copy_reference (vn_reference_t oref, vn_tables_t info)
3070 vn_reference_t ref;
3071 void **slot;
3072 ref = (vn_reference_t) pool_alloc (info->references_pool);
3073 memcpy (ref, oref, sizeof (*ref));
3074 oref->operands = NULL;
3075 slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
3076 INSERT);
3077 if (*slot)
3078 free_reference (*slot);
3079 *slot = ref;
3082 /* Process a strongly connected component in the SSA graph. */
3084 static void
3085 process_scc (VEC (tree, heap) *scc)
3087 tree var;
3088 unsigned int i;
3089 unsigned int iterations = 0;
3090 bool changed = true;
3091 htab_iterator hi;
3092 vn_nary_op_t nary;
3093 vn_phi_t phi;
3094 vn_reference_t ref;
3096 /* If the SCC has a single member, just visit it. */
3097 if (VEC_length (tree, scc) == 1)
3099 tree use = VEC_index (tree, scc, 0);
3100 if (!VN_INFO (use)->use_processed)
3101 visit_use (use);
3102 return;
3105 /* Iterate over the SCC with the optimistic table until it stops
3106 changing. */
3107 current_info = optimistic_info;
3108 while (changed)
3110 changed = false;
3111 iterations++;
3112 /* As we are value-numbering optimistically we have to
3113 clear the expression tables and the simplified expressions
3114 in each iteration until we converge. */
3115 htab_empty (optimistic_info->nary);
3116 htab_empty (optimistic_info->phis);
3117 htab_empty (optimistic_info->references);
3118 obstack_free (&optimistic_info->nary_obstack, NULL);
3119 gcc_obstack_init (&optimistic_info->nary_obstack);
3120 empty_alloc_pool (optimistic_info->phis_pool);
3121 empty_alloc_pool (optimistic_info->references_pool);
3122 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
3123 VN_INFO (var)->expr = NULL_TREE;
3124 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
3125 changed |= visit_use (var);
3128 statistics_histogram_event (cfun, "SCC iterations", iterations);
3130 /* Finally, copy the contents of the no longer used optimistic
3131 table to the valid table. */
3132 FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
3133 copy_nary (nary, valid_info);
3134 FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
3135 copy_phi (phi, valid_info);
3136 FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
3137 copy_reference (ref, valid_info);
3139 current_info = valid_info;
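/* A hypothetical SCC showing the optimistic iteration above:
     i_1 = PHI <0(entry), i_2(latch)>
     i_2 = i_1 + 0;
   In the first iteration i_2 is still VN_TOP, so the PHI
   optimistically values i_1 to 0 and i_2 then simplifies to 0 as
   well.  The second iteration sees PHI <0, 0>, nothing changes, and
   the converged entries are copied into valid_info.  */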
3142 DEF_VEC_O(ssa_op_iter);
3143 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
3145 /* Pop the components of the found SCC for NAME off the SCC stack
3146 and process them. Returns true if all went well, false if
3147 we run into resource limits. */
3149 static bool
3150 extract_and_process_scc_for_name (tree name)
3152 VEC (tree, heap) *scc = NULL;
3153 tree x;
3155 /* Found an SCC, pop the components off the SCC stack and
3156 process them. */
3159 x = VEC_pop (tree, sccstack);
3161 VN_INFO (x)->on_sccstack = false;
3162 VEC_safe_push (tree, heap, scc, x);
3163 } while (x != name);
3165 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
3166 if (VEC_length (tree, scc)
3167 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3169 if (dump_file)
3170 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3171 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
3172 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3173 return false;
3176 if (VEC_length (tree, scc) > 1)
3177 sort_scc (scc);
3179 if (dump_file && (dump_flags & TDF_DETAILS))
3180 print_scc (dump_file, scc);
3182 process_scc (scc);
3184 VEC_free (tree, heap, scc);
3186 return true;
3189 /* Depth first search on NAME to discover and process SCCs in the SSA
3190 graph.
3191 Execution of this algorithm relies on the fact that the SCCs are
3192 popped off the stack in topological order.
3193 Returns true if successful, false if we stopped processing SCCs due
3194 to resource constraints. */
3196 static bool
3197 DFS (tree name)
3199 VEC(ssa_op_iter, heap) *itervec = NULL;
3200 VEC(tree, heap) *namevec = NULL;
3201 use_operand_p usep = NULL;
3202 gimple defstmt;
3203 tree use;
3204 ssa_op_iter iter;
3206 start_over:
3207 /* SCC info */
3208 VN_INFO (name)->dfsnum = next_dfs_num++;
3209 VN_INFO (name)->visited = true;
3210 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3212 VEC_safe_push (tree, heap, sccstack, name);
3213 VN_INFO (name)->on_sccstack = true;
3214 defstmt = SSA_NAME_DEF_STMT (name);
3216 /* Recursively DFS on our operands, looking for SCC's. */
3217 if (!gimple_nop_p (defstmt))
3219 /* Push a new iterator. */
3220 if (gimple_code (defstmt) == GIMPLE_PHI)
3221 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3222 else
3223 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3225 else
3226 clear_and_done_ssa_iter (&iter);
3228 while (1)
3230 /* If we are done processing uses of a name, go up the stack
3231 of iterators and process SCCs as we find them. */
3232 if (op_iter_done (&iter))
3234 /* See if we found an SCC. */
3235 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3236 if (!extract_and_process_scc_for_name (name))
3238 VEC_free (tree, heap, namevec);
3239 VEC_free (ssa_op_iter, heap, itervec);
3240 return false;
3243 /* Check if we are done. */
3244 if (VEC_empty (tree, namevec))
3246 VEC_free (tree, heap, namevec);
3247 VEC_free (ssa_op_iter, heap, itervec);
3248 return true;
3251 /* Restore the last use walker and continue walking there. */
3252 use = name;
3253 name = VEC_pop (tree, namevec);
3254 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
3255 sizeof (ssa_op_iter));
3256 VEC_pop (ssa_op_iter, itervec);
3257 goto continue_walking;
3260 use = USE_FROM_PTR (usep);
3262 /* Since we handle phi nodes, we will sometimes get
3263 invariants in the use expression. */
3264 if (TREE_CODE (use) == SSA_NAME)
3266 if (! (VN_INFO (use)->visited))
3268 /* Recurse by pushing the current use walking state on
3269 the stack and starting over. */
3270 VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
3271 VEC_safe_push(tree, heap, namevec, name);
3272 name = use;
3273 goto start_over;
3275 continue_walking:
3276 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3277 VN_INFO (use)->low);
3279 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3280 && VN_INFO (use)->on_sccstack)
3282 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3283 VN_INFO (name)->low);
3287 usep = op_iter_next_use (&iter);
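/* The above is Tarjan's SCC algorithm made iterative: instead of
   recursing, the current walk state is pushed onto namevec/itervec.
   Hypothetical walk: starting at i_2 (from the example near
   process_scc) we reach i_1, whose PHI argument leads back to i_2;
   that back edge keeps low == dfsnum for i_2, so once its uses are
   exhausted both names pop off sccstack as a single SCC.  */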
3291 /* Allocate a value number table. */
3293 static void
3294 allocate_vn_table (vn_tables_t table)
3296 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3297 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3298 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3299 free_reference);
3301 gcc_obstack_init (&table->nary_obstack);
3302 table->phis_pool = create_alloc_pool ("VN phis",
3303 sizeof (struct vn_phi_s),
3304 30);
3305 table->references_pool = create_alloc_pool ("VN references",
3306 sizeof (struct vn_reference_s),
3307 30);
3310 /* Free a value number table. */
3312 static void
3313 free_vn_table (vn_tables_t table)
3315 htab_delete (table->phis);
3316 htab_delete (table->nary);
3317 htab_delete (table->references);
3318 obstack_free (&table->nary_obstack, NULL);
3319 free_alloc_pool (table->phis_pool);
3320 free_alloc_pool (table->references_pool);
3323 static void
3324 init_scc_vn (void)
3326 size_t i;
3327 int j;
3328 int *rpo_numbers_temp;
3330 calculate_dominance_info (CDI_DOMINATORS);
3331 sccstack = NULL;
3332 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3333 free);
3335 constant_value_ids = BITMAP_ALLOC (NULL);
3337 next_dfs_num = 1;
3338 next_value_id = 1;
3340 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
3341 /* VEC_alloc doesn't actually grow it to the right size; it just
3342 preallocates the space to do so. */
3343 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
3344 gcc_obstack_init (&vn_ssa_aux_obstack);
3346 shared_lookup_phiargs = NULL;
3347 shared_lookup_references = NULL;
3348 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3349 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3350 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3352 /* rpo_numbers_temp holds the RPO ordering: rpo[i] = bb means that
3353 the i'th block in RPO order is bb. We want the inverse map from
3354 bb's to RPO numbers, so we need to rearrange this array. */
3355 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3356 rpo_numbers[rpo_numbers_temp[j]] = j;
3358 XDELETE (rpo_numbers_temp);
3360 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3362 /* Create the VN_INFO structures, and initialize value numbers to
3363 TOP. */
3364 for (i = 0; i < num_ssa_names; i++)
3366 tree name = ssa_name (i);
3367 if (name)
3369 VN_INFO_GET (name)->valnum = VN_TOP;
3370 VN_INFO (name)->expr = NULL_TREE;
3371 VN_INFO (name)->value_id = 0;
3375 renumber_gimple_stmt_uids ();
3377 /* Create the valid and optimistic value numbering tables. */
3378 valid_info = XCNEW (struct vn_tables_s);
3379 allocate_vn_table (valid_info);
3380 optimistic_info = XCNEW (struct vn_tables_s);
3381 allocate_vn_table (optimistic_info);
3384 void
3385 free_scc_vn (void)
3387 size_t i;
3389 htab_delete (constant_to_value_id);
3390 BITMAP_FREE (constant_value_ids);
3391 VEC_free (tree, heap, shared_lookup_phiargs);
3392 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
3393 XDELETEVEC (rpo_numbers);
3395 for (i = 0; i < num_ssa_names; i++)
3397 tree name = ssa_name (i);
3398 if (name
3399 && VN_INFO (name)->needs_insertion)
3400 release_ssa_name (name);
3402 obstack_free (&vn_ssa_aux_obstack, NULL);
3403 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
3405 VEC_free (tree, heap, sccstack);
3406 free_vn_table (valid_info);
3407 XDELETE (valid_info);
3408 free_vn_table (optimistic_info);
3409 XDELETE (optimistic_info);
3412 /* Set the value ids in the valid hash tables. */
3414 static void
3415 set_hashtable_value_ids (void)
3417 htab_iterator hi;
3418 vn_nary_op_t vno;
3419 vn_reference_t vr;
3420 vn_phi_t vp;
3422 /* Now set the value ids of the things we had put in the hash
3423 table. */
3425 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
3426 vno, vn_nary_op_t, hi)
3428 if (vno->result)
3430 if (TREE_CODE (vno->result) == SSA_NAME)
3431 vno->value_id = VN_INFO (vno->result)->value_id;
3432 else if (is_gimple_min_invariant (vno->result))
3433 vno->value_id = get_or_alloc_constant_value_id (vno->result);
3437 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
3438 vp, vn_phi_t, hi)
3440 if (vp->result)
3442 if (TREE_CODE (vp->result) == SSA_NAME)
3443 vp->value_id = VN_INFO (vp->result)->value_id;
3444 else if (is_gimple_min_invariant (vp->result))
3445 vp->value_id = get_or_alloc_constant_value_id (vp->result);
3449 FOR_EACH_HTAB_ELEMENT (valid_info->references,
3450 vr, vn_reference_t, hi)
3452 if (vr->result)
3454 if (TREE_CODE (vr->result) == SSA_NAME)
3455 vr->value_id = VN_INFO (vr->result)->value_id;
3456 else if (is_gimple_min_invariant (vr->result))
3457 vr->value_id = get_or_alloc_constant_value_id (vr->result);
3462 /* Do SCCVN. Returns true if it finished, false if we bailed out
3463 due to resource constraints. */
3465 bool
3466 run_scc_vn (void)
3468 size_t i;
3469 tree param;
3470 bool changed = true;
3472 init_scc_vn ();
3473 current_info = valid_info;
3475 for (param = DECL_ARGUMENTS (current_function_decl);
3476 param;
3477 param = DECL_CHAIN (param))
3479 if (gimple_default_def (cfun, param) != NULL)
3481 tree def = gimple_default_def (cfun, param);
3482 VN_INFO (def)->valnum = def;
3486 for (i = 1; i < num_ssa_names; ++i)
3488 tree name = ssa_name (i);
3489 if (name
3490 && VN_INFO (name)->visited == false
3491 && !has_zero_uses (name))
3492 if (!DFS (name))
3494 free_scc_vn ();
3495 return false;
3499 /* Initialize the value ids. */
3501 for (i = 1; i < num_ssa_names; ++i)
3503 tree name = ssa_name (i);
3504 vn_ssa_aux_t info;
3505 if (!name)
3506 continue;
3507 info = VN_INFO (name);
3508 if (info->valnum == name
3509 || info->valnum == VN_TOP)
3510 info->value_id = get_next_value_id ();
3511 else if (is_gimple_min_invariant (info->valnum))
3512 info->value_id = get_or_alloc_constant_value_id (info->valnum);
3515 /* Propagate the value ids until they stop changing. */
3516 while (changed)
3518 changed = false;
3519 for (i = 1; i < num_ssa_names; ++i)
3521 tree name = ssa_name (i);
3522 vn_ssa_aux_t info;
3523 if (!name)
3524 continue;
3525 info = VN_INFO (name);
3526 if (TREE_CODE (info->valnum) == SSA_NAME
3527 && info->valnum != name
3528 && info->value_id != VN_INFO (info->valnum)->value_id)
3530 changed = true;
3531 info->value_id = VN_INFO (info->valnum)->value_id;
3536 set_hashtable_value_ids ();
3538 if (dump_file && (dump_flags & TDF_DETAILS))
3540 fprintf (dump_file, "Value numbers:\n");
3541 for (i = 0; i < num_ssa_names; i++)
3543 tree name = ssa_name (i);
3544 if (name
3545 && VN_INFO (name)->visited
3546 && SSA_VAL (name) != name)
3548 print_generic_expr (dump_file, name, 0);
3549 fprintf (dump_file, " = ");
3550 print_generic_expr (dump_file, SSA_VAL (name), 0);
3551 fprintf (dump_file, "\n");
3556 return true;
3559 /* Return the maximum value id we have ever seen. */
3561 unsigned int
3562 get_max_value_id (void)
3564 return next_value_id;
3567 /* Return the next unique value id. */
3569 unsigned int
3570 get_next_value_id (void)
3572 return next_value_id++;
3576 /* Compare two expressions E1 and E2 and return true if they are equal. */
3578 bool
3579 expressions_equal_p (tree e1, tree e2)
3581 /* The obvious case. */
3582 if (e1 == e2)
3583 return true;
3585 /* If only one of them is null, they cannot be equal. */
3586 if (!e1 || !e2)
3587 return false;
3589 /* Now perform the actual comparison. */
3590 if (TREE_CODE (e1) == TREE_CODE (e2)
3591 && operand_equal_p (e1, e2, OEP_PURE_SAME))
3592 return true;
3594 return false;
3598 /* Return true if the nary operation NARY may trap. This is a copy
3599 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
3601 bool
3602 vn_nary_may_trap (vn_nary_op_t nary)
3604 tree type;
3605 tree rhs2 = NULL_TREE;
3606 bool honor_nans = false;
3607 bool honor_snans = false;
3608 bool fp_operation = false;
3609 bool honor_trapv = false;
3610 bool handled, ret;
3611 unsigned i;
3613 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
3614 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
3615 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
3617 type = nary->type;
3618 fp_operation = FLOAT_TYPE_P (type);
3619 if (fp_operation)
3621 honor_nans = flag_trapping_math && !flag_finite_math_only;
3622 honor_snans = flag_signaling_nans != 0;
3624 else if (INTEGRAL_TYPE_P (type)
3625 && TYPE_OVERFLOW_TRAPS (type))
3626 honor_trapv = true;
3628 if (nary->length >= 2)
3629 rhs2 = nary->op[1];
3630 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
3631 honor_trapv,
3632 honor_nans, honor_snans, rhs2,
3633 &handled);
3634 if (handled
3635 && ret)
3636 return true;
3638 for (i = 0; i < nary->length; ++i)
3639 if (tree_could_trap_p (nary->op[i]))
3640 return true;
3642 return false;
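/* Hypothetical examples: with flag_trapping_math, a floating-point
   comparison honoring NaNs may trap; an integral x_1 / y_2 may trap
   when y_2 is not a nonzero constant, which
   operation_could_trap_helper_p determines from RHS2.  */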