/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "basic-block.h"
28 #include "tree-pretty-print.h"
29 #include "gimple-pretty-print.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
32 #include "gimple.h"
33 #include "tree-dump.h"
34 #include "timevar.h"
35 #include "fibheap.h"
36 #include "hashtab.h"
37 #include "tree-iterator.h"
38 #include "alloc-pool.h"
39 #include "tree-pass.h"
40 #include "flags.h"
41 #include "bitmap.h"
42 #include "langhooks.h"
43 #include "cfgloop.h"
44 #include "params.h"
45 #include "tree-ssa-propagate.h"
46 #include "tree-ssa-sccvn.h"
47 #include "gimple-fold.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
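
/* As a concrete example of the straight-line case: given

     x_1 = a_2 + b_3;
     y_4 = a_2 + b_3;

   both right-hand sides hash to the same n-ary entry once their
   operands are valueized, so y_4 receives the same value number as
   x_1 and later uses of y_4 can be replaced by x_1.  */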
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;
static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;


DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
                                SSA_NAME_VERSION (name));
  gcc_checking_assert (res);
  return res;
}
/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), value);
}
/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}
/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is a default-definition or a PHI result
     use it directly.  */
  if (gimple_nop_p (def_stmt)
      || gimple_code (def_stmt) == GIMPLE_PHI)
    return vn->valnum;

  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
    {
    case tcc_reference:
      if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
           || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
           || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
          && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
        expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}
/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}
/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, INSERT);
  if (*slot)
    return ((vn_constant_t)*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = (void *) vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}
/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
          /* We do not care for differences in type qualification.  */
          && (vro1->type == vro2->type
              || (vro1->type && vro2->type
                  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
                                         TYPE_MAIN_VARIANT (vro2->type))))
          && expressions_equal_p (vro1->op0, vro2->op0)
          && expressions_equal_p (vro1->op1, vro2->op1)
          && expressions_equal_p (vro1->op2, vro2->op2));
}
/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}
/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}
/* Compute a hash for the reference operation VR1 and return it.  */
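/* Note that runs of operands with known constant offsets are folded
   into a single accumulated offset before being hashed, and that the
   ADDR_EXPR operand of a MEM_REF is hashed as the object it points
   to.  This mirrors the canonicalization in vn_reference_eq below,
   so equivalent spellings of the same address hash identically.  */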
hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vn_reference_op_s, vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
        deref = true;
      else if (vro->opcode != ADDR_EXPR)
        deref = false;
      if (vro->off != -1)
        {
          if (off == -1)
            off = 0;
          off += vro->off;
        }
      else
        {
          if (off != -1
              && off != 0)
            result = iterative_hash_hashval_t (off, result);
          off = -1;
          if (deref
              && vro->opcode == ADDR_EXPR)
            {
              if (vro->op0)
                {
                  tree op = TREE_OPERAND (vro->op0, 0);
                  result = iterative_hash_hashval_t (TREE_CODE (op), result);
                  result = iterative_hash_expr (op, result);
                }
            }
          else
            result = vn_reference_op_compute_hash (vro, result);
        }
    }
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */
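/* Rather than comparing operands one by one, the loop below compares
   maximal runs of operands with known constant offsets by their
   accumulated offset, and looks through ADDR_EXPR operands of
   MEM_REFs (via the tem1/tem2 temporaries) so that MEM[&decl] and a
   plain reference to decl compare equal.  */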
int
vn_reference_eq (const void *p1, const void *p2)
{
  unsigned i, j;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
        return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
           && (TYPE_PRECISION (vr1->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
           && (TYPE_PRECISION (vr2->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
        {
          if (vro1->opcode == MEM_REF)
            deref1 = true;
          if (vro1->off == -1)
            break;
          off1 += vro1->off;
        }
      for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
        {
          if (vro2->opcode == MEM_REF)
            deref2 = true;
          if (vro2->off == -1)
            break;
          off2 += vro2->off;
        }
      if (off1 != off2)
        return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
        {
          memset (&tem1, 0, sizeof (tem1));
          tem1.op0 = TREE_OPERAND (vro1->op0, 0);
          tem1.type = TREE_TYPE (tem1.op0);
          tem1.opcode = TREE_CODE (tem1.op0);
          vro1 = &tem1;
        }
      if (deref2 && vro2->opcode == ADDR_EXPR)
        {
          memset (&tem2, 0, sizeof (tem2));
          tem2.op0 = TREE_OPERAND (vro2->op0, 0);
          tem2.type = TREE_TYPE (tem2.op0);
          tem2.opcode = TREE_CODE (tem2.op0);
          vro2 = &tem2;
        }
      if (!vn_reference_op_eq (vro1, vro2))
        return false;
      ++j;
      ++i;
    }
  while (VEC_length (vn_reference_op_s, vr1->operands) != i
         || VEC_length (vn_reference_op_s, vr2->operands) != j);

  return true;
}
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */
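/* For example, a load from a.b[i] is flattened outermost-first into
   an ARRAY_REF operand recording i plus the array lower bound and
   element size, a COMPONENT_REF operand for field b, and a
   MEM_REF/ADDR_EXPR pair canonicalizing the base decl a.  */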
void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
        {
        case MEM_REF:
          /* The base address gets its own vn_reference_op_s structure.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          if (host_integerp (TREE_OPERAND (ref, 1), 0))
            temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          {
            tree this_offset = component_ref_field_offset (ref);
            if (this_offset
                && TREE_CODE (this_offset) == INTEGER_CST)
              {
                tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
                if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
                  {
                    double_int off
                      = double_int_add (tree_to_double_int (this_offset),
                                        double_int_rshift
                                          (tree_to_double_int (bit_offset),
                                           BITS_PER_UNIT == 8
                                           ? 3 : exact_log2 (BITS_PER_UNIT),
                                           HOST_BITS_PER_DOUBLE_INT, true));
                    if (double_int_fits_in_shwi_p (off))
                      temp.off = off.low;
                  }
              }
          }
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
          if (TREE_CODE (temp.op0) == INTEGER_CST
              && TREE_CODE (temp.op1) == INTEGER_CST
              && TREE_CODE (temp.op2) == INTEGER_CST)
            {
              double_int off = tree_to_double_int (temp.op0);
              off = double_int_add (off,
                                    double_int_neg
                                      (tree_to_double_int (temp.op1)));
              off = double_int_mul (off, tree_to_double_int (temp.op2));
              if (double_int_fits_in_shwi_p (off))
                temp.off = off.low;
            }
          break;
        case VAR_DECL:
          if (DECL_HARD_REGISTER (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthru.  */
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
          /* Canonicalize decls to MEM[&decl] which is what we end up with
             when valueizing MEM[ptr] with ptr = &decl.  */
          temp.opcode = MEM_REF;
          temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
          temp.off = 0;
          VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
          temp.opcode = ADDR_EXPR;
          temp.op0 = build_fold_addr_expr (ref);
          temp.type = TREE_TYPE (temp.op0);
          temp.off = -1;
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case FIXED_CST:
        case CONSTRUCTOR:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (IE they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration  */
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          temp.off = 0;
          break;
        case IMAGPART_EXPR:
          /* This is only interesting for its constant offset.  */
          temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
          break;
        default:
          gcc_unreachable ();
        }
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */
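/* The walk below essentially redoes get_ref_base_and_extent on the
   flattened operand vector: it accumulates a bit offset from the
   component-reference style operands and rebuilds just enough of a
   MEM_REF chain to recover the ultimate base object.  */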
bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
        size = -1;
      else
        size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case ADDR_EXPR:
          /* Apart from ADDR_EXPR arguments to MEM_REF.  */
          if (base != NULL_TREE
              && TREE_CODE (base) == MEM_REF
              && op->op0
              && DECL_P (TREE_OPERAND (op->op0, 0)))
            {
              vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
              base = TREE_OPERAND (op->op0, 0);
              if (pop->off == -1)
                {
                  max_size = -1;
                  offset = 0;
                }
              else
                offset += pop->off * BITS_PER_UNIT;
              op0_p = NULL;
              break;
            }
          /* Fallthru.  */
        case CALL_EXPR:
          return false;

        /* Record the base objects.  */
        case MEM_REF:
          base_alias_set = get_deref_alias_set (op->op0);
          *op0_p = build2 (MEM_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          op0_p = NULL;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += tree_low_cst (op->op1, 0);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */
            if (op->op1
                || !host_integerp (DECL_FIELD_OFFSET (field), 1))
              max_size = -1;
            else
              {
                offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
                           * BITS_PER_UNIT);
                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (!host_integerp (op->op0, 0)
              || !host_integerp (op->op1, 0)
              || !host_integerp (op->op2, 0))
            max_size = -1;
          else
            {
              HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
              hindex -= TREE_INT_CST_LOW (op->op1);
              hindex *= TREE_INT_CST_LOW (op->op2);
              hindex *= BITS_PER_UNIT;
              offset += hindex;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);

  return true;
}
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  temp.off = -1;
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}
/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
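/* For example, if OP holds &a.b.c and the preceding MEM_REF carries
   offset 8, the pair is rewritten to &a and a MEM_REF offset of 8
   plus the byte offset of .b.c within a, exposing the underlying
   object a to the hash and equality code above.  */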
void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
                            unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  tree addr_base;
  HOST_WIDE_INT addr_offset;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
                                             &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != op->op0)
    {
      double_int off = tree_to_double_int (mem_op->op0);
      off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
      off = double_int_add (off, shwi_to_double_int (addr_offset));
      mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (host_integerp (mem_op->op0, 0))
        mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
      else
        mem_op->off = -1;
    }
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
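/* Here the address operand is an SSA name: if it is defined as the
   address of a dereference (&MEM[ptr_2, CST].foo) or as
   ptr_2 + CST, the inner pointer is substituted and the constant
   part folded into the MEM_REF offset, recursing until no further
   forwarding is possible.  */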
static void
vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
                                     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  gimple def_stmt;
  enum tree_code code;
  double_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = tree_to_double_int (mem_op->op0);
  off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
                                                 &addr_offset);
      if (!addr_base
          || TREE_CODE (addr_base) != MEM_REF)
        return;

      off = double_int_add (off, shwi_to_double_int (addr_offset));
      off = double_int_add (off, mem_ref_offset (addr_base));
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
          || TREE_CODE (ptroff) != INTEGER_CST)
        return;

      off = double_int_add (off, tree_to_double_int (ptroff));
      op->op0 = ptr;
    }

  mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (host_integerp (mem_op->op0, 0))
    mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */
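/* For example, a one-byte read at constant index 1 from the string
   literal "foo" folds to the character constant 'o', and a built-in
   call whose arguments have become constant can fold to a constant
   via build_call_expr.  */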
tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  VEC (vn_reference_op_s, heap) *operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = VEC_index (vn_reference_op_s, operands, 0);
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && VEC_length (vn_reference_op_s, operands) >= 2
      && VEC_length (vn_reference_op_s, operands) <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (VEC_length (vn_reference_op_s, operands) > 2)
        arg1 = VEC_index (vn_reference_op_s, operands, 2);
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
          || (arg0->opcode == ADDR_EXPR
              && is_gimple_min_invariant (arg0->op0)))
        anyconst = true;
      if (arg1
          && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
              || (arg1->opcode == ADDR_EXPR
                  && is_gimple_min_invariant (arg1->op0))))
        anyconst = true;
      if (anyconst)
        {
          tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
                                         arg1 ? 2 : 1,
                                         arg0->op0,
                                         arg1 ? arg1->op0 : NULL);
          if (folded
              && TREE_CODE (folded) == NOP_EXPR)
            folded = TREE_OPERAND (folded, 0);
          if (folded
              && is_gimple_min_invariant (folded))
            return folded;
        }
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
           && TREE_CODE (op->op0) == INTEGER_CST
           && integer_zerop (op->op1)
           && VEC_length (vn_reference_op_s, operands) == 2)
    {
      vn_reference_op_t arg0;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (arg0->opcode == STRING_CST
          && (TYPE_MODE (op->type)
              == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
          && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
          && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
          && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
        return build_int_cst_type (op->type,
                                   (TREE_STRING_POINTER (arg0->op0)
                                    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs_1 (VEC (vn_reference_op_s, heap) *orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (vn_reference_op_s, orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          tree tem = SSA_VAL (vro->op0);
          if (tem != vro->op0)
            {
              *valueized_anything = true;
              vro->op0 = tem;
            }
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        {
          tree tem = SSA_VAL (vro->op1);
          if (tem != vro->op1)
            {
              *valueized_anything = true;
              vro->op1 = tem;
            }
        }
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        {
          tree tem = SSA_VAL (vro->op2);
          if (tem != vro->op2)
            {
              *valueized_anything = true;
              vro->op2 = tem;
            }
        }
      /* If it transforms from an SSA_NAME to an address, fold with
         a preceding indirect reference.  */
      if (i > 0
          && vro->op0
          && TREE_CODE (vro->op0) == ADDR_EXPR
          && VEC_index (vn_reference_op_s,
                        orig, i - 1)->opcode == MEM_REF)
        vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
               && vro->opcode == SSA_NAME
               && VEC_index (vn_reference_op_s,
                             orig, i - 1)->opcode == MEM_REF)
        vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
         one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
               && vro->off == -1
               && TREE_CODE (vro->op0) == INTEGER_CST
               && TREE_CODE (vro->op1) == INTEGER_CST
               && TREE_CODE (vro->op2) == INTEGER_CST)
        {
          double_int off = tree_to_double_int (vro->op0);
          off = double_int_add (off,
                                double_int_neg
                                  (tree_to_double_int (vro->op1)));
          off = double_int_mul (off, tree_to_double_int (vro->op2));
          if (double_int_fits_in_shwi_p (off))
            vro->off = off.low;
        }
    }

  return orig;
}
static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}
static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
                                              valueized_anything);
  return shared_lookup_references;
}
/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}
static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */
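/* The return convention follows walk_non_aliased_vuses: a
   vn_reference_t on success, NULL to continue the walk (possibly
   with a rewritten *REF / VR_), and (void *)-1 to abort it.  The
   cases handled below are a memset of zero, an assignment from an
   empty CONSTRUCTOR, an aggregate copy, and a memcpy-style copy,
   each of which must fully cover the reference being looked up.  */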
static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static VEC (vn_reference_op_s, heap) *lhs_ops = NULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definitions LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      VEC (vn_reference_op_s, heap) *tem;
      tree lhs = gimple_assign_lhs (def_stmt);
      bool valueized_anything = false;
      /* Avoid re-allocation overhead.  */
      VEC_truncate (vn_reference_op_s, lhs_ops, 0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      tem = lhs_ops;
      lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
      gcc_assert (lhs_ops == tem);
      if (valueized_anything)
        {
          lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
                                                      get_alias_set (lhs),
                                                      TREE_TYPE (lhs), lhs_ops);
          if (lhs_ref_ok
              && !refs_may_alias_p_1 (ref, &lhs_ref, true))
            return NULL;
        }
      else
        {
          ao_ref_init (&lhs_ref, lhs);
          lhs_ref_ok = true;
        }
    }

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
          == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
          && maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* 3) For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      VEC (vn_reference_op_s, heap) *rhs = NULL;
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
        return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      if (maxsize2 == -1
          || (base != base2 && !operand_equal_p (base, base2, 0))
          || offset2 > offset
          || offset2 + size2 < offset + maxsize)
        return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
         contains valueized operands for the lhs.  */
      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
      j = VEC_length (vn_reference_op_s, lhs_ops) - 1;
      while (j >= 0 && i >= 0
             && vn_reference_op_eq (VEC_index (vn_reference_op_s,
                                               vr->operands, i),
                                    VEC_index (vn_reference_op_s, lhs_ops, j)))
        {
          i--;
          j--;
        }

      /* ??? The innermost op should always be a MEM_REF and we already
         checked that the assignment to the lhs kills vr.  Thus for
         aggregate copies using char[] types the vn_reference_op_eq
         may fail when comparing types for compatibility.  But we really
         don't care here - further lookups with the rewritten operands
         will simply fail if we messed up types too badly.  */
      if (j == 0 && i >= 0
          && VEC_index (vn_reference_op_s, lhs_ops, 0)->opcode == MEM_REF
          && VEC_index (vn_reference_op_s, lhs_ops, 0)->off != -1
          && (VEC_index (vn_reference_op_s, lhs_ops, 0)->off
              == VEC_index (vn_reference_op_s, vr->operands, i)->off))
        i--, j--;

      /* i now points to the first additional op.
         ??? LHS may not be completely contained in VR, one or more
         VIEW_CONVERT_EXPRs could be in its way.  We could at least
         try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
        return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
          > VEC_length (vn_reference_op_s, vr->operands))
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
                         i + 1 + VEC_length (vn_reference_op_s, rhs));
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands,
                      i + 1 + VEC_length (vn_reference_op_s, rhs));
      FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
        VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
      VEC_free (vn_reference_op_s, heap, rhs);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* 4) For memcpy copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && is_gimple_reg_type (vr->type)
           /* ??? Handle BCOPY as well.  */
           && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
               || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
               || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
           && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
               || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
           && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
               || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
           && host_integerp (gimple_call_arg (def_stmt, 2), 1))
    {
      tree lhs, rhs;
      ao_ref r;
      HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
      vn_reference_op_s op;
      HOST_WIDE_INT at;

      /* Only handle non-variable, addressable refs.  */
      if (ref->size != maxsize
          || offset % BITS_PER_UNIT != 0
          || ref->size % BITS_PER_UNIT != 0)
        return (void *)-1;

      /* Extract a pointer base and an offset for the destination.  */
      lhs = gimple_call_arg (def_stmt, 0);
      lhs_offset = 0;
      if (TREE_CODE (lhs) == SSA_NAME)
        lhs = SSA_VAL (lhs);
      if (TREE_CODE (lhs) == ADDR_EXPR)
        {
          tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
                                                    &lhs_offset);
          if (!tem)
            return (void *)-1;
          if (TREE_CODE (tem) == MEM_REF
              && host_integerp (TREE_OPERAND (tem, 1), 1))
            {
              lhs = TREE_OPERAND (tem, 0);
              lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
            }
          else if (DECL_P (tem))
            lhs = build_fold_addr_expr (tem);
          else
            return (void *)-1;
        }
      if (TREE_CODE (lhs) != SSA_NAME
          && TREE_CODE (lhs) != ADDR_EXPR)
        return (void *)-1;

      /* Extract a pointer base and an offset for the source.  */
      rhs = gimple_call_arg (def_stmt, 1);
      rhs_offset = 0;
      if (TREE_CODE (rhs) == SSA_NAME)
        rhs = SSA_VAL (rhs);
      if (TREE_CODE (rhs) == ADDR_EXPR)
        {
          tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
                                                    &rhs_offset);
          if (!tem)
            return (void *)-1;
          if (TREE_CODE (tem) == MEM_REF
              && host_integerp (TREE_OPERAND (tem, 1), 1))
            {
              rhs = TREE_OPERAND (tem, 0);
              rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
            }
          else if (DECL_P (tem))
            rhs = build_fold_addr_expr (tem);
          else
            return (void *)-1;
        }
      if (TREE_CODE (rhs) != SSA_NAME
          && TREE_CODE (rhs) != ADDR_EXPR)
        return (void *)-1;

      copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));

      /* The bases of the destination and the references have to agree.  */
      if ((TREE_CODE (base) != MEM_REF
           && !DECL_P (base))
          || (TREE_CODE (base) == MEM_REF
              && (TREE_OPERAND (base, 0) != lhs
                  || !host_integerp (TREE_OPERAND (base, 1), 1)))
          || (DECL_P (base)
              && (TREE_CODE (lhs) != ADDR_EXPR
                  || TREE_OPERAND (lhs, 0) != base)))
        return (void *)-1;

      /* And the access has to be contained within the memcpy destination.  */
      at = offset / BITS_PER_UNIT;
      if (TREE_CODE (base) == MEM_REF)
        at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
      if (lhs_offset > at
          || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
        return (void *)-1;

      /* Make room for 2 operands in the new reference.  */
      if (VEC_length (vn_reference_op_s, vr->operands) < 2)
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands, 2);
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands, 2);

      /* The looked-through reference is a simple MEM_REF at the
         corresponding offset within the memcpy source.  */
      memset (&op, 0, sizeof (op));
      op.type = vr->type;
      op.opcode = MEM_REF;
      op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
      op.off = at - lhs_offset + rhs_offset;
      VEC_replace (vn_reference_op_s, vr->operands, 0, &op);
      op.type = TREE_TYPE (rhs);
      op.opcode = TREE_CODE (rhs);
      op.op0 = rhs;
      op.off = -1;
      VEC_replace (vn_reference_op_s, vr->operands, 1, &op);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */
tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
                 VEC_length (vn_reference_op_s, operands));
  memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
          VEC_address (vn_reference_op_s, operands),
          sizeof (vn_reference_op_s)
          * VEC_length (vn_reference_op_s, operands));
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      vn_walk_kind = kind;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
        *vnresult =
          (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */
tree
vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
                     vn_reference_t *vnresult)
{
  VEC (vn_reference_op_s, heap) *operands;
  struct vn_reference_s vr1;
  tree cst;
  bool valueized_anything;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands
    = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
  vr1.type = TREE_TYPE (op);
  vr1.set = get_alias_set (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  if (kind != VN_NOWALK
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      /* Make sure to use a valueized reference if we valueized anything.
         Otherwise preserve the full reference for advanced TBAA.  */
      if (!valueized_anything
          || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
                                             vr1.operands))
        ao_ref_init (&r, op);
      vn_walk_kind = kind;
      wvnresult =
        (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                vn_reference_lookup_2,
                                                vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
      if (wvnresult)
        {
          if (vnresult)
            *vnresult = wvnresult;
          return wvnresult->result;
        }

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}
/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            tree result, unsigned int value_id)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Compute and return the hash value for nary operation VNO1.  */
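/* Note that the hash is intentionally not a pure function of its
   input: SSA operands are first replaced by their current value
   numbers and commutative operations have their operands put in a
   canonical order, so equivalent expressions receive equal hash
   values.  */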
1878 hashval_t
1879 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
1881 hashval_t hash;
1882 unsigned i;
1884 for (i = 0; i < vno1->length; ++i)
1885 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
1886 vno1->op[i] = SSA_VAL (vno1->op[i]);
1888 if (vno1->length == 2
1889 && commutative_tree_code (vno1->opcode)
1890 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
1892 tree temp = vno1->op[0];
1893 vno1->op[0] = vno1->op[1];
1894 vno1->op[1] = temp;
1897 hash = iterative_hash_hashval_t (vno1->opcode, 0);
1898 for (i = 0; i < vno1->length; ++i)
1899 hash = iterative_hash_expr (vno1->op[i], hash);
1901 return hash;
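/* Editorial sketch, not from GCC: why commutative operands are swapped
   into a canonical order before hashing, as done above, so that a + b
   and b + a land in the same hash bucket.  hash_pair_canonical is a
   hypothetical stand-in for the opcode/operand hashing; disabled with
   #if 0.  */
#if 0
#include <stdio.h>

static unsigned
hash_pair_canonical (unsigned op0, unsigned op1)
{
  /* Order the operands deterministically, mirroring the swap above.  */
  if (op0 > op1)
    {
      unsigned tmp = op0;
      op0 = op1;
      op1 = tmp;
    }
  /* A toy mixing step standing in for iterative_hash_expr.  */
  return (op0 * 2654435761u) ^ (op1 + 0x9e3779b9u);
}

int
main (void)
{
  /* a + b and b + a hash identically after canonicalization.  */
  printf ("%u == %u\n",
          hash_pair_canonical (3, 7), hash_pair_canonical (7, 3));
  return 0;
}
#endif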
1904 /* Return the computed hashcode for nary operation P1. */
1906 static hashval_t
1907 vn_nary_op_hash (const void *p1)
1909 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
1910 return vno1->hashcode;
1913 /* Compare nary operations P1 and P2 and return true if they are
1914 equivalent. */
1917 vn_nary_op_eq (const void *p1, const void *p2)
1919 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
1920 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
1921 unsigned i;
1923 if (vno1->hashcode != vno2->hashcode)
1924 return false;
1926 if (vno1->opcode != vno2->opcode
1927 || !types_compatible_p (vno1->type, vno2->type))
1928 return false;
1930 for (i = 0; i < vno1->length; ++i)
1931 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
1932 return false;
1934 return true;
1937 /* Initialize VNO from the pieces provided. */
1939 static void
1940 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
1941 enum tree_code code, tree type, tree op0,
1942 tree op1, tree op2, tree op3)
1944 vno->opcode = code;
1945 vno->length = length;
1946 vno->type = type;
1947 switch (length)
1949 /* The fallthrus here are deliberate. */
1950 case 4: vno->op[3] = op3;
1951 case 3: vno->op[2] = op2;
1952 case 2: vno->op[1] = op1;
1953 case 1: vno->op[0] = op0;
1954 default:
1955 break;
1959 /* Initialize VNO from OP. */
1961 static void
1962 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
1964 unsigned i;
1966 vno->opcode = TREE_CODE (op);
1967 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
1968 vno->type = TREE_TYPE (op);
1969 for (i = 0; i < vno->length; ++i)
1970 vno->op[i] = TREE_OPERAND (op, i);
1973 /* Initialize VNO from STMT. */
1975 static void
1976 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
1978 unsigned i;
1980 vno->opcode = gimple_assign_rhs_code (stmt);
1981 vno->length = gimple_num_ops (stmt) - 1;
1982 vno->type = gimple_expr_type (stmt);
1983 for (i = 0; i < vno->length; ++i)
1984 vno->op[i] = gimple_op (stmt, i + 1);
1985 if (vno->opcode == REALPART_EXPR
1986 || vno->opcode == IMAGPART_EXPR
1987 || vno->opcode == VIEW_CONVERT_EXPR)
1988 vno->op[0] = TREE_OPERAND (vno->op[0], 0);
1991 /* Compute the hashcode for VNO and look for it in the hash table;
1992 return the resulting value number if it exists in the hash table.
1993 Return NULL_TREE if it does not exist in the hash table or if the
1994 result field of the operation is NULL. VNRESULT will contain the
1995 vn_nary_op_t from the hashtable if it exists. */
1997 static tree
1998 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2000 void **slot;
2002 if (vnresult)
2003 *vnresult = NULL;
2005 vno->hashcode = vn_nary_op_compute_hash (vno);
2006 slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
2007 NO_INSERT);
2008 if (!slot && current_info == optimistic_info)
2009 slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
2010 NO_INSERT);
2011 if (!slot)
2012 return NULL_TREE;
2013 if (vnresult)
2014 *vnresult = (vn_nary_op_t)*slot;
2015 return ((vn_nary_op_t)*slot)->result;
2018 /* Lookup a n-ary operation by its pieces and return the resulting value
2019 number if it exists in the hash table. Return NULL_TREE if it does
2020 not exist in the hash table or if the result field of the operation
2021 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2022 if it exists. */
2024 tree
2025 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2026 tree type, tree op0, tree op1, tree op2,
2027 tree op3, vn_nary_op_t *vnresult)
2029 struct vn_nary_op_s vno1;
2030 init_vn_nary_op_from_pieces (&vno1, length, code, type, op0, op1, op2, op3);
2031 return vn_nary_op_lookup_1 (&vno1, vnresult);
2034 /* Lookup OP in the current hash table, and return the resulting value
2035 number if it exists in the hash table. Return NULL_TREE if it does
2036 not exist in the hash table or if the result field of the operation
2037 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2038 if it exists. */
2040 tree
2041 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2043 struct vn_nary_op_s vno1;
2044 init_vn_nary_op_from_op (&vno1, op);
2045 return vn_nary_op_lookup_1 (&vno1, vnresult);
2048 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2049 value number if it exists in the hash table. Return NULL_TREE if
2050 it does not exist in the hash table. VNRESULT will contain the
2051 vn_nary_op_t from the hashtable if it exists. */
2053 tree
2054 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2056 struct vn_nary_op_s vno1;
2057 init_vn_nary_op_from_stmt (&vno1, stmt);
2058 return vn_nary_op_lookup_1 (&vno1, vnresult);
2061 /* Return the size of a vn_nary_op_t with LENGTH operands. */
2063 static size_t
2064 sizeof_vn_nary_op (unsigned int length)
2066 return sizeof (struct vn_nary_op_s) - sizeof (tree) * (4 - length);
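/* Editorial sketch, not from GCC: the sizing trick used just above.
   The struct declares its trailing operand array with the maximum
   arity (4), and shorter operations allocate only the prefix they
   need.  struct toy_nary and sizeof_toy_nary are hypothetical;
   disabled with #if 0.  */
#if 0
#include <stdio.h>
#include <stdlib.h>

struct toy_nary
{
  unsigned length;
  int op[4]; /* Declared with the maximum arity, as vn_nary_op_s is.  */
};

static size_t
sizeof_toy_nary (unsigned length)
{
  return sizeof (struct toy_nary) - sizeof (int) * (4 - length);
}

int
main (void)
{
  /* A unary operation only pays for one operand slot.  */
  struct toy_nary *n = malloc (sizeof_toy_nary (1));
  n->length = 1;
  n->op[0] = 42;
  printf ("full: %zu bytes, unary: %zu bytes\n",
          sizeof (struct toy_nary), sizeof_toy_nary (1));
  free (n);
  return 0;
}
#endif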
2069 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2071 static vn_nary_op_t
2072 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2074 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2077 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2078 obstack. */
2080 static vn_nary_op_t
2081 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2083 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2084 &current_info->nary_obstack);
2086 vno1->value_id = value_id;
2087 vno1->length = length;
2088 vno1->result = result;
2090 return vno1;
2093 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2094 VNO->HASHCODE first. */
2096 static vn_nary_op_t
2097 vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash)
2099 void **slot;
2101 if (compute_hash)
2102 vno->hashcode = vn_nary_op_compute_hash (vno);
2104 slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT);
2105 gcc_assert (!*slot);
2107 *slot = vno;
2108 return vno;
2111 /* Insert an n-ary operation into the current hash table using its
2112 pieces. Return the vn_nary_op_t structure we created and put in
2113 the hashtable. */
2115 vn_nary_op_t
2116 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2117 tree type, tree op0,
2118 tree op1, tree op2, tree op3,
2119 tree result,
2120 unsigned int value_id)
2122 vn_nary_op_t vno1;
2124 vno1 = alloc_vn_nary_op (length, result, value_id);
2125 init_vn_nary_op_from_pieces (vno1, length, code, type, op0, op1, op2, op3);
2126 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2129 /* Insert OP into the current hash table with a value number of
2130 RESULT. Return the vn_nary_op_t structure we created and put in
2131 the hashtable. */
2133 vn_nary_op_t
2134 vn_nary_op_insert (tree op, tree result)
2136 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2137 vn_nary_op_t vno1;
2139 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2140 init_vn_nary_op_from_op (vno1, op);
2141 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2144 /* Insert the rhs of STMT into the current hash table with a value number of
2145 RESULT. */
2147 vn_nary_op_t
2148 vn_nary_op_insert_stmt (gimple stmt, tree result)
2150 unsigned length = gimple_num_ops (stmt) - 1;
2151 vn_nary_op_t vno1;
2153 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2154 init_vn_nary_op_from_stmt (vno1, stmt);
2155 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2158 /* Compute a hashcode for PHI operation VP1 and return it. */
2160 static inline hashval_t
2161 vn_phi_compute_hash (vn_phi_t vp1)
2163 hashval_t result;
2164 int i;
2165 tree phi1op;
2166 tree type;
2168 result = vp1->block->index;
2170 /* If all PHI arguments are constants we need to distinguish
2171 the PHI node via its type. */
2172 type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
2173 result += (INTEGRAL_TYPE_P (type)
2174 + (INTEGRAL_TYPE_P (type)
2175 ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
2177 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2179 if (phi1op == VN_TOP)
2180 continue;
2181 result = iterative_hash_expr (phi1op, result);
2184 return result;
2187 /* Return the computed hashcode for phi operation P1. */
2189 static hashval_t
2190 vn_phi_hash (const void *p1)
2192 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2193 return vp1->hashcode;
2196 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2198 static int
2199 vn_phi_eq (const void *p1, const void *p2)
2201 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2202 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
2204 if (vp1->hashcode != vp2->hashcode)
2205 return false;
2207 if (vp1->block == vp2->block)
2209 int i;
2210 tree phi1op;
2212 /* If the PHI nodes do not have compatible types
2213 they are not the same. */
2214 if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
2215 TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
2216 return false;
2218 /* Any phi in the same block will have its arguments in the
2219 same edge order, because of how we store phi nodes. */
2220 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2222 tree phi2op = VEC_index (tree, vp2->phiargs, i);
2223 if (phi1op == VN_TOP || phi2op == VN_TOP)
2224 continue;
2225 if (!expressions_equal_p (phi1op, phi2op))
2226 return false;
2228 return true;
2230 return false;
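/* Editorial sketch, not from GCC: comparing PHI argument vectors while
   treating a TOP element as equal to anything, as vn_phi_eq above does
   with VN_TOP.  Hypothetical, standalone; disabled with #if 0.  */
#if 0
#include <stdbool.h>
#include <stdio.h>

#define TOP (-1) /* Stand-in for VN_TOP.  */

static bool
phi_args_equal (const int *a, const int *b, unsigned n)
{
  for (unsigned i = 0; i < n; i++)
    {
      if (a[i] == TOP || b[i] == TOP)
        continue; /* TOP is equivalent to everything.  */
      if (a[i] != b[i])
        return false;
    }
  return true;
}

int
main (void)
{
  int p1[] = { 5, TOP, 9 };
  int p2[] = { 5, 8, 9 };
  printf ("%s\n", phi_args_equal (p1, p2, 3) ? "equal" : "different");
  return 0;
}
#endif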
2233 static VEC(tree, heap) *shared_lookup_phiargs;
2235 /* Lookup PHI in the current hash table, and return the resulting
2236 value number if it exists in the hash table. Return NULL_TREE if
2237 it does not exist in the hash table. */
2239 static tree
2240 vn_phi_lookup (gimple phi)
2242 void **slot;
2243 struct vn_phi_s vp1;
2244 unsigned i;
2246 VEC_truncate (tree, shared_lookup_phiargs, 0);
2248 /* Canonicalize the SSA_NAMEs to their value numbers. */
2249 for (i = 0; i < gimple_phi_num_args (phi); i++)
2251 tree def = PHI_ARG_DEF (phi, i);
2252 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2253 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
2255 vp1.phiargs = shared_lookup_phiargs;
2256 vp1.block = gimple_bb (phi);
2257 vp1.hashcode = vn_phi_compute_hash (&vp1);
2258 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
2259 NO_INSERT);
2260 if (!slot && current_info == optimistic_info)
2261 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
2262 NO_INSERT);
2263 if (!slot)
2264 return NULL_TREE;
2265 return ((vn_phi_t)*slot)->result;
2268 /* Insert PHI into the current hash table with a value number of
2269 RESULT. */
2271 static vn_phi_t
2272 vn_phi_insert (gimple phi, tree result)
2274 void **slot;
2275 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2276 unsigned i;
2277 VEC (tree, heap) *args = NULL;
2279 /* Canonicalize the SSA_NAMEs to their value numbers. */
2280 for (i = 0; i < gimple_phi_num_args (phi); i++)
2282 tree def = PHI_ARG_DEF (phi, i);
2283 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2284 VEC_safe_push (tree, heap, args, def);
2286 vp1->value_id = VN_INFO (result)->value_id;
2287 vp1->phiargs = args;
2288 vp1->block = gimple_bb (phi);
2289 vp1->result = result;
2290 vp1->hashcode = vn_phi_compute_hash (vp1);
2292 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
2293 INSERT);
2295 /* Because we iterate over phi operations more than once, it's
2296 possible the slot might already exist here, hence no assert. */
2297 *slot = vp1;
2298 return vp1;
2302 /* Print set of components in strongly connected component SCC to OUT. */
2304 static void
2305 print_scc (FILE *out, VEC (tree, heap) *scc)
2307 tree var;
2308 unsigned int i;
2310 fprintf (out, "SCC consists of: ");
2311 FOR_EACH_VEC_ELT (tree, scc, i, var)
2313 print_generic_expr (out, var, 0);
2314 fprintf (out, " ");
2316 fprintf (out, "\n");
2319 /* Set the value number of FROM to TO, return true if it has changed
2320 as a result. */
2322 static inline bool
2323 set_ssa_val_to (tree from, tree to)
2325 tree currval = SSA_VAL (from);
2327 if (from != to)
2329 if (currval == from)
2331 if (dump_file && (dump_flags & TDF_DETAILS))
2333 fprintf (dump_file, "Not changing value number of ");
2334 print_generic_expr (dump_file, from, 0);
2335 fprintf (dump_file, " from VARYING to ");
2336 print_generic_expr (dump_file, to, 0);
2337 fprintf (dump_file, "\n");
2339 return false;
2341 else if (TREE_CODE (to) == SSA_NAME
2342 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2343 to = from;
2346 /* The only thing we allow as value numbers are VN_TOP, ssa_names
2347 and invariants. So assert that here. */
2348 gcc_assert (to != NULL_TREE
2349 && (to == VN_TOP
2350 || TREE_CODE (to) == SSA_NAME
2351 || is_gimple_min_invariant (to)));
2353 if (dump_file && (dump_flags & TDF_DETAILS))
2355 fprintf (dump_file, "Setting value number of ");
2356 print_generic_expr (dump_file, from, 0);
2357 fprintf (dump_file, " to ");
2358 print_generic_expr (dump_file, to, 0);
2361 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
2363 VN_INFO (from)->valnum = to;
2364 if (dump_file && (dump_flags & TDF_DETAILS))
2365 fprintf (dump_file, " (changed)\n");
2366 return true;
2368 if (dump_file && (dump_flags & TDF_DETAILS))
2369 fprintf (dump_file, "\n");
2370 return false;
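/* Editorial sketch, not from GCC: the "set and report change" idiom
   used by set_ssa_val_to above, which is what drives the fixpoint
   iteration in process_scc.  Hypothetical, standalone; disabled with
   #if 0.  */
#if 0
#include <stdbool.h>
#include <stdio.h>

static int valnum[8]; /* Toy lattice: valnum[i] is i's value number.  */

static bool
set_val_to (int from, int to)
{
  if (valnum[from] == to)
    return false; /* No lattice movement; iteration may stop.  */
  valnum[from] = to;
  return true; /* Changed; the SCC must be revisited.  */
}

int
main (void)
{
  for (int i = 0; i < 8; i++)
    valnum[i] = i; /* Everything starts as its own value.  */
  printf ("first set: %d\n", set_val_to (3, 1)); /* 1: changed.  */
  printf ("second set: %d\n", set_val_to (3, 1)); /* 0: stable.  */
  return 0;
}
#endif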
2373 /* Value number all definitions in STMT to themselves.
2374 Return true if a value number changed. */
2376 static bool
2377 defs_to_varying (gimple stmt)
2379 bool changed = false;
2380 ssa_op_iter iter;
2381 def_operand_p defp;
2383 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2385 tree def = DEF_FROM_PTR (defp);
2387 VN_INFO (def)->use_processed = true;
2388 changed |= set_ssa_val_to (def, def);
2390 return changed;
2393 static bool expr_has_constants (tree expr);
2394 static tree valueize_expr (tree expr);
2396 /* Visit a copy between LHS and RHS, return true if the value number
2397 changed. */
2399 static bool
2400 visit_copy (tree lhs, tree rhs)
2402 /* Follow chains of copies to their destination. */
2403 while (TREE_CODE (rhs) == SSA_NAME
2404 && SSA_VAL (rhs) != rhs)
2405 rhs = SSA_VAL (rhs);
2407 /* The copy source may have a more interesting constant-filled
2408 expression than we do (ours is known to be just an SSA name). */
2409 if (TREE_CODE (rhs) == SSA_NAME)
2411 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2412 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2415 return set_ssa_val_to (lhs, rhs);
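/* Editorial sketch, not from GCC: following a chain of copies to its
   destination, as the loop at the top of visit_copy does with SSA_VAL.
   Hypothetical, standalone; disabled with #if 0.  */
#if 0
#include <stdio.h>

static int val[8];

static int
chase (int x)
{
  /* Walk x -> val[x] until we reach a representative that maps to
     itself (mirrors: while SSA_VAL (rhs) != rhs).  */
  while (val[x] != x)
    x = val[x];
  return x;
}

int
main (void)
{
  for (int i = 0; i < 8; i++)
    val[i] = i;
  val[4] = 2; /* d = c */
  val[2] = 1; /* c = b */
  val[1] = 0; /* b = a */
  printf ("copy of 4 resolves to %d\n", chase (4)); /* Prints 0.  */
  return 0;
}
#endif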
2418 /* Visit a nary operator RHS, value number it, and return true if the
2419 value number of LHS has changed as a result. */
2421 static bool
2422 visit_nary_op (tree lhs, gimple stmt)
2424 bool changed = false;
2425 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2427 if (result)
2428 changed = set_ssa_val_to (lhs, result);
2429 else
2431 changed = set_ssa_val_to (lhs, lhs);
2432 vn_nary_op_insert_stmt (stmt, lhs);
2435 return changed;
2438 /* Visit a call STMT storing into LHS. Return true if the value number
2439 of the LHS has changed as a result. */
2441 static bool
2442 visit_reference_op_call (tree lhs, gimple stmt)
2444 bool changed = false;
2445 struct vn_reference_s vr1;
2446 tree result;
2447 tree vuse = gimple_vuse (stmt);
2449 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2450 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2451 vr1.type = gimple_expr_type (stmt);
2452 vr1.set = 0;
2453 vr1.hashcode = vn_reference_compute_hash (&vr1);
2454 result = vn_reference_lookup_1 (&vr1, NULL);
2455 if (result)
2457 changed = set_ssa_val_to (lhs, result);
2458 if (TREE_CODE (result) == SSA_NAME
2459 && VN_INFO (result)->has_constants)
2460 VN_INFO (lhs)->has_constants = true;
2462 else
2464 void **slot;
2465 vn_reference_t vr2;
2466 changed = set_ssa_val_to (lhs, lhs);
2467 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2468 vr2->vuse = vr1.vuse;
2469 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2470 vr2->type = vr1.type;
2471 vr2->set = vr1.set;
2472 vr2->hashcode = vr1.hashcode;
2473 vr2->result = lhs;
2474 slot = htab_find_slot_with_hash (current_info->references,
2475 vr2, vr2->hashcode, INSERT);
2476 if (*slot)
2477 free_reference (*slot);
2478 *slot = vr2;
2481 return changed;
2484 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2485 and return true if the value number of the LHS has changed as a result. */
2487 static bool
2488 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2490 bool changed = false;
2491 tree last_vuse;
2492 tree result;
2494 last_vuse = gimple_vuse (stmt);
2495 last_vuse_ptr = &last_vuse;
2496 result = vn_reference_lookup (op, gimple_vuse (stmt),
2497 default_vn_walk_kind, NULL);
2498 last_vuse_ptr = NULL;
2500 /* If we have a VCE, try looking up its operand as it might be stored in
2501 a different type. */
2502 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2503 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2504 default_vn_walk_kind, NULL);
2506 /* We handle type-punning through unions by value-numbering based
2507 on offset and size of the access. Be prepared to handle a
2508 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
2509 if (result
2510 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2512 /* We will be setting the value number of lhs to the value number
2513 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2514 So first simplify and lookup this expression to see if it
2515 is already available. */
2516 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2517 if ((CONVERT_EXPR_P (val)
2518 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2519 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2521 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2522 if ((CONVERT_EXPR_P (tem)
2523 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2524 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2525 TREE_TYPE (val), tem)))
2526 val = tem;
2528 result = val;
2529 if (!is_gimple_min_invariant (val)
2530 && TREE_CODE (val) != SSA_NAME)
2531 result = vn_nary_op_lookup (val, NULL);
2532 /* If the expression is not yet available, value-number lhs to
2533 a new SSA_NAME we create. */
2534 if (!result)
2536 result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
2537 /* Initialize value-number information properly. */
2538 VN_INFO_GET (result)->valnum = result;
2539 VN_INFO (result)->value_id = get_next_value_id ();
2540 VN_INFO (result)->expr = val;
2541 VN_INFO (result)->has_constants = expr_has_constants (val);
2542 VN_INFO (result)->needs_insertion = true;
2543 /* As all "inserted" statements are singleton SCCs, insert
2544 to the valid table. This is strictly needed to
2545 avoid re-generating new value SSA_NAMEs for the same
2546 expression during SCC iteration over and over (the
2547 optimistic table gets cleared after each iteration).
2548 We do not need to insert into the optimistic table, as
2549 lookups there will fall back to the valid table. */
2550 if (current_info == optimistic_info)
2552 current_info = valid_info;
2553 vn_nary_op_insert (val, result);
2554 current_info = optimistic_info;
2556 else
2557 vn_nary_op_insert (val, result);
2558 if (dump_file && (dump_flags & TDF_DETAILS))
2560 fprintf (dump_file, "Inserting name ");
2561 print_generic_expr (dump_file, result, 0);
2562 fprintf (dump_file, " for expression ");
2563 print_generic_expr (dump_file, val, 0);
2564 fprintf (dump_file, "\n");
2569 if (result)
2571 changed = set_ssa_val_to (lhs, result);
2572 if (TREE_CODE (result) == SSA_NAME
2573 && VN_INFO (result)->has_constants)
2575 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2576 VN_INFO (lhs)->has_constants = true;
2579 else
2581 changed = set_ssa_val_to (lhs, lhs);
2582 vn_reference_insert (op, lhs, last_vuse);
2585 return changed;
2589 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2590 and return true if the value number of the LHS has changed as a result. */
2592 static bool
2593 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2595 bool changed = false;
2596 tree result;
2597 bool resultsame = false;
2599 /* First we want to look up using the *vuses* from the store and see
2600 whether the last store to this location with the same address
2601 had the same value.
2603 The vuses represent the memory state before the store. If the
2604 memory state, address, and value of the store is the same as the
2605 last store to this location, then this store will produce the
2606 same memory state as that store.
2608 In this case the vdef versions for this store are value numbered to those
2609 vuse versions, since they represent the same memory state after
2610 this store.
2612 Otherwise, the vdefs for the store are used when inserting into
2613 the table, since the store generates a new memory state. */
2615 result = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_NOWALK, NULL);
2617 if (result)
2619 if (TREE_CODE (result) == SSA_NAME)
2620 result = SSA_VAL (result);
2621 if (TREE_CODE (op) == SSA_NAME)
2622 op = SSA_VAL (op);
2623 resultsame = expressions_equal_p (result, op);
2626 if (!result || !resultsame)
2628 tree vdef;
2630 if (dump_file && (dump_flags & TDF_DETAILS))
2632 fprintf (dump_file, "No store match\n");
2633 fprintf (dump_file, "Value numbering store ");
2634 print_generic_expr (dump_file, lhs, 0);
2635 fprintf (dump_file, " to ");
2636 print_generic_expr (dump_file, op, 0);
2637 fprintf (dump_file, "\n");
2639 /* Have to set value numbers before insert, since insert is
2640 going to valueize the references in-place. */
2641 if ((vdef = gimple_vdef (stmt)))
2643 VN_INFO (vdef)->use_processed = true;
2644 changed |= set_ssa_val_to (vdef, vdef);
2647 /* Do not insert structure copies into the tables. */
2648 if (is_gimple_min_invariant (op)
2649 || is_gimple_reg (op))
2650 vn_reference_insert (lhs, op, vdef);
2652 else
2654 /* We had a match, so value number the vdef to have the value
2655 number of the vuse it came from. */
2656 tree def, use;
2658 if (dump_file && (dump_flags & TDF_DETAILS))
2659 fprintf (dump_file, "Store matched earlier value, "
2660 "value numbering store vdefs to matching vuses.\n");
2662 def = gimple_vdef (stmt);
2663 use = gimple_vuse (stmt);
2665 VN_INFO (def)->use_processed = true;
2666 changed |= set_ssa_val_to (def, SSA_VAL (use));
2669 return changed;
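/* Editorial sketch, not from GCC: the decision made above.  If the
   value last stored at this address under the incoming memory state
   (the vuse) equals the value being stored now, the outgoing memory
   state (the vdef) is value numbered to the incoming one.  All names
   and values below are hypothetical; disabled with #if 0.  */
#if 0
#include <stdbool.h>
#include <stdio.h>

struct store { int addr, value, vuse, vdef; };

/* Toy lookup: did the memory state VUSE already bind ADDR to VALUE?  */
static bool
last_store_matches (int addr, int value, int vuse)
{
  (void) addr; (void) vuse;
  return value == 42; /* Pretend the previous store put 42 there.  */
}

int
main (void)
{
  struct store s = { /*addr=*/8, /*value=*/42, /*vuse=*/1, /*vdef=*/2 };
  int memstate[3] = { 0, 1, 2 }; /* Each state is its own value.  */
  if (last_store_matches (s.addr, s.value, s.vuse))
    memstate[s.vdef] = memstate[s.vuse]; /* The store is redundant.  */
  printf ("vdef %d value-numbers to state %d\n", s.vdef, memstate[s.vdef]);
  return 0;
}
#endif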
2672 /* Visit and value number PHI, return true if the value number
2673 changed. */
2675 static bool
2676 visit_phi (gimple phi)
2678 bool changed = false;
2679 tree result;
2680 tree sameval = VN_TOP;
2681 bool allsame = true;
2682 unsigned i;
2684 /* TODO: We could check for this in init_sccvn, and replace this
2685 with a gcc_assert. */
2686 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2687 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2689 /* See if all non-TOP arguments have the same value. TOP is
2690 equivalent to everything, so we can ignore it. */
2691 for (i = 0; i < gimple_phi_num_args (phi); i++)
2693 tree def = PHI_ARG_DEF (phi, i);
2695 if (TREE_CODE (def) == SSA_NAME)
2696 def = SSA_VAL (def);
2697 if (def == VN_TOP)
2698 continue;
2699 if (sameval == VN_TOP)
2701 sameval = def;
2703 else
2705 if (!expressions_equal_p (def, sameval))
2707 allsame = false;
2708 break;
2713 /* If all arguments value numbered to the same value, the phi node
2714 has that value. */
2715 if (allsame)
2717 if (is_gimple_min_invariant (sameval))
2719 VN_INFO (PHI_RESULT (phi))->has_constants = true;
2720 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2722 else
2724 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2725 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2728 if (TREE_CODE (sameval) == SSA_NAME)
2729 return visit_copy (PHI_RESULT (phi), sameval);
2731 return set_ssa_val_to (PHI_RESULT (phi), sameval);
2734 /* Otherwise, see if it is equivalent to a phi node in this block. */
2735 result = vn_phi_lookup (phi);
2736 if (result)
2738 if (TREE_CODE (result) == SSA_NAME)
2739 changed = visit_copy (PHI_RESULT (phi), result);
2740 else
2741 changed = set_ssa_val_to (PHI_RESULT (phi), result);
2743 else
2745 vn_phi_insert (phi, PHI_RESULT (phi));
2746 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2747 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2748 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2751 return changed;
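/* Editorial sketch, not from GCC: the "all non-TOP arguments agree"
   test of visit_phi above.  TOP arguments are compatible with
   anything, so phi <TOP, x, x> values to x.  Hypothetical, standalone;
   disabled with #if 0.  */
#if 0
#include <stdio.h>

#define TOP (-1)

static int
phi_value (const int *args, unsigned n)
{
  int sameval = TOP;
  for (unsigned i = 0; i < n; i++)
    {
      if (args[i] == TOP)
        continue; /* TOP is equivalent to everything; skip it.  */
      if (sameval == TOP)
        sameval = args[i];
      else if (args[i] != sameval)
        return -2; /* Toy sentinel: no single common value.  */
    }
  return sameval;
}

int
main (void)
{
  int args[] = { TOP, 7, 7 };
  printf ("phi value: %d\n", phi_value (args, 3)); /* Prints 7.  */
  return 0;
}
#endif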
2754 /* Return true if EXPR contains constants. */
2756 static bool
2757 expr_has_constants (tree expr)
2759 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2761 case tcc_unary:
2762 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2764 case tcc_binary:
2765 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2766 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2767 /* Constants inside reference ops are rarely interesting, but
2768 it can take a lot of looking to find them. */
2769 case tcc_reference:
2770 case tcc_declaration:
2771 return false;
2772 default:
2773 return is_gimple_min_invariant (expr);
2775 return false;
2778 /* Return true if STMT contains constants. */
2780 static bool
2781 stmt_has_constants (gimple stmt)
2783 if (gimple_code (stmt) != GIMPLE_ASSIGN)
2784 return false;
2786 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2788 case GIMPLE_UNARY_RHS:
2789 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2791 case GIMPLE_BINARY_RHS:
2792 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2793 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
2794 case GIMPLE_TERNARY_RHS:
2795 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2796 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
2797 || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
2798 case GIMPLE_SINGLE_RHS:
2799 /* Constants inside reference ops are rarely interesting, but
2800 it can take a lot of looking to find them. */
2801 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2802 default:
2803 gcc_unreachable ();
2805 return false;
2808 /* Replace SSA_NAMES in expr with their value numbers, and return the
2809 result.
2810 This is performed in place. */
2812 static tree
2813 valueize_expr (tree expr)
2815 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2817 case tcc_unary:
2818 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2819 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2820 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2821 break;
2822 case tcc_binary:
2823 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2824 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2825 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2826 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
2827 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
2828 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
2829 break;
2830 default:
2831 break;
2833 return expr;
2836 /* Simplify the binary expression RHS, and return the result if
2837 simplified. */
2839 static tree
2840 simplify_binary_expression (gimple stmt)
2842 tree result = NULL_TREE;
2843 tree op0 = gimple_assign_rhs1 (stmt);
2844 tree op1 = gimple_assign_rhs2 (stmt);
2846 /* This will not catch every single case we could combine, but will
2847 catch those with constants. The goal here is to simultaneously
2848 combine constants between expressions, but avoid infinite
2849 expansion of expressions during simplification. */
2850 if (TREE_CODE (op0) == SSA_NAME)
2852 if (VN_INFO (op0)->has_constants
2853 || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
2854 op0 = valueize_expr (vn_get_expr_for (op0));
2855 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
2856 op0 = SSA_VAL (op0);
2859 if (TREE_CODE (op1) == SSA_NAME)
2861 if (VN_INFO (op1)->has_constants)
2862 op1 = valueize_expr (vn_get_expr_for (op1));
2863 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
2864 op1 = SSA_VAL (op1);
2867 /* Pointer plus constant can be represented as invariant address.
2868 Do so to allow further propagation, see also tree forwprop. */
2869 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2870 && host_integerp (op1, 1)
2871 && TREE_CODE (op0) == ADDR_EXPR
2872 && is_gimple_min_invariant (op0))
2873 return build_invariant_address (TREE_TYPE (op0),
2874 TREE_OPERAND (op0, 0),
2875 TREE_INT_CST_LOW (op1));
2877 /* Avoid folding if nothing changed. */
2878 if (op0 == gimple_assign_rhs1 (stmt)
2879 && op1 == gimple_assign_rhs2 (stmt))
2880 return NULL_TREE;
2882 fold_defer_overflow_warnings ();
2884 result = fold_binary (gimple_assign_rhs_code (stmt),
2885 gimple_expr_type (stmt), op0, op1);
2886 if (result)
2887 STRIP_USELESS_TYPE_CONVERSION (result);
2889 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
2890 stmt, 0);
2892 /* Make sure result is not a complex expression consisting
2893 of operators of operators (i.e. (a + b) + (a + c)).
2894 Otherwise, we will end up with unbounded expressions if
2895 fold does anything at all. */
2896 if (result && valid_gimple_rhs_p (result))
2897 return result;
2899 return NULL_TREE;
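/* Editorial sketch, not from GCC: the overall shape of the function
   above in miniature.  Substitute known constants for the SSA
   operands, fold, and only accept a result if the operands actually
   changed.  known[] and simplify_add are hypothetical; disabled with
   #if 0.  */
#if 0
#include <stdio.h>

static int known[8]; /* known[i] >= 0: name i is that constant.  */

static int
simplify_add (int op0, int op1, int *out)
{
  int c0 = known[op0], c1 = known[op1];
  if (c0 < 0 || c1 < 0)
    return 0; /* Nothing changed; avoid folding.  */
  *out = c0 + c1; /* Both constant: fold to a constant.  */
  return 1;
}

int
main (void)
{
  for (int i = 0; i < 8; i++)
    known[i] = -1;
  known[2] = 10;
  known[3] = 32;
  int result;
  if (simplify_add (2, 3, &result))
    printf ("folded to %d\n", result); /* Prints 42.  */
  return 0;
}
#endif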
2902 /* Simplify the unary expression RHS, and return the result if
2903 simplified. */
2905 static tree
2906 simplify_unary_expression (gimple stmt)
2908 tree result = NULL_TREE;
2909 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
2911 /* We handle some tcc_reference codes here that are all
2912 GIMPLE_ASSIGN_SINGLE codes. */
2913 if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
2914 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2915 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2916 op0 = TREE_OPERAND (op0, 0);
2918 if (TREE_CODE (op0) != SSA_NAME)
2919 return NULL_TREE;
2921 orig_op0 = op0;
2922 if (VN_INFO (op0)->has_constants)
2923 op0 = valueize_expr (vn_get_expr_for (op0));
2924 else if (gimple_assign_cast_p (stmt)
2925 || gimple_assign_rhs_code (stmt) == REALPART_EXPR
2926 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2927 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2929 /* We want to do tree-combining on conversion-like expressions.
2930 Make sure we feed only SSA_NAMEs or constants to fold though. */
2931 tree tem = valueize_expr (vn_get_expr_for (op0));
2932 if (UNARY_CLASS_P (tem)
2933 || BINARY_CLASS_P (tem)
2934 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
2935 || TREE_CODE (tem) == SSA_NAME
2936 || is_gimple_min_invariant (tem))
2937 op0 = tem;
2940 /* Avoid folding if nothing changed, but remember the expression. */
2941 if (op0 == orig_op0)
2942 return NULL_TREE;
2944 result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
2945 gimple_expr_type (stmt), op0);
2946 if (result)
2948 STRIP_USELESS_TYPE_CONVERSION (result);
2949 if (valid_gimple_rhs_p (result))
2950 return result;
2953 return NULL_TREE;
2956 /* Valueize NAME if it is an SSA name, otherwise just return it. */
2958 static inline tree
2959 vn_valueize (tree name)
2961 if (TREE_CODE (name) == SSA_NAME)
2963 tree tem = SSA_VAL (name);
2964 return tem == VN_TOP ? name : tem;
2966 return name;
2969 /* Try to simplify RHS using equivalences and constant folding. */
2971 static tree
2972 try_to_simplify (gimple stmt)
2974 tree tem;
2976 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
2977 in this case; there is no point in doing extra work. */
2978 if (gimple_assign_copy_p (stmt)
2979 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2980 return NULL_TREE;
2982 /* First try constant folding based on our current lattice. */
2983 tem = gimple_fold_stmt_to_constant (stmt, vn_valueize);
2984 if (tem)
2985 return tem;
2987 /* If that didn't work try combining multiple statements. */
2988 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2990 case tcc_reference:
2991 /* Fallthrough for some codes that can operate on registers. */
2992 if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
2993 || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
2994 || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
2995 break;
2996 /* We could do a little more with unary ops, if they expand
2997 into binary ops, but it's debatable whether it is worth it. */
2998 case tcc_unary:
2999 return simplify_unary_expression (stmt);
3001 case tcc_comparison:
3002 case tcc_binary:
3003 return simplify_binary_expression (stmt);
3005 default:
3006 break;
3009 return NULL_TREE;
3012 /* Visit and value number USE, return true if the value number
3013 changed. */
3015 static bool
3016 visit_use (tree use)
3018 bool changed = false;
3019 gimple stmt = SSA_NAME_DEF_STMT (use);
3021 VN_INFO (use)->use_processed = true;
3023 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3024 if (dump_file && (dump_flags & TDF_DETAILS)
3025 && !SSA_NAME_IS_DEFAULT_DEF (use))
3027 fprintf (dump_file, "Value numbering ");
3028 print_generic_expr (dump_file, use, 0);
3029 fprintf (dump_file, " stmt = ");
3030 print_gimple_stmt (dump_file, stmt, 0, 0);
3033 /* Handle uninitialized uses. */
3034 if (SSA_NAME_IS_DEFAULT_DEF (use))
3035 changed = set_ssa_val_to (use, use);
3036 else
3038 if (gimple_code (stmt) == GIMPLE_PHI)
3039 changed = visit_phi (stmt);
3040 else if (!gimple_has_lhs (stmt)
3041 || gimple_has_volatile_ops (stmt)
3042 || stmt_could_throw_p (stmt))
3043 changed = defs_to_varying (stmt);
3044 else if (is_gimple_assign (stmt))
3046 tree lhs = gimple_assign_lhs (stmt);
3047 tree simplified;
3049 /* Shortcut for copies. Simplifying copies is pointless,
3050 since we copy the expression and value they represent. */
3051 if (gimple_assign_copy_p (stmt)
3052 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
3053 && TREE_CODE (lhs) == SSA_NAME)
3055 changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
3056 goto done;
3058 simplified = try_to_simplify (stmt);
3059 if (simplified)
3061 if (dump_file && (dump_flags & TDF_DETAILS))
3063 fprintf (dump_file, "RHS ");
3064 print_gimple_expr (dump_file, stmt, 0, 0);
3065 fprintf (dump_file, " simplified to ");
3066 print_generic_expr (dump_file, simplified, 0);
3067 if (TREE_CODE (lhs) == SSA_NAME)
3068 fprintf (dump_file, " has constants %d\n",
3069 expr_has_constants (simplified));
3070 else
3071 fprintf (dump_file, "\n");
3074 /* Setting value numbers to constants will occasionally
3075 screw up phi congruence because constants are not
3076 uniquely associated with a single ssa name that can be
3077 looked up. */
3078 if (simplified
3079 && is_gimple_min_invariant (simplified)
3080 && TREE_CODE (lhs) == SSA_NAME)
3082 VN_INFO (lhs)->expr = simplified;
3083 VN_INFO (lhs)->has_constants = true;
3084 changed = set_ssa_val_to (lhs, simplified);
3085 goto done;
3087 else if (simplified
3088 && TREE_CODE (simplified) == SSA_NAME
3089 && TREE_CODE (lhs) == SSA_NAME)
3091 changed = visit_copy (lhs, simplified);
3092 goto done;
3094 else if (simplified)
3096 if (TREE_CODE (lhs) == SSA_NAME)
3098 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3099 /* We have to unshare the expression or else
3100 valueizing may change the IL stream. */
3101 VN_INFO (lhs)->expr = unshare_expr (simplified);
3104 else if (stmt_has_constants (stmt)
3105 && TREE_CODE (lhs) == SSA_NAME)
3106 VN_INFO (lhs)->has_constants = true;
3107 else if (TREE_CODE (lhs) == SSA_NAME)
3109 /* We reset expr and constantness here because we may
3110 have been value numbering optimistically, and
3111 iterating. They may become non-constant in this case,
3112 even if they were optimistically constant. */
3114 VN_INFO (lhs)->has_constants = false;
3115 VN_INFO (lhs)->expr = NULL_TREE;
3118 if ((TREE_CODE (lhs) == SSA_NAME
3119 /* We can substitute SSA_NAMEs that are live over
3120 abnormal edges with their constant value. */
3121 && !(gimple_assign_copy_p (stmt)
3122 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
3123 && !(simplified
3124 && is_gimple_min_invariant (simplified))
3125 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3126 /* Stores or copies from SSA_NAMEs that are live over
3127 abnormal edges are a problem. */
3128 || (gimple_assign_single_p (stmt)
3129 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
3130 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
3131 changed = defs_to_varying (stmt);
3132 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
3134 changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
3136 else if (TREE_CODE (lhs) == SSA_NAME)
3138 if ((gimple_assign_copy_p (stmt)
3139 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
3140 || (simplified
3141 && is_gimple_min_invariant (simplified)))
3143 VN_INFO (lhs)->has_constants = true;
3144 if (simplified)
3145 changed = set_ssa_val_to (lhs, simplified);
3146 else
3147 changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
3149 else
3151 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
3153 case GIMPLE_UNARY_RHS:
3154 case GIMPLE_BINARY_RHS:
3155 case GIMPLE_TERNARY_RHS:
3156 changed = visit_nary_op (lhs, stmt);
3157 break;
3158 case GIMPLE_SINGLE_RHS:
3159 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
3161 case tcc_reference:
3162 /* VOP-less references can go through unary case. */
3163 if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
3164 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
3165 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
3166 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
3168 changed = visit_nary_op (lhs, stmt);
3169 break;
3171 /* Fallthrough. */
3172 case tcc_declaration:
3173 changed = visit_reference_op_load
3174 (lhs, gimple_assign_rhs1 (stmt), stmt);
3175 break;
3176 case tcc_expression:
3177 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
3179 changed = visit_nary_op (lhs, stmt);
3180 break;
3182 /* Fallthrough. */
3183 default:
3184 changed = defs_to_varying (stmt);
3186 break;
3187 default:
3188 changed = defs_to_varying (stmt);
3189 break;
3193 else
3194 changed = defs_to_varying (stmt);
3196 else if (is_gimple_call (stmt))
3198 tree lhs = gimple_call_lhs (stmt);
3200 /* ??? We could try to simplify calls. */
3202 if (stmt_has_constants (stmt)
3203 && TREE_CODE (lhs) == SSA_NAME)
3204 VN_INFO (lhs)->has_constants = true;
3205 else if (TREE_CODE (lhs) == SSA_NAME)
3207 /* We reset expr and constantness here because we may
3208 have been value numbering optimistically, and
3209 iterating. They may become non-constant in this case,
3210 even if they were optimistically constant. */
3211 VN_INFO (lhs)->has_constants = false;
3212 VN_INFO (lhs)->expr = NULL_TREE;
3215 if (TREE_CODE (lhs) == SSA_NAME
3216 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3217 changed = defs_to_varying (stmt);
3218 /* ??? We should handle stores from calls. */
3219 else if (TREE_CODE (lhs) == SSA_NAME)
3221 if (!gimple_call_internal_p (stmt)
3222 && gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
3223 changed = visit_reference_op_call (lhs, stmt);
3224 else
3225 changed = defs_to_varying (stmt);
3227 else
3228 changed = defs_to_varying (stmt);
3231 done:
3232 return changed;
3235 /* Compare two operands by reverse postorder index. */
3237 static int
3238 compare_ops (const void *pa, const void *pb)
3240 const tree opa = *((const tree *)pa);
3241 const tree opb = *((const tree *)pb);
3242 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3243 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3244 basic_block bba;
3245 basic_block bbb;
3247 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3248 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3249 else if (gimple_nop_p (opstmta))
3250 return -1;
3251 else if (gimple_nop_p (opstmtb))
3252 return 1;
3254 bba = gimple_bb (opstmta);
3255 bbb = gimple_bb (opstmtb);
3257 if (!bba && !bbb)
3258 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3259 else if (!bba)
3260 return -1;
3261 else if (!bbb)
3262 return 1;
3264 if (bba == bbb)
3266 if (gimple_code (opstmta) == GIMPLE_PHI
3267 && gimple_code (opstmtb) == GIMPLE_PHI)
3268 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3269 else if (gimple_code (opstmta) == GIMPLE_PHI)
3270 return -1;
3271 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3272 return 1;
3273 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3274 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3275 else
3276 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3278 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3281 /* Sort an array containing members of a strongly connected component
3282 SCC so that the members are ordered by RPO number.
3283 This means that when the sort is complete, iterating through the
3284 array will give you the members in RPO order. */
3286 static void
3287 sort_scc (VEC (tree, heap) *scc)
3289 VEC_qsort (tree, scc, compare_ops);
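/* Editorial sketch, not from GCC: sorting an SCC by RPO number with
   qsort, the effect of sort_scc/compare_ops above.  Processing the
   members in RPO order means most operands are visited before their
   uses, so the iteration converges in fewer passes.  struct member
   and by_rpo are hypothetical; disabled with #if 0.  */
#if 0
#include <stdio.h>
#include <stdlib.h>

struct member { int name, rpo; };

static int
by_rpo (const void *pa, const void *pb)
{
  const struct member *a = pa, *b = pb;
  return a->rpo - b->rpo;
}

int
main (void)
{
  struct member scc[] = { { 7, 2 }, { 3, 0 }, { 5, 1 } };
  qsort (scc, 3, sizeof (scc[0]), by_rpo);
  for (int i = 0; i < 3; i++)
    printf ("%d ", scc[i].name); /* Prints 3 5 7.  */
  printf ("\n");
  return 0;
}
#endif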
3292 /* Insert the no longer used nary ONARY into the hash INFO. */
3294 static void
3295 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3297 size_t size = sizeof_vn_nary_op (onary->length);
3298 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3299 &info->nary_obstack);
3300 memcpy (nary, onary, size);
3301 vn_nary_op_insert_into (nary, info->nary, false);
3304 /* Insert the no longer used phi OPHI into the hash INFO. */
3306 static void
3307 copy_phi (vn_phi_t ophi, vn_tables_t info)
3309 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3310 void **slot;
3311 memcpy (phi, ophi, sizeof (*phi));
3312 ophi->phiargs = NULL;
3313 slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
3314 gcc_assert (!*slot);
3315 *slot = phi;
3318 /* Insert the no longer used reference OREF into the hash INFO. */
3320 static void
3321 copy_reference (vn_reference_t oref, vn_tables_t info)
3323 vn_reference_t ref;
3324 void **slot;
3325 ref = (vn_reference_t) pool_alloc (info->references_pool);
3326 memcpy (ref, oref, sizeof (*ref));
3327 oref->operands = NULL;
3328 slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
3329 INSERT);
3330 if (*slot)
3331 free_reference (*slot);
3332 *slot = ref;
3335 /* Process a strongly connected component in the SSA graph. */
3337 static void
3338 process_scc (VEC (tree, heap) *scc)
3340 tree var;
3341 unsigned int i;
3342 unsigned int iterations = 0;
3343 bool changed = true;
3344 htab_iterator hi;
3345 vn_nary_op_t nary;
3346 vn_phi_t phi;
3347 vn_reference_t ref;
3349 /* If the SCC has a single member, just visit it. */
3350 if (VEC_length (tree, scc) == 1)
3352 tree use = VEC_index (tree, scc, 0);
3353 if (VN_INFO (use)->use_processed)
3354 return;
3355 /* We need to make sure it doesn't form a cycle itself, which can
3356 happen for self-referential PHI nodes. In that case we would
3357 end up inserting an expression with VN_TOP operands into the
3358 valid table which makes us derive bogus equivalences later.
3359 The cheapest way to check this is to assume it for all PHI nodes. */
3360 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3361 /* Fallthru to iteration. */ ;
3362 else
3364 visit_use (use);
3365 return;
3369 /* Iterate over the SCC with the optimistic table until it stops
3370 changing. */
3371 current_info = optimistic_info;
3372 while (changed)
3374 changed = false;
3375 iterations++;
3376 if (dump_file && (dump_flags & TDF_DETAILS))
3377 fprintf (dump_file, "Starting iteration %d\n", iterations);
3378 /* As we are value-numbering optimistically we have to
3379 clear the expression tables and the simplified expressions
3380 in each iteration until we converge. */
3381 htab_empty (optimistic_info->nary);
3382 htab_empty (optimistic_info->phis);
3383 htab_empty (optimistic_info->references);
3384 obstack_free (&optimistic_info->nary_obstack, NULL);
3385 gcc_obstack_init (&optimistic_info->nary_obstack);
3386 empty_alloc_pool (optimistic_info->phis_pool);
3387 empty_alloc_pool (optimistic_info->references_pool);
3388 FOR_EACH_VEC_ELT (tree, scc, i, var)
3389 VN_INFO (var)->expr = NULL_TREE;
3390 FOR_EACH_VEC_ELT (tree, scc, i, var)
3391 changed |= visit_use (var);
3394 statistics_histogram_event (cfun, "SCC iterations", iterations);
3396 /* Finally, copy the contents of the no longer used optimistic
3397 table to the valid table. */
3398 FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
3399 copy_nary (nary, valid_info);
3400 FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
3401 copy_phi (phi, valid_info);
3402 FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
3403 copy_reference (ref, valid_info);
3405 current_info = valid_info;
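/* Editorial sketch, not from GCC: the fixpoint loop of process_scc in
   miniature.  Revisit every member of the cycle until no value number
   changes; the pass count stays small because most SCCs are small.
   The toy cycle below is hypothetical; disabled with #if 0.  */
#if 0
#include <stdbool.h>
#include <stdio.h>

int
main (void)
{
  /* Toy cycle: x = phi (0, y); y = x.  Start both at "unknown" (-1).  */
  int x = -1, y = -1;
  bool changed = true;
  int iterations = 0;
  while (changed)
    {
      changed = false;
      iterations++;
      int nx = (y == -1 || y == 0) ? 0 : -2; /* phi of 0 and y.  */
      if (nx != x) { x = nx; changed = true; }
      if (x != y) { y = x; changed = true; }
    }
  printf ("converged to x=%d y=%d after %d iterations\n",
          x, y, iterations);
  return 0;
}
#endif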
3408 DEF_VEC_O(ssa_op_iter);
3409 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
3411 /* Pop the components of the found SCC for NAME off the SCC stack
3412 and process them. Returns true if all went well, false if
3413 we run into resource limits. */
3415 static bool
3416 extract_and_process_scc_for_name (tree name)
3418 VEC (tree, heap) *scc = NULL;
3419 tree x;
3421 /* Found an SCC, pop the components off the SCC stack and
3422 process them. */
3425 x = VEC_pop (tree, sccstack);
3427 VN_INFO (x)->on_sccstack = false;
3428 VEC_safe_push (tree, heap, scc, x);
3429 } while (x != name);
3431 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
3432 if (VEC_length (tree, scc)
3433 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3435 if (dump_file)
3436 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3437 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
3438 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3439 return false;
3442 if (VEC_length (tree, scc) > 1)
3443 sort_scc (scc);
3445 if (dump_file && (dump_flags & TDF_DETAILS))
3446 print_scc (dump_file, scc);
3448 process_scc (scc);
3450 VEC_free (tree, heap, scc);
3452 return true;
3455 /* Depth first search on NAME to discover and process SCCs in the SSA
3456 graph.
3457 Execution of this algorithm relies on the fact that the SCCs are
3458 popped off the stack in topological order.
3459 Returns true if successful, false if we stopped processing SCCs due
3460 to resource constraints. */
3462 static bool
3463 DFS (tree name)
3465 VEC(ssa_op_iter, heap) *itervec = NULL;
3466 VEC(tree, heap) *namevec = NULL;
3467 use_operand_p usep = NULL;
3468 gimple defstmt;
3469 tree use;
3470 ssa_op_iter iter;
3472 start_over:
3473 /* SCC info */
3474 VN_INFO (name)->dfsnum = next_dfs_num++;
3475 VN_INFO (name)->visited = true;
3476 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3478 VEC_safe_push (tree, heap, sccstack, name);
3479 VN_INFO (name)->on_sccstack = true;
3480 defstmt = SSA_NAME_DEF_STMT (name);
3482 /* Recursively DFS on our operands, looking for SCCs. */
3483 if (!gimple_nop_p (defstmt))
3485 /* Push a new iterator. */
3486 if (gimple_code (defstmt) == GIMPLE_PHI)
3487 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3488 else
3489 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3491 else
3492 clear_and_done_ssa_iter (&iter);
3494 while (1)
3496 /* If we are done processing uses of a name, go up the stack
3497 of iterators and process SCCs as we found them. */
3498 if (op_iter_done (&iter))
3500 /* See if we found an SCC. */
3501 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3502 if (!extract_and_process_scc_for_name (name))
3504 VEC_free (tree, heap, namevec);
3505 VEC_free (ssa_op_iter, heap, itervec);
3506 return false;
3509 /* Check if we are done. */
3510 if (VEC_empty (tree, namevec))
3512 VEC_free (tree, heap, namevec);
3513 VEC_free (ssa_op_iter, heap, itervec);
3514 return true;
3517 /* Restore the last use walker and continue walking there. */
3518 use = name;
3519 name = VEC_pop (tree, namevec);
3520 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
3521 sizeof (ssa_op_iter));
3522 VEC_pop (ssa_op_iter, itervec);
3523 goto continue_walking;
3526 use = USE_FROM_PTR (usep);
3528 /* Since we handle phi nodes, we will sometimes get
3529 invariants in the use expression. */
3530 if (TREE_CODE (use) == SSA_NAME)
3532 if (! (VN_INFO (use)->visited))
3534 /* Recurse by pushing the current use walking state on
3535 the stack and starting over. */
3536 VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
3537 VEC_safe_push(tree, heap, namevec, name);
3538 name = use;
3539 goto start_over;
3541 continue_walking:
3542 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3543 VN_INFO (use)->low);
3545 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3546 && VN_INFO (use)->on_sccstack)
3548 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3549 VN_INFO (name)->low);
3553 usep = op_iter_next_use (&iter);
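/* Editorial sketch, not from GCC: the core of Tarjan's SCC algorithm
   that DFS above implements with an explicit, heap-allocated walk
   stack (to avoid deep recursion on large SSA graphs).  This sketch
   uses plain recursion for brevity; succ[][] is a hypothetical
   adjacency list.  Disabled with #if 0.  */
#if 0
#include <stdio.h>

#define N 4
static int succ[N][N] = { { 1 }, { 2 }, { 0 }, { -1 } };
static int nsucc[N] = { 1, 1, 1, 0 };
static int dfsnum[N], low[N], onstack[N], stack[N], sp, next_dfs = 1;

static void
dfs (int v)
{
  dfsnum[v] = low[v] = next_dfs++;
  stack[sp++] = v;
  onstack[v] = 1;
  for (int i = 0; i < nsucc[v]; i++)
    {
      int w = succ[v][i];
      if (!dfsnum[w])
        {
          dfs (w);
          if (low[w] < low[v]) low[v] = low[w];
        }
      else if (onstack[w] && dfsnum[w] < low[v])
        low[v] = dfsnum[w];
    }
  if (low[v] == dfsnum[v]) /* v is the root of an SCC: pop it.  */
    {
      printf ("SCC:");
      int w;
      do { w = stack[--sp]; onstack[w] = 0; printf (" %d", w); }
      while (w != v);
      printf ("\n");
    }
}

int
main (void)
{
  for (int v = 0; v < N; v++)
    if (!dfsnum[v])
      dfs (v); /* Prints the 3-cycle {0,1,2} and the singleton {3}.  */
  return 0;
}
#endif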
3557 /* Allocate a value number table. */
3559 static void
3560 allocate_vn_table (vn_tables_t table)
3562 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3563 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3564 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3565 free_reference);
3567 gcc_obstack_init (&table->nary_obstack);
3568 table->phis_pool = create_alloc_pool ("VN phis",
3569 sizeof (struct vn_phi_s),
3570 30);
3571 table->references_pool = create_alloc_pool ("VN references",
3572 sizeof (struct vn_reference_s),
3573 30);
3576 /* Free a value number table. */
3578 static void
3579 free_vn_table (vn_tables_t table)
3581 htab_delete (table->phis);
3582 htab_delete (table->nary);
3583 htab_delete (table->references);
3584 obstack_free (&table->nary_obstack, NULL);
3585 free_alloc_pool (table->phis_pool);
3586 free_alloc_pool (table->references_pool);
3589 static void
3590 init_scc_vn (void)
3592 size_t i;
3593 int j;
3594 int *rpo_numbers_temp;
3596 calculate_dominance_info (CDI_DOMINATORS);
3597 sccstack = NULL;
3598 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3599 free);
3601 constant_value_ids = BITMAP_ALLOC (NULL);
3603 next_dfs_num = 1;
3604 next_value_id = 1;
3606 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
3607 /* VEC_alloc doesn't actually grow it to the right size; it just
3608 preallocates the space to do so. */
3609 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
3610 gcc_obstack_init (&vn_ssa_aux_obstack);
3612 shared_lookup_phiargs = NULL;
3613 shared_lookup_references = NULL;
3614 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3615 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3616 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3618 /* rpo_numbers_temp holds the RPO ordering: rpo[i] = bb means that
3619 the i'th block in RPO order is bb. We want to map each bb to its
3620 RPO number, so we need to invert this array. */
3621 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3622 rpo_numbers[rpo_numbers_temp[j]] = j;
3624 XDELETE (rpo_numbers_temp);
3626 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3628 /* Create the VN_INFO structures, and initialize value numbers to
3629 TOP. */
3630 for (i = 0; i < num_ssa_names; i++)
3632 tree name = ssa_name (i);
3633 if (name)
3635 VN_INFO_GET (name)->valnum = VN_TOP;
3636 VN_INFO (name)->expr = NULL_TREE;
3637 VN_INFO (name)->value_id = 0;
3641 renumber_gimple_stmt_uids ();
3643 /* Create the valid and optimistic value numbering tables. */
3644 valid_info = XCNEW (struct vn_tables_s);
3645 allocate_vn_table (valid_info);
3646 optimistic_info = XCNEW (struct vn_tables_s);
3647 allocate_vn_table (optimistic_info);
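/* Editorial sketch, not from GCC: inverting the RPO array, as the loop
   over rpo_numbers_temp in init_scc_vn above does.  The input maps
   position-in-RPO -> block index; we want block index ->
   position-in-RPO.  Values are hypothetical; disabled with #if 0.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int rpo_order[4] = { 2, 0, 3, 1 }; /* i'th block in RPO order.  */
  int rpo_number[4];
  for (int j = 0; j < 4; j++)
    rpo_number[rpo_order[j]] = j; /* Invert the permutation.  */
  for (int bb = 0; bb < 4; bb++)
    printf ("block %d has RPO number %d\n", bb, rpo_number[bb]);
  return 0;
}
#endif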
3650 void
3651 free_scc_vn (void)
3653 size_t i;
3655 htab_delete (constant_to_value_id);
3656 BITMAP_FREE (constant_value_ids);
3657 VEC_free (tree, heap, shared_lookup_phiargs);
3658 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
3659 XDELETEVEC (rpo_numbers);
3661 for (i = 0; i < num_ssa_names; i++)
3663 tree name = ssa_name (i);
3664 if (name
3665 && VN_INFO (name)->needs_insertion)
3666 release_ssa_name (name);
3668 obstack_free (&vn_ssa_aux_obstack, NULL);
3669 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
3671 VEC_free (tree, heap, sccstack);
3672 free_vn_table (valid_info);
3673 XDELETE (valid_info);
3674 free_vn_table (optimistic_info);
3675 XDELETE (optimistic_info);
3678 /* Set *ID if we computed something useful in RESULT. */
3680 static void
3681 set_value_id_for_result (tree result, unsigned int *id)
3683 if (result)
3685 if (TREE_CODE (result) == SSA_NAME)
3686 *id = VN_INFO (result)->value_id;
3687 else if (is_gimple_min_invariant (result))
3688 *id = get_or_alloc_constant_value_id (result);
3692 /* Set the value ids in the valid hash tables. */
3694 static void
3695 set_hashtable_value_ids (void)
3697 htab_iterator hi;
3698 vn_nary_op_t vno;
3699 vn_reference_t vr;
3700 vn_phi_t vp;
3702 /* Now set the value ids of the things we had put in the hash
3703 table. */
3705 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
3706 vno, vn_nary_op_t, hi)
3707 set_value_id_for_result (vno->result, &vno->value_id);
3709 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
3710 vp, vn_phi_t, hi)
3711 set_value_id_for_result (vp->result, &vp->value_id);
3713 FOR_EACH_HTAB_ELEMENT (valid_info->references,
3714 vr, vn_reference_t, hi)
3715 set_value_id_for_result (vr->result, &vr->value_id);
3718 /* Do SCCVN. Returns true if it finished, false if we bailed out
3719 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
3720 how we use alias-oracle walking during the VN process. */
3722 bool
3723 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
3725 size_t i;
3726 tree param;
3727 bool changed = true;
3729 default_vn_walk_kind = default_vn_walk_kind_;
3731 init_scc_vn ();
3732 current_info = valid_info;
3734 for (param = DECL_ARGUMENTS (current_function_decl);
3735 param;
3736 param = DECL_CHAIN (param))
3738 if (gimple_default_def (cfun, param) != NULL)
3740 tree def = gimple_default_def (cfun, param);
3741 VN_INFO (def)->valnum = def;
3745 for (i = 1; i < num_ssa_names; ++i)
3747 tree name = ssa_name (i);
3748 if (name
3749 && VN_INFO (name)->visited == false
3750 && !has_zero_uses (name))
3751 if (!DFS (name))
3753 free_scc_vn ();
3754 return false;
3758 /* Initialize the value ids. */
3760 for (i = 1; i < num_ssa_names; ++i)
3762 tree name = ssa_name (i);
3763 vn_ssa_aux_t info;
3764 if (!name)
3765 continue;
3766 info = VN_INFO (name);
3767 if (info->valnum == name
3768 || info->valnum == VN_TOP)
3769 info->value_id = get_next_value_id ();
3770 else if (is_gimple_min_invariant (info->valnum))
3771 info->value_id = get_or_alloc_constant_value_id (info->valnum);
3774 /* Propagate until they stop changing. */
3775 while (changed)
3777 changed = false;
3778 for (i = 1; i < num_ssa_names; ++i)
3780 tree name = ssa_name (i);
3781 vn_ssa_aux_t info;
3782 if (!name)
3783 continue;
3784 info = VN_INFO (name);
3785 if (TREE_CODE (info->valnum) == SSA_NAME
3786 && info->valnum != name
3787 && info->value_id != VN_INFO (info->valnum)->value_id)
3789 changed = true;
3790 info->value_id = VN_INFO (info->valnum)->value_id;
3795 set_hashtable_value_ids ();
3797 if (dump_file && (dump_flags & TDF_DETAILS))
3799 fprintf (dump_file, "Value numbers:\n");
3800 for (i = 0; i < num_ssa_names; i++)
3802 tree name = ssa_name (i);
3803 if (name
3804 && VN_INFO (name)->visited
3805 && SSA_VAL (name) != name)
3807 print_generic_expr (dump_file, name, 0);
3808 fprintf (dump_file, " = ");
3809 print_generic_expr (dump_file, SSA_VAL (name), 0);
3810 fprintf (dump_file, "\n");
3815 return true;
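/* Editorial sketch, not from GCC: the value-id propagation loop at the
   end of run_scc_vn above.  Chains like a -> b -> c may only settle
   after several sweeps, hence "propagate until they stop changing".
   The arrays below are hypothetical; disabled with #if 0.  */
#if 0
#include <stdbool.h>
#include <stdio.h>

int
main (void)
{
  /* valnum: name -> representative name; ids start out distinct.  */
  int valnum[4] = { 1, 2, 3, 3 }; /* Chain 0 -> 1 -> 2 -> 3.  */
  int id[4] = { 10, 11, 12, 13 };
  bool changed = true;
  while (changed)
    {
      changed = false;
      for (int i = 0; i < 4; i++)
        if (valnum[i] != i && id[i] != id[valnum[i]])
          {
            id[i] = id[valnum[i]];
            changed = true;
          }
    }
  for (int i = 0; i < 4; i++)
    printf ("name %d: value id %d\n", i, id[i]); /* All end up 13.  */
  return 0;
}
#endif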
3818 /* Return the maximum value id we have ever seen. */
3820 unsigned int
3821 get_max_value_id (void)
3823 return next_value_id;
3826 /* Return the next unique value id. */
3828 unsigned int
3829 get_next_value_id (void)
3831 return next_value_id++;
3835 /* Compare two expressions E1 and E2 and return true if they are equal. */
3837 bool
3838 expressions_equal_p (tree e1, tree e2)
3840 /* The obvious case. */
3841 if (e1 == e2)
3842 return true;
3844 /* If only one of them is null, they cannot be equal. */
3845 if (!e1 || !e2)
3846 return false;
3848 /* Now perform the actual comparison. */
3849 if (TREE_CODE (e1) == TREE_CODE (e2)
3850 && operand_equal_p (e1, e2, OEP_PURE_SAME))
3851 return true;
3853 return false;
3857 /* Return true if the nary operation NARY may trap. This is a copy
3858 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
3860 bool
3861 vn_nary_may_trap (vn_nary_op_t nary)
3863 tree type;
3864 tree rhs2 = NULL_TREE;
3865 bool honor_nans = false;
3866 bool honor_snans = false;
3867 bool fp_operation = false;
3868 bool honor_trapv = false;
3869 bool handled, ret;
3870 unsigned i;
3872 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
3873 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
3874 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
3876 type = nary->type;
3877 fp_operation = FLOAT_TYPE_P (type);
3878 if (fp_operation)
3880 honor_nans = flag_trapping_math && !flag_finite_math_only;
3881 honor_snans = flag_signaling_nans != 0;
3883 else if (INTEGRAL_TYPE_P (type)
3884 && TYPE_OVERFLOW_TRAPS (type))
3885 honor_trapv = true;
3887 if (nary->length >= 2)
3888 rhs2 = nary->op[1];
3889 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
3890 honor_trapv,
3891 honor_nans, honor_snans, rhs2,
3892 &handled);
3893 if (handled
3894 && ret)
3895 return true;
3897 for (i = 0; i < nary->length; ++i)
3898 if (tree_could_trap_p (nary->op[i]))
3899 return true;
3901 return false;