/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "basic-block.h"
28 #include "tree-pretty-print.h"
29 #include "gimple-pretty-print.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
32 #include "gimple.h"
33 #include "tree-dump.h"
34 #include "timevar.h"
35 #include "fibheap.h"
36 #include "hashtab.h"
37 #include "tree-iterator.h"
38 #include "alloc-pool.h"
39 #include "tree-pass.h"
40 #include "flags.h"
41 #include "bitmap.h"
42 #include "langhooks.h"
43 #include "cfgloop.h"
44 #include "params.h"
45 #include "tree-ssa-propagate.h"
46 #include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.
   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
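
/* As an illustrative sketch of why the optimistic table matters (the
   GIMPLE below is hypothetical), consider a loop counter that never
   actually changes:

       i_1 = PHI <0, i_2>
       i_2 = i_1 + 0

   The two names form one SCC.  On the first iteration i_2 is still
   VN_TOP, so the PHI optimistically value numbers to 0, and i_2 then
   to 0 + 0 == 0.  The second iteration sees PHI <0, 0>, confirming
   the assumption, so the whole cycle is proven to be the constant 0,
   something a single pessimistic pass could not conclude.  */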
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;
static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;


DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
                                SSA_NAME_VERSION (name));
  gcc_checking_assert (res);
  return res;
}
/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), value);
}
/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}
/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is a default-definition or a PHI result
     use it directly.  */
  if (gimple_nop_p (def_stmt)
      || gimple_code (def_stmt) == GIMPLE_PHI)
    return vn->valnum;

  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
    {
    case tcc_reference:
      if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
           || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
           || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
          && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
        expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}
/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, INSERT);
  if (*slot)
    return ((vn_constant_t)*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = (void *) vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return vro1->opcode == vro2->opcode
    && types_compatible_p (vro1->type, vro2->type)
    && expressions_equal_p (vro1->op0, vro2->op0)
    && expressions_equal_p (vro1->op1, vro2->op1)
    && expressions_equal_p (vro1->op2, vro2->op2);
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}
/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vn_reference_op_s, vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
        deref = true;
      else if (vro->opcode != ADDR_EXPR)
        deref = false;
      if (vro->off != -1)
        {
          if (off == -1)
            off = 0;
          off += vro->off;
        }
      else
        {
          if (off != -1
              && off != 0)
            result = iterative_hash_hashval_t (off, result);
          off = -1;
          if (deref
              && vro->opcode == ADDR_EXPR)
            {
              if (vro->op0)
                {
                  tree op = TREE_OPERAND (vro->op0, 0);
                  result = iterative_hash_hashval_t (TREE_CODE (op), result);
                  result = iterative_hash_expr (op, result);
                }
            }
          else
            result = vn_reference_op_compute_hash (vro, result);
        }
    }
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
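
/* The off summing above exists so that different spellings of the
   same address hash alike.  For example (illustrative, assuming field
   b lives at byte offset 4), the operand strings for s.b and for
   MEM[(struct S *)&s + 4B] both reduce to the base s plus a constant
   offset of 4, and the base behind a constant ADDR_EXPR is hashed as
   if it were dereferenced directly.  */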
/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  unsigned i, j;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
        return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
           && (TYPE_PRECISION (vr1->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
           && (TYPE_PRECISION (vr2->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
        {
          if (vro1->opcode == MEM_REF)
            deref1 = true;
          if (vro1->off == -1)
            break;
          off1 += vro1->off;
        }
      for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
        {
          if (vro2->opcode == MEM_REF)
            deref2 = true;
          if (vro2->off == -1)
            break;
          off2 += vro2->off;
        }
      if (off1 != off2)
        return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
        {
          memset (&tem1, 0, sizeof (tem1));
          tem1.op0 = TREE_OPERAND (vro1->op0, 0);
          tem1.type = TREE_TYPE (tem1.op0);
          tem1.opcode = TREE_CODE (tem1.op0);
          vro1 = &tem1;
        }
      if (deref2 && vro2->opcode == ADDR_EXPR)
        {
          memset (&tem2, 0, sizeof (tem2));
          tem2.op0 = TREE_OPERAND (vro2->op0, 0);
          tem2.type = TREE_TYPE (tem2.op0);
          tem2.opcode = TREE_CODE (tem2.op0);
          vro2 = &tem2;
        }
      if (!vn_reference_op_eq (vro1, vro2))
        return false;
      ++j;
      ++i;
    }
  while (VEC_length (vn_reference_op_s, vr1->operands) != i
         || VEC_length (vn_reference_op_s, vr2->operands) != j);

  return true;
}
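
/* In other words, runs of operands with known constant offsets are
   compared by their accumulated offset rather than operand by
   operand.  As an illustrative example,
   <COMPONENT_REF b (off 4), VAR_DECL s> and
   <MEM_REF (off 4), ADDR_EXPR &s> can compare equal even though
   their operand strings differ structurally.  */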
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
        {
        case MEM_REF:
          /* The base address gets its own vn_reference_op_s structure.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          if (host_integerp (TREE_OPERAND (ref, 1), 0))
            temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          {
            tree this_offset = component_ref_field_offset (ref);
            if (this_offset
                && TREE_CODE (this_offset) == INTEGER_CST)
              {
                tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
                if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
                  {
                    double_int off
                      = double_int_add (tree_to_double_int (this_offset),
                                        double_int_rshift
                                          (tree_to_double_int (bit_offset),
                                           BITS_PER_UNIT == 8
                                           ? 3 : exact_log2 (BITS_PER_UNIT),
                                           HOST_BITS_PER_DOUBLE_INT, true));
                    if (double_int_fits_in_shwi_p (off))
                      temp.off = off.low;
                  }
              }
          }
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
          if (TREE_CODE (temp.op0) == INTEGER_CST
              && TREE_CODE (temp.op1) == INTEGER_CST
              && TREE_CODE (temp.op2) == INTEGER_CST)
            {
              double_int off = tree_to_double_int (temp.op0);
              off = double_int_add (off,
                                    double_int_neg
                                      (tree_to_double_int (temp.op1)));
              off = double_int_mul (off, tree_to_double_int (temp.op2));
              if (double_int_fits_in_shwi_p (off))
                temp.off = off.low;
            }
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case VAR_DECL:
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (i.e. they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration.  */
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          temp.off = 0;
          break;
        case IMAGPART_EXPR:
          /* This is only interesting for its constant offset.  */
          temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
          break;
        default:
          gcc_unreachable ();
        }
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
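
/* As an illustrative example (hypothetical names), a load from
   a.b[i_1] is flattened outermost-first into roughly

     { ARRAY_REF,     op0 = i_1, op1 = low bound, op2 = element size }
     { COMPONENT_REF, op0 = FIELD_DECL b }
     { VAR_DECL a }

   where each record carries its known constant byte offset in OFF,
   or -1 when the offset is not a compile-time constant.  */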
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
        size = -1;
      else
        size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case ADDR_EXPR:
          /* Apart from ADDR_EXPR arguments to MEM_REF.  */
          if (base != NULL_TREE
              && TREE_CODE (base) == MEM_REF
              && op->op0
              && DECL_P (TREE_OPERAND (op->op0, 0)))
            {
              vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
              base = TREE_OPERAND (op->op0, 0);
              if (pop->off == -1)
                {
                  max_size = -1;
                  offset = 0;
                }
              else
                offset += pop->off * BITS_PER_UNIT;
              op0_p = NULL;
              break;
            }
          /* Fallthru.  */
        case CALL_EXPR:
          return false;

        /* Record the base objects.  */
        case MEM_REF:
          base_alias_set = get_deref_alias_set (op->op0);
          *op0_p = build2 (MEM_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          op0_p = NULL;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += tree_low_cst (op->op1, 0);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */

            if (op->op1
                || !host_integerp (DECL_FIELD_OFFSET (field), 1))
              max_size = -1;
            else
              {
                offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
                           * BITS_PER_UNIT);
                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (!host_integerp (op->op0, 0)
              || !host_integerp (op->op1, 0)
              || !host_integerp (op->op2, 0))
            max_size = -1;
          else
            {
              HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
              hindex -= TREE_INT_CST_LOW (op->op1);
              hindex *= TREE_INT_CST_LOW (op->op2);
              hindex *= BITS_PER_UNIT;
              offset += hindex;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);

  return true;
}
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  temp.off = -1;
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
                            unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  tree addr_base;
  HOST_WIDE_INT addr_offset;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
                                             &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != op->op0)
    {
      double_int off = tree_to_double_int (mem_op->op0);
      off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
      off = double_int_add (off, shwi_to_double_int (addr_offset));
      mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (host_integerp (mem_op->op0, 0))
        mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
      else
        mem_op->off = -1;
    }
}
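
/* For example (hypothetical layout, with s.foo.bar at byte offset 8
   inside s), the operand pair { MEM_REF off 4, ADDR_EXPR &s.foo.bar }
   is rewritten to { MEM_REF off 12, ADDR_EXPR &s }.  */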
/* Fold the address at position *I_P in a vn_reference_op_s vector *OPS
   through a defining ADDR_EXPR or POINTER_PLUS_EXPR statement.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
                                     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  gimple def_stmt;
  enum tree_code code;
  double_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = tree_to_double_int (mem_op->op0);
  off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
                                                 &addr_offset);
      if (!addr_base
          || TREE_CODE (addr_base) != MEM_REF)
        return;

      off = double_int_add (off, shwi_to_double_int (addr_offset));
      off = double_int_add (off, mem_ref_offset (addr_base));
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
          || TREE_CODE (ptroff) != INTEGER_CST)
        return;

      off = double_int_add (off, tree_to_double_int (ptroff));
      op->op0 = ptr;
    }

  mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (host_integerp (mem_op->op0, 0))
    mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}
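
/* Two illustrative cases (hypothetical names): given the definition
   p_1 = &MEM[q_2 + 8B], the pair { MEM_REF off 4, SSA_NAME p_1 }
   becomes { MEM_REF off 12, SSA_NAME q_2 }; given p_3 = q_4 + 16,
   { MEM_REF off 4, SSA_NAME p_3 } becomes
   { MEM_REF off 20, SSA_NAME q_4 }.  */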
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  VEC (vn_reference_op_s, heap) *operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = VEC_index (vn_reference_op_s, operands, 0);
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && VEC_length (vn_reference_op_s, operands) >= 2
      && VEC_length (vn_reference_op_s, operands) <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (VEC_length (vn_reference_op_s, operands) > 2)
        arg1 = VEC_index (vn_reference_op_s, operands, 2);
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
          || (arg0->opcode == ADDR_EXPR
              && is_gimple_min_invariant (arg0->op0)))
        anyconst = true;
      if (arg1
          && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
              || (arg1->opcode == ADDR_EXPR
                  && is_gimple_min_invariant (arg1->op0))))
        anyconst = true;
      if (anyconst)
        {
          tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
                                         arg1 ? 2 : 1,
                                         arg0->op0,
                                         arg1 ? arg1->op0 : NULL);
          if (folded
              && TREE_CODE (folded) == NOP_EXPR)
            folded = TREE_OPERAND (folded, 0);
          if (folded
              && is_gimple_min_invariant (folded))
            return folded;
        }
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
           && TREE_CODE (op->op0) == INTEGER_CST
           && integer_zerop (op->op1)
           && VEC_length (vn_reference_op_s, operands) == 2)
    {
      vn_reference_op_t arg0;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (arg0->opcode == STRING_CST
          && (TYPE_MODE (op->type)
              == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
          && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
          && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
          && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
        return build_int_cst_type (op->type,
                                   (TREE_STRING_POINTER (arg0->op0)
                                    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}
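
/* As illustrative examples, a read of "abc"[1], i.e. an ARRAY_REF
   with a constant index over a STRING_CST, folds to the character
   constant 'b', and a builtin call such as sqrt (4.0) with a constant
   argument can fold to 2.0.  */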
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  vn_reference_op_t vro;
  unsigned int i;

  FOR_EACH_VEC_ELT (vn_reference_op_s, orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          vro->op0 = SSA_VAL (vro->op0);
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        vro->op1 = SSA_VAL (vro->op1);
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        vro->op2 = SSA_VAL (vro->op2);
      /* If it transforms from an SSA_NAME to an address, fold with
         a preceding indirect reference.  */
      if (i > 0
          && vro->op0
          && TREE_CODE (vro->op0) == ADDR_EXPR
          && VEC_index (vn_reference_op_s,
                        orig, i - 1)->opcode == MEM_REF)
        vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
               && vro->opcode == SSA_NAME
               && VEC_index (vn_reference_op_s,
                             orig, i - 1)->opcode == MEM_REF)
        vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
         one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
               && vro->off == -1
               && TREE_CODE (vro->op0) == INTEGER_CST
               && TREE_CODE (vro->op1) == INTEGER_CST
               && TREE_CODE (vro->op2) == INTEGER_CST)
        {
          double_int off = tree_to_double_int (vro->op0);
          off = double_int_add (off,
                                double_int_neg
                                  (tree_to_double_int (vro->op1)));
          off = double_int_mul (off, tree_to_double_int (vro->op2));
          if (double_int_fits_in_shwi_p (off))
            vro->off = off.low;
        }
    }

  return orig;
}
static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}
static tree *last_vuse_ptr;

static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static VEC (vn_reference_op_s, heap) *lhs_ops = NULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definitions LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      VEC (vn_reference_op_s, heap) *tem;
      tree lhs = gimple_assign_lhs (def_stmt);
      /* Avoid re-allocation overhead.  */
      VEC_truncate (vn_reference_op_s, lhs_ops, 0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      tem = lhs_ops;
      lhs_ops = valueize_refs (lhs_ops);
      gcc_assert (lhs_ops == tem);
      lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref, get_alias_set (lhs),
                                                  TREE_TYPE (lhs), lhs_ops);
      if (lhs_ref_ok
          && !refs_may_alias_p_1 (ref, &lhs_ref, true))
        return NULL;
    }

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
          == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
          && maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* 3) For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      VEC (vn_reference_op_s, heap) *rhs = NULL;
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
        return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      if (maxsize2 == -1
          || (base != base2 && !operand_equal_p (base, base2, 0))
          || offset2 > offset
          || offset2 + size2 < offset + maxsize)
        return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
         contains valueized operands for the lhs.  */
      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
      j = VEC_length (vn_reference_op_s, lhs_ops) - 1;
      while (j >= 0 && i >= 0
             && vn_reference_op_eq (VEC_index (vn_reference_op_s,
                                               vr->operands, i),
                                    VEC_index (vn_reference_op_s, lhs_ops, j)))
        {
          i--;
          j--;
        }

      /* i now points to the first additional op.
         ??? LHS may not be completely contained in VR, one or more
         VIEW_CONVERT_EXPRs could be in its way.  We could at least
         try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
        return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
          > VEC_length (vn_reference_op_s, vr->operands))
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
                         i + 1 + VEC_length (vn_reference_op_s, rhs));
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands,
                      i + 1 + VEC_length (vn_reference_op_s, rhs));
      FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
        VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
      VEC_free (vn_reference_op_s, heap, rhs);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* 4) For memcpy copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && is_gimple_reg_type (vr->type)
           /* ??? Handle BCOPY as well.  */
           && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
               || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
               || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
           && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
               || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
           && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
               || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
           && host_integerp (gimple_call_arg (def_stmt, 2), 1))
    {
      tree lhs, rhs;
      ao_ref r;
      HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
      vn_reference_op_s op;
      HOST_WIDE_INT at;


      /* Only handle non-variable, addressable refs.  */
      if (ref->size != maxsize
          || offset % BITS_PER_UNIT != 0
          || ref->size % BITS_PER_UNIT != 0)
        return (void *)-1;

      /* Extract a pointer base and an offset for the destination.  */
      lhs = gimple_call_arg (def_stmt, 0);
      lhs_offset = 0;
      if (TREE_CODE (lhs) == SSA_NAME)
        lhs = SSA_VAL (lhs);
      if (TREE_CODE (lhs) == ADDR_EXPR)
        {
          tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
                                                    &lhs_offset);
          if (!tem)
            return (void *)-1;
          if (TREE_CODE (tem) == MEM_REF
              && host_integerp (TREE_OPERAND (tem, 1), 1))
            {
              lhs = TREE_OPERAND (tem, 0);
              lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
            }
          else if (DECL_P (tem))
            lhs = build_fold_addr_expr (tem);
          else
            return (void *)-1;
        }
      if (TREE_CODE (lhs) != SSA_NAME
          && TREE_CODE (lhs) != ADDR_EXPR)
        return (void *)-1;

      /* Extract a pointer base and an offset for the source.  */
      rhs = gimple_call_arg (def_stmt, 1);
      rhs_offset = 0;
      if (TREE_CODE (rhs) == SSA_NAME)
        rhs = SSA_VAL (rhs);
      if (TREE_CODE (rhs) == ADDR_EXPR)
        {
          tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
                                                    &rhs_offset);
          if (!tem)
            return (void *)-1;
          if (TREE_CODE (tem) == MEM_REF
              && host_integerp (TREE_OPERAND (tem, 1), 1))
            {
              rhs = TREE_OPERAND (tem, 0);
              rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
            }
          else if (DECL_P (tem))
            rhs = build_fold_addr_expr (tem);
          else
            return (void *)-1;
        }
      if (TREE_CODE (rhs) != SSA_NAME
          && TREE_CODE (rhs) != ADDR_EXPR)
        return (void *)-1;

      copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));

      /* The bases of the destination and the references have to agree.  */
      if ((TREE_CODE (base) != MEM_REF
           && !DECL_P (base))
          || (TREE_CODE (base) == MEM_REF
              && (TREE_OPERAND (base, 0) != lhs
                  || !host_integerp (TREE_OPERAND (base, 1), 1)))
          || (DECL_P (base)
              && (TREE_CODE (lhs) != ADDR_EXPR
                  || TREE_OPERAND (lhs, 0) != base)))
        return (void *)-1;

      /* And the access has to be contained within the memcpy destination.  */
      at = offset / BITS_PER_UNIT;
      if (TREE_CODE (base) == MEM_REF)
        at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
      if (lhs_offset > at
          || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
        return (void *)-1;

      /* Make room for 2 operands in the new reference.  */
      if (VEC_length (vn_reference_op_s, vr->operands) < 2)
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands, 2);
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands, 2);

      /* The looked-through reference is a simple MEM_REF.  */
      memset (&op, 0, sizeof (op));
      op.type = vr->type;
      op.opcode = MEM_REF;
      op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
      op.off = at - lhs_offset + rhs_offset;
      VEC_replace (vn_reference_op_s, vr->operands, 0, &op);
      op.type = TYPE_MAIN_VARIANT (TREE_TYPE (rhs));
      op.opcode = TREE_CODE (rhs);
      op.op0 = rhs;
      op.off = -1;
      VEC_replace (vn_reference_op_s, vr->operands, 1, &op);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
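
/* To illustrate the four cases above (hypothetical code): looking up
   a.x through memset (&a, 0, sizeof (a)) yields zero (case 1), as
   does a read from an aggregate initialized with b = {} (case 2);
   looking up b.x through the aggregate copy b = a continues the walk
   as a.x (case 3); and looking up c.x through
   memcpy (&c, &b, sizeof (c)) continues as a MEM_REF based on &b
   (case 4).  */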
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
                 VEC_length (vn_reference_op_s, operands));
  memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
          VEC_address (vn_reference_op_s, operands),
          sizeof (vn_reference_op_s)
          * VEC_length (vn_reference_op_s, operands));
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      vn_walk_kind = kind;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
        *vnresult =
          (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
    }

  if (*vnresult)
     return (*vnresult)->result;

  return NULL_TREE;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
                     vn_reference_t *vnresult)
{
  VEC (vn_reference_op_s, heap) *operands;
  struct vn_reference_s vr1;
  tree cst;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
  vr1.type = TREE_TYPE (op);
  vr1.set = get_alias_set (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  if (kind != VN_NOWALK
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      ao_ref_init (&r, op);
      vn_walk_kind = kind;
      wvnresult =
        (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                vn_reference_lookup_2,
                                                vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
      if (wvnresult)
        {
          if (vnresult)
            *vnresult = wvnresult;
          return wvnresult->result;
        }

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}
/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            tree result, unsigned int value_id)

{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Compute and return the hash value for nary operation VNO1.  */

hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  hash = iterative_hash_hashval_t (vno1->opcode, 0);
  for (i = 0; i < vno1->length; ++i)
    hash = iterative_hash_expr (vno1->op[i], hash);

  return hash;
}
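
/* Because of the canonicalization above, commutative expressions such
   as a_1 + b_2 and b_2 + a_1 (illustrative names) are put into the
   same operand order before hashing and therefore receive the same
   hash code and, ultimately, the same value number.  */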
/* Return the computed hashcode for nary operation P1.  */

static hashval_t
vn_nary_op_hash (const void *p1)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  return vno1->hashcode;
}
/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

int
vn_nary_op_eq (const void *p1, const void *p2)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}
/* Initialize VNO from the pieces provided.  */

static void
init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
                             enum tree_code code, tree type, tree op0,
                             tree op1, tree op2, tree op3)
{
  vno->opcode = code;
  vno->length = length;
  vno->type = type;
  switch (length)
    {
      /* The fallthrus here are deliberate.  */
    case 4: vno->op[3] = op3;
    case 3: vno->op[2] = op2;
    case 2: vno->op[1] = op1;
    case 1: vno->op[0] = op0;
    default:
      break;
    }
}
1877 /* Initialize VNO from OP. */
1879 static void
1880 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
1882 unsigned i;
1884 vno->opcode = TREE_CODE (op);
1885 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
1886 vno->type = TREE_TYPE (op);
1887 for (i = 0; i < vno->length; ++i)
1888 vno->op[i] = TREE_OPERAND (op, i);
1891 /* Initialize VNO from STMT. */
1893 static void
1894 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
1896 unsigned i;
1898 vno->opcode = gimple_assign_rhs_code (stmt);
1899 vno->length = gimple_num_ops (stmt) - 1;
1900 vno->type = gimple_expr_type (stmt);
1901 for (i = 0; i < vno->length; ++i)
1902 vno->op[i] = gimple_op (stmt, i + 1);
1903 if (vno->opcode == REALPART_EXPR
1904 || vno->opcode == IMAGPART_EXPR
1905 || vno->opcode == VIEW_CONVERT_EXPR)
1906 vno->op[0] = TREE_OPERAND (vno->op[0], 0);
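/* An illustrative example (editorial): for a statement like

     x_1 = REALPART_EXPR <y_2>;

   we record opcode REALPART_EXPR with op[0] = y_2 rather than the
   REALPART_EXPR tree itself, so lookups key off the SSA operand.  */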
1909 /* Compute the hashcode for VNO and look for it in the hash table;
1910 return the resulting value number if it exists in the hash table.
1911 Return NULL_TREE if it does not exist in the hash table or if the
1912 result field of the operation is NULL. VNRESULT will contain the
1913 vn_nary_op_t from the hashtable if it exists. */
1915 static tree
1916 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
1918 void **slot;
1920 if (vnresult)
1921 *vnresult = NULL;
1923 vno->hashcode = vn_nary_op_compute_hash (vno);
1924 slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
1925 NO_INSERT);
1926 if (!slot && current_info == optimistic_info)
1927 slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
1928 NO_INSERT);
1929 if (!slot)
1930 return NULL_TREE;
1931 if (vnresult)
1932 *vnresult = (vn_nary_op_t)*slot;
1933 return ((vn_nary_op_t)*slot)->result;
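/* Editorial note on the fallback above: during optimistic SCC
   iteration the optimistic table is searched first and a miss falls
   back to the valid table, so an expression such as a_1 + 1 that was
   confirmed while processing an earlier SCC is still found here.  */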
1936 /* Lookup an n-ary operation by its pieces and return the resulting value
1937 number if it exists in the hash table. Return NULL_TREE if it does
1938 not exist in the hash table or if the result field of the operation
1939 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
1940 if it exists. */
1942 tree
1943 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
1944 tree type, tree op0, tree op1, tree op2,
1945 tree op3, vn_nary_op_t *vnresult)
1947 struct vn_nary_op_s vno1;
1948 init_vn_nary_op_from_pieces (&vno1, length, code, type, op0, op1, op2, op3);
1949 return vn_nary_op_lookup_1 (&vno1, vnresult);
1952 /* Lookup OP in the current hash table, and return the resulting value
1953 number if it exists in the hash table. Return NULL_TREE if it does
1954 not exist in the hash table or if the result field of the operation
1955 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
1956 if it exists. */
1958 tree
1959 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
1961 struct vn_nary_op_s vno1;
1962 init_vn_nary_op_from_op (&vno1, op);
1963 return vn_nary_op_lookup_1 (&vno1, vnresult);
1966 /* Lookup the rhs of STMT in the current hash table, and return the resulting
1967 value number if it exists in the hash table. Return NULL_TREE if
1968 it does not exist in the hash table. VNRESULT will contain the
1969 vn_nary_op_t from the hashtable if it exists. */
1971 tree
1972 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
1974 struct vn_nary_op_s vno1;
1975 init_vn_nary_op_from_stmt (&vno1, stmt);
1976 return vn_nary_op_lookup_1 (&vno1, vnresult);
1979 /* Return the size of a vn_nary_op_t with LENGTH operands. */
1981 static size_t
1982 sizeof_vn_nary_op (unsigned int length)
1984 return sizeof (struct vn_nary_op_s) - sizeof (tree) * (4 - length);
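/* Editorial example: struct vn_nary_op_s ends in a four-element op[]
   array, so for a unary operation (LENGTH == 1) this yields
   sizeof (struct vn_nary_op_s) - 3 * sizeof (tree); the three unused
   trailing operand slots are simply not allocated.  */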
1987 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
1989 static vn_nary_op_t
1990 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
1992 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
1995 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
1996 obstack. */
1998 static vn_nary_op_t
1999 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2001 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2002 &current_info->nary_obstack);
2004 vno1->value_id = value_id;
2005 vno1->length = length;
2006 vno1->result = result;
2008 return vno1;
2011 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2012 VNO->HASHCODE first. */
2014 static vn_nary_op_t
2015 vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash)
2017 void **slot;
2019 if (compute_hash)
2020 vno->hashcode = vn_nary_op_compute_hash (vno);
2022 slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT);
2023 gcc_assert (!*slot);
2025 *slot = vno;
2026 return vno;
2029 /* Insert an n-ary operation into the current hash table using its
2030 pieces. Return the vn_nary_op_t structure we created and put in
2031 the hashtable. */
2033 vn_nary_op_t
2034 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2035 tree type, tree op0,
2036 tree op1, tree op2, tree op3,
2037 tree result,
2038 unsigned int value_id)
2040 vn_nary_op_t vno1;
2042 vno1 = alloc_vn_nary_op (length, result, value_id);
2043 init_vn_nary_op_from_pieces (vno1, length, code, type, op0, op1, op2, op3);
2044 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2047 /* Insert OP into the current hash table with a value number of
2048 RESULT. Return the vn_nary_op_t structure we created and put in
2049 the hashtable. */
2051 vn_nary_op_t
2052 vn_nary_op_insert (tree op, tree result)
2054 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2055 vn_nary_op_t vno1;
2057 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2058 init_vn_nary_op_from_op (vno1, op);
2059 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2062 /* Insert the rhs of STMT into the current hash table with a value number of
2063 RESULT. */
2065 vn_nary_op_t
2066 vn_nary_op_insert_stmt (gimple stmt, tree result)
2068 unsigned length = gimple_num_ops (stmt) - 1;
2069 vn_nary_op_t vno1;
2071 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2072 init_vn_nary_op_from_stmt (vno1, stmt);
2073 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2076 /* Compute a hashcode for PHI operation VP1 and return it. */
2078 static inline hashval_t
2079 vn_phi_compute_hash (vn_phi_t vp1)
2081 hashval_t result;
2082 int i;
2083 tree phi1op;
2084 tree type;
2086 result = vp1->block->index;
2088 /* If all PHI arguments are constants we need to distinguish
2089 the PHI node via its type. */
2090 type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
2091 result += (INTEGRAL_TYPE_P (type)
2092 + (INTEGRAL_TYPE_P (type)
2093 ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
2095 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2097 if (phi1op == VN_TOP)
2098 continue;
2099 result = iterative_hash_expr (phi1op, result);
2102 return result;
2105 /* Return the computed hashcode for phi operation P1. */
2107 static hashval_t
2108 vn_phi_hash (const void *p1)
2110 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2111 return vp1->hashcode;
2114 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2116 static int
2117 vn_phi_eq (const void *p1, const void *p2)
2119 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2120 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
2122 if (vp1->hashcode != vp2->hashcode)
2123 return false;
2125 if (vp1->block == vp2->block)
2127 int i;
2128 tree phi1op;
2130 /* If the PHI nodes do not have compatible types
2131 they are not the same. */
2132 if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
2133 TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
2134 return false;
2136 /* Any phi in the same block will have its arguments in the
2137 same edge order, because of how we store phi nodes. */
2138 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2140 tree phi2op = VEC_index (tree, vp2->phiargs, i);
2141 if (phi1op == VN_TOP || phi2op == VN_TOP)
2142 continue;
2143 if (!expressions_equal_p (phi1op, phi2op))
2144 return false;
2146 return true;
2148 return false;
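/* An illustrative example (editorial): in the same basic block the PHIs

     x_2 = PHI <a_1(3), b_4(5)>
     y_6 = PHI <a_1(3), b_4(5)>

   hash and compare equal after argument valueization, so y_6 can
   reuse the value number recorded for x_2.  */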
2151 static VEC(tree, heap) *shared_lookup_phiargs;
2153 /* Lookup PHI in the current hash table, and return the resulting
2154 value number if it exists in the hash table. Return NULL_TREE if
2155 it does not exist in the hash table. */
2157 static tree
2158 vn_phi_lookup (gimple phi)
2160 void **slot;
2161 struct vn_phi_s vp1;
2162 unsigned i;
2164 VEC_truncate (tree, shared_lookup_phiargs, 0);
2166 /* Canonicalize the SSA_NAMEs to their value numbers. */
2167 for (i = 0; i < gimple_phi_num_args (phi); i++)
2169 tree def = PHI_ARG_DEF (phi, i);
2170 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2171 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
2173 vp1.phiargs = shared_lookup_phiargs;
2174 vp1.block = gimple_bb (phi);
2175 vp1.hashcode = vn_phi_compute_hash (&vp1);
2176 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
2177 NO_INSERT);
2178 if (!slot && current_info == optimistic_info)
2179 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
2180 NO_INSERT);
2181 if (!slot)
2182 return NULL_TREE;
2183 return ((vn_phi_t)*slot)->result;
2186 /* Insert PHI into the current hash table with a value number of
2187 RESULT. */
2189 static vn_phi_t
2190 vn_phi_insert (gimple phi, tree result)
2192 void **slot;
2193 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2194 unsigned i;
2195 VEC (tree, heap) *args = NULL;
2197 /* Canonicalize the SSA_NAMEs to their value numbers. */
2198 for (i = 0; i < gimple_phi_num_args (phi); i++)
2200 tree def = PHI_ARG_DEF (phi, i);
2201 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2202 VEC_safe_push (tree, heap, args, def);
2204 vp1->value_id = VN_INFO (result)->value_id;
2205 vp1->phiargs = args;
2206 vp1->block = gimple_bb (phi);
2207 vp1->result = result;
2208 vp1->hashcode = vn_phi_compute_hash (vp1);
2210 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
2211 INSERT);
2213 /* Because we iterate over phi operations more than once, it's
2214 possible the slot might already exist here, hence no assert. */
2215 *slot = vp1;
2216 return vp1;
2220 /* Print the components of the strongly connected component SCC to OUT. */
2222 static void
2223 print_scc (FILE *out, VEC (tree, heap) *scc)
2225 tree var;
2226 unsigned int i;
2228 fprintf (out, "SCC consists of: ");
2229 FOR_EACH_VEC_ELT (tree, scc, i, var)
2231 print_generic_expr (out, var, 0);
2232 fprintf (out, " ");
2234 fprintf (out, "\n");
2237 /* Set the value number of FROM to TO, return true if it has changed
2238 as a result. */
2240 static inline bool
2241 set_ssa_val_to (tree from, tree to)
2243 tree currval;
2245 if (from != to
2246 && TREE_CODE (to) == SSA_NAME
2247 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2248 to = from;
2250 /* The only things we allow as value numbers are VN_TOP, SSA_NAMEs
2251 and invariants; assert that here. */
2252 gcc_assert (to != NULL_TREE
2253 && (to == VN_TOP
2254 || TREE_CODE (to) == SSA_NAME
2255 || is_gimple_min_invariant (to)));
2257 if (dump_file && (dump_flags & TDF_DETAILS))
2259 fprintf (dump_file, "Setting value number of ");
2260 print_generic_expr (dump_file, from, 0);
2261 fprintf (dump_file, " to ");
2262 print_generic_expr (dump_file, to, 0);
2265 currval = SSA_VAL (from);
2267 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
2269 VN_INFO (from)->valnum = to;
2270 if (dump_file && (dump_flags & TDF_DETAILS))
2271 fprintf (dump_file, " (changed)\n");
2272 return true;
2274 if (dump_file && (dump_flags & TDF_DETAILS))
2275 fprintf (dump_file, "\n");
2276 return false;
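/* Editorial example: names start out with value VN_TOP; the first
   visit of x_1 = 5 changes SSA_VAL (x_1) to 5 and returns true, while
   a revisit during SCC iteration finds the value unchanged and
   returns false, which is what lets the fixpoint loop terminate.  */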
2279 /* Value number all definitions in STMT to themselves.
2280 Return true if a value number changed. */
2282 static bool
2283 defs_to_varying (gimple stmt)
2285 bool changed = false;
2286 ssa_op_iter iter;
2287 def_operand_p defp;
2289 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2291 tree def = DEF_FROM_PTR (defp);
2293 VN_INFO (def)->use_processed = true;
2294 changed |= set_ssa_val_to (def, def);
2296 return changed;
2299 static bool expr_has_constants (tree expr);
2300 static tree valueize_expr (tree expr);
2302 /* Visit a copy between LHS and RHS, return true if the value number
2303 changed. */
2305 static bool
2306 visit_copy (tree lhs, tree rhs)
2308 /* Follow chains of copies to their destination. */
2309 while (TREE_CODE (rhs) == SSA_NAME
2310 && SSA_VAL (rhs) != rhs)
2311 rhs = SSA_VAL (rhs);
2313 /* The RHS may have a more interesting constant-filled expression
2314 (ours is just an SSA name), so inherit its expression info. */
2315 if (TREE_CODE (rhs) == SSA_NAME)
2317 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2318 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2321 return set_ssa_val_to (lhs, rhs);
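/* An illustrative example (editorial): for the copy chain

     y_2 = x_1;
     z_3 = y_2;

   visiting z_3 follows SSA_VAL (y_2) to x_1, so all three names end
   up sharing the value number x_1.  */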
2324 /* Visit a nary operator RHS, value number it, and return true if the
2325 value number of LHS has changed as a result. */
2327 static bool
2328 visit_nary_op (tree lhs, gimple stmt)
2330 bool changed = false;
2331 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2333 if (result)
2334 changed = set_ssa_val_to (lhs, result);
2335 else
2337 changed = set_ssa_val_to (lhs, lhs);
2338 vn_nary_op_insert_stmt (stmt, lhs);
2341 return changed;
2344 /* Visit a call STMT storing into LHS. Return true if the value number
2345 of the LHS has changed as a result. */
2347 static bool
2348 visit_reference_op_call (tree lhs, gimple stmt)
2350 bool changed = false;
2351 struct vn_reference_s vr1;
2352 tree result;
2353 tree vuse = gimple_vuse (stmt);
2355 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2356 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2357 vr1.type = gimple_expr_type (stmt);
2358 vr1.set = 0;
2359 vr1.hashcode = vn_reference_compute_hash (&vr1);
2360 result = vn_reference_lookup_1 (&vr1, NULL);
2361 if (result)
2363 changed = set_ssa_val_to (lhs, result);
2364 if (TREE_CODE (result) == SSA_NAME
2365 && VN_INFO (result)->has_constants)
2366 VN_INFO (lhs)->has_constants = true;
2368 else
2370 void **slot;
2371 vn_reference_t vr2;
2372 changed = set_ssa_val_to (lhs, lhs);
2373 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2374 vr2->vuse = vr1.vuse;
2375 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2376 vr2->type = vr1.type;
2377 vr2->set = vr1.set;
2378 vr2->hashcode = vr1.hashcode;
2379 vr2->result = lhs;
2380 slot = htab_find_slot_with_hash (current_info->references,
2381 vr2, vr2->hashcode, INSERT);
2382 if (*slot)
2383 free_reference (*slot);
2384 *slot = vr2;
2387 return changed;
2390 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2391 and return true if the value number of the LHS has changed as a result. */
2393 static bool
2394 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2396 bool changed = false;
2397 tree last_vuse;
2398 tree result;
2400 last_vuse = gimple_vuse (stmt);
2401 last_vuse_ptr = &last_vuse;
2402 result = vn_reference_lookup (op, gimple_vuse (stmt),
2403 default_vn_walk_kind, NULL);
2404 last_vuse_ptr = NULL;
2406 /* If we have a VCE, try looking up its operand as it might be stored in
2407 a different type. */
2408 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2409 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2410 default_vn_walk_kind, NULL);
2412 /* We handle type-punning through unions by value-numbering based
2413 on offset and size of the access. Be prepared to handle a
2414 type-mismatch here by creating a VIEW_CONVERT_EXPR. */
2415 if (result
2416 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2418 /* We will be setting the value number of lhs to the value number
2419 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2420 So first simplify and lookup this expression to see if it
2421 is already available. */
2422 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2423 if ((CONVERT_EXPR_P (val)
2424 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2425 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2427 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2428 if ((CONVERT_EXPR_P (tem)
2429 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2430 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2431 TREE_TYPE (val), tem)))
2432 val = tem;
2434 result = val;
2435 if (!is_gimple_min_invariant (val)
2436 && TREE_CODE (val) != SSA_NAME)
2437 result = vn_nary_op_lookup (val, NULL);
2438 /* If the expression is not yet available, value-number lhs to
2439 a new SSA_NAME we create. */
2440 if (!result)
2442 result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
2443 /* Initialize value-number information properly. */
2444 VN_INFO_GET (result)->valnum = result;
2445 VN_INFO (result)->value_id = get_next_value_id ();
2446 VN_INFO (result)->expr = val;
2447 VN_INFO (result)->has_constants = expr_has_constants (val);
2448 VN_INFO (result)->needs_insertion = true;
2449 /* As all "inserted" statements are singleton SCCs, insert
2450 into the valid table. This is strictly needed to
2451 avoid re-generating new value SSA_NAMEs for the same
2452 expression during SCC iteration over and over (the
2453 optimistic table gets cleared after each iteration).
2454 We do not need to insert into the optimistic table, as
2455 lookups there will fall back to the valid table. */
2456 if (current_info == optimistic_info)
2458 current_info = valid_info;
2459 vn_nary_op_insert (val, result);
2460 current_info = optimistic_info;
2462 else
2463 vn_nary_op_insert (val, result);
2464 if (dump_file && (dump_flags & TDF_DETAILS))
2466 fprintf (dump_file, "Inserting name ");
2467 print_generic_expr (dump_file, result, 0);
2468 fprintf (dump_file, " for expression ");
2469 print_generic_expr (dump_file, val, 0);
2470 fprintf (dump_file, "\n");
2475 if (result)
2477 changed = set_ssa_val_to (lhs, result);
2478 if (TREE_CODE (result) == SSA_NAME
2479 && VN_INFO (result)->has_constants)
2481 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2482 VN_INFO (lhs)->has_constants = true;
2485 else
2487 changed = set_ssa_val_to (lhs, lhs);
2488 vn_reference_insert (op, lhs, last_vuse);
2491 return changed;
2495 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2496 and return true if the value number of the LHS has changed as a result. */
2498 static bool
2499 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2501 bool changed = false;
2502 tree result;
2503 bool resultsame = false;
2505 /* First we want to lookup using the *vuses* from the store and see
2506 if the last store to this location with the same address had
2507 the same value.
2509 The vuses represent the memory state before the store. If the
2510 memory state, address, and value of the store are the same as the
2511 last store to this location, then this store will produce the
2512 same memory state as that store.
2514 In this case the vdef versions for this store are value numbered to those
2515 vuse versions, since they represent the same memory state after
2516 this store.
2518 Otherwise, the vdefs for the store are used when inserting into
2519 the table, since the store generates a new memory state. */
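/* An illustrative example (editorial, assuming nothing clobbers *p_1
   in between): for the second store in

     # .MEM_4 = VDEF <.MEM_3>
     *p_1 = x_2;
     # .MEM_5 = VDEF <.MEM_4>
     *p_1 = x_2;

   the lookup below finds x_2 as the current value of *p_1, and
   .MEM_5 is then value-numbered to .MEM_4.  */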
2521 result = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_NOWALK, NULL);
2523 if (result)
2525 if (TREE_CODE (result) == SSA_NAME)
2526 result = SSA_VAL (result);
2527 if (TREE_CODE (op) == SSA_NAME)
2528 op = SSA_VAL (op);
2529 resultsame = expressions_equal_p (result, op);
2532 if (!result || !resultsame)
2534 tree vdef;
2536 if (dump_file && (dump_flags & TDF_DETAILS))
2538 fprintf (dump_file, "No store match\n");
2539 fprintf (dump_file, "Value numbering store ");
2540 print_generic_expr (dump_file, lhs, 0);
2541 fprintf (dump_file, " to ");
2542 print_generic_expr (dump_file, op, 0);
2543 fprintf (dump_file, "\n");
2545 /* Have to set value numbers before insert, since insert is
2546 going to valueize the references in-place. */
2547 if ((vdef = gimple_vdef (stmt)))
2549 VN_INFO (vdef)->use_processed = true;
2550 changed |= set_ssa_val_to (vdef, vdef);
2553 /* Do not insert structure copies into the tables. */
2554 if (is_gimple_min_invariant (op)
2555 || is_gimple_reg (op))
2556 vn_reference_insert (lhs, op, vdef);
2558 else
2560 /* We had a match, so value number the vdef to have the value
2561 number of the vuse it came from. */
2562 tree def, use;
2564 if (dump_file && (dump_flags & TDF_DETAILS))
2565 fprintf (dump_file, "Store matched earlier value, "
2566 "value numbering store vdefs to matching vuses.\n");
2568 def = gimple_vdef (stmt);
2569 use = gimple_vuse (stmt);
2571 VN_INFO (def)->use_processed = true;
2572 changed |= set_ssa_val_to (def, SSA_VAL (use));
2575 return changed;
2578 /* Visit and value number PHI, return true if the value number
2579 changed. */
2581 static bool
2582 visit_phi (gimple phi)
2584 bool changed = false;
2585 tree result;
2586 tree sameval = VN_TOP;
2587 bool allsame = true;
2588 unsigned i;
2590 /* TODO: We could check for this in init_scc_vn, and replace this
2591 with a gcc_assert. */
2592 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2593 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2595 /* See if all non-TOP arguments have the same value. TOP is
2596 equivalent to everything, so we can ignore it. */
2597 for (i = 0; i < gimple_phi_num_args (phi); i++)
2599 tree def = PHI_ARG_DEF (phi, i);
2601 if (TREE_CODE (def) == SSA_NAME)
2602 def = SSA_VAL (def);
2603 if (def == VN_TOP)
2604 continue;
2605 if (sameval == VN_TOP)
2607 sameval = def;
2609 else
2611 if (!expressions_equal_p (def, sameval))
2613 allsame = false;
2614 break;
2619 /* If all arguments value numbered to the same value, the phi node
2620 has that value. */
2621 if (allsame)
2623 if (is_gimple_min_invariant (sameval))
2625 VN_INFO (PHI_RESULT (phi))->has_constants = true;
2626 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2628 else
2630 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2631 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2634 if (TREE_CODE (sameval) == SSA_NAME)
2635 return visit_copy (PHI_RESULT (phi), sameval);
2637 return set_ssa_val_to (PHI_RESULT (phi), sameval);
2640 /* Otherwise, see if it is equivalent to a phi node in this block. */
2641 result = vn_phi_lookup (phi);
2642 if (result)
2644 if (TREE_CODE (result) == SSA_NAME)
2645 changed = visit_copy (PHI_RESULT (phi), result);
2646 else
2647 changed = set_ssa_val_to (PHI_RESULT (phi), result);
2649 else
2651 vn_phi_insert (phi, PHI_RESULT (phi));
2652 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2653 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2654 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2657 return changed;
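/* Editorial examples: x_2 = PHI <5(3), 5(4)> gets the value 5, and a
   PHI with one argument still valued VN_TOP, say x_2 = PHI <a_1(3),
   x_3(4)> with SSA_VAL (x_3) == VN_TOP, optimistically gets the value
   a_1; only PHIs whose arguments genuinely differ fall back to the
   vn_phi_lookup path.  */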
2660 /* Return true if EXPR contains constants. */
2662 static bool
2663 expr_has_constants (tree expr)
2665 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2667 case tcc_unary:
2668 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2670 case tcc_binary:
2671 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2672 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2673 /* Constants inside reference ops are rarely interesting, but
2674 it can take a lot of looking to find them. */
2675 case tcc_reference:
2676 case tcc_declaration:
2677 return false;
2678 default:
2679 return is_gimple_min_invariant (expr);
2681 return false;
2684 /* Return true if STMT contains constants. */
2686 static bool
2687 stmt_has_constants (gimple stmt)
2689 if (gimple_code (stmt) != GIMPLE_ASSIGN)
2690 return false;
2692 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2694 case GIMPLE_UNARY_RHS:
2695 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2697 case GIMPLE_BINARY_RHS:
2698 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2699 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
2700 case GIMPLE_TERNARY_RHS:
2701 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2702 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
2703 || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
2704 case GIMPLE_SINGLE_RHS:
2705 /* Constants inside reference ops are rarely interesting, but
2706 it can take a lot of looking to find them. */
2707 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2708 default:
2709 gcc_unreachable ();
2711 return false;
2714 /* Replace SSA_NAMEs in EXPR with their value numbers, and return the
2715 result.
2716 This is performed in place. */
2718 static tree
2719 valueize_expr (tree expr)
2721 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2723 case tcc_unary:
2724 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2725 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2726 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2727 break;
2728 case tcc_binary:
2729 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2730 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2731 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2732 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
2733 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
2734 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
2735 break;
2736 default:
2737 break;
2739 return expr;
2742 /* Simplify the binary expression RHS, and return the result if
2743 simplified. */
2745 static tree
2746 simplify_binary_expression (gimple stmt)
2748 tree result = NULL_TREE;
2749 tree op0 = gimple_assign_rhs1 (stmt);
2750 tree op1 = gimple_assign_rhs2 (stmt);
2752 /* This will not catch every single case we could combine, but will
2753 catch those with constants. The goal here is to combine constants
2754 between expressions while avoiding infinite expansion of
2755 expressions during simplification. */
2756 if (TREE_CODE (op0) == SSA_NAME)
2758 if (VN_INFO (op0)->has_constants
2759 || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
2760 op0 = valueize_expr (vn_get_expr_for (op0));
2761 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
2762 op0 = SSA_VAL (op0);
2765 if (TREE_CODE (op1) == SSA_NAME)
2767 if (VN_INFO (op1)->has_constants)
2768 op1 = valueize_expr (vn_get_expr_for (op1));
2769 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
2770 op1 = SSA_VAL (op1);
2773 /* Avoid folding if nothing changed. */
2774 if (op0 == gimple_assign_rhs1 (stmt)
2775 && op1 == gimple_assign_rhs2 (stmt))
2776 return NULL_TREE;
2778 fold_defer_overflow_warnings ();
2780 result = fold_binary (gimple_assign_rhs_code (stmt),
2781 gimple_expr_type (stmt), op0, op1);
2782 if (result)
2783 STRIP_USELESS_TYPE_CONVERSION (result);
2785 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
2786 stmt, 0);
2788 /* Make sure result is not a complex expression consisting
2789 of operators of operators (i.e. (a + b) + (a + c)).
2790 Otherwise, we will end up with unbounded expressions if
2791 fold does anything at all. */
2792 if (result && valid_gimple_rhs_p (result))
2793 return result;
2795 return NULL_TREE;
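/* An illustrative example (editorial): given

     b_2 = a_1 + 1;
     c_3 = b_2 + 2;

   b_2 has the constant-containing expression a_1 + 1, so the fold
   above can combine the statement for c_3 to a_1 + 3 without ever
   expanding expressions that contain no constants.  */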
2798 /* Simplify the unary expression RHS, and return the result if
2799 simplified. */
2801 static tree
2802 simplify_unary_expression (gimple stmt)
2804 tree result = NULL_TREE;
2805 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
2807 /* We handle some tcc_reference codes here that are all
2808 GIMPLE_ASSIGN_SINGLE codes. */
2809 if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
2810 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2811 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2812 op0 = TREE_OPERAND (op0, 0);
2814 if (TREE_CODE (op0) != SSA_NAME)
2815 return NULL_TREE;
2817 orig_op0 = op0;
2818 if (VN_INFO (op0)->has_constants)
2819 op0 = valueize_expr (vn_get_expr_for (op0));
2820 else if (gimple_assign_cast_p (stmt)
2821 || gimple_assign_rhs_code (stmt) == REALPART_EXPR
2822 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2823 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2825 /* We want to do tree-combining on conversion-like expressions.
2826 Make sure we feed only SSA_NAMEs or constants to fold though. */
2827 tree tem = valueize_expr (vn_get_expr_for (op0));
2828 if (UNARY_CLASS_P (tem)
2829 || BINARY_CLASS_P (tem)
2830 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
2831 || TREE_CODE (tem) == SSA_NAME
2832 || is_gimple_min_invariant (tem))
2833 op0 = tem;
2836 /* Avoid folding if nothing changed, but remember the expression. */
2837 if (op0 == orig_op0)
2838 return NULL_TREE;
2840 result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
2841 gimple_expr_type (stmt), op0);
2842 if (result)
2844 STRIP_USELESS_TYPE_CONVERSION (result);
2845 if (valid_gimple_rhs_p (result))
2846 return result;
2849 return NULL_TREE;
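/* An illustrative example (editorial): for

     b_2 = (int) a_1;
     c_3 = (long) b_2;

   the expression recorded for b_2 is fed to fold, which may combine
   the two conversions into (long) a_1 when the intermediate
   conversion does not change the value.  */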
2852 /* Try to simplify RHS using equivalences and constant folding. */
2854 static tree
2855 try_to_simplify (gimple stmt)
2857 tree tem;
2859 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
2860 in this case; there is no point in doing extra work. */
2861 if (gimple_assign_copy_p (stmt)
2862 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2863 return NULL_TREE;
2865 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2867 case tcc_declaration:
2868 tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
2869 if (tem)
2870 return tem;
2871 break;
2873 case tcc_reference:
2874 /* Do not do full-blown reference lookup here, but simplify
2875 reads from constant aggregates. */
2876 tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
2877 if (tem)
2878 return tem;
2880 /* Fallthrough for some codes that can operate on registers. */
2881 if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
2882 || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
2883 || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
2884 break;
2885 /* We could do a little more with unary ops, if they expand
2886 into binary ops, but it's debatable whether it is worth it. */
2887 case tcc_unary:
2888 return simplify_unary_expression (stmt);
2889 break;
2890 case tcc_comparison:
2891 case tcc_binary:
2892 return simplify_binary_expression (stmt);
2893 break;
2894 default:
2895 break;
2898 return NULL_TREE;
2901 /* Visit and value number USE, return true if the value number
2902 changed. */
2904 static bool
2905 visit_use (tree use)
2907 bool changed = false;
2908 gimple stmt = SSA_NAME_DEF_STMT (use);
2910 VN_INFO (use)->use_processed = true;
2912 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
2913 if (dump_file && (dump_flags & TDF_DETAILS)
2914 && !SSA_NAME_IS_DEFAULT_DEF (use))
2916 fprintf (dump_file, "Value numbering ");
2917 print_generic_expr (dump_file, use, 0);
2918 fprintf (dump_file, " stmt = ");
2919 print_gimple_stmt (dump_file, stmt, 0, 0);
2922 /* Handle uninitialized uses. */
2923 if (SSA_NAME_IS_DEFAULT_DEF (use))
2924 changed = set_ssa_val_to (use, use);
2925 else
2927 if (gimple_code (stmt) == GIMPLE_PHI)
2928 changed = visit_phi (stmt);
2929 else if (!gimple_has_lhs (stmt)
2930 || gimple_has_volatile_ops (stmt)
2931 || stmt_could_throw_p (stmt))
2932 changed = defs_to_varying (stmt);
2933 else if (is_gimple_assign (stmt))
2935 tree lhs = gimple_assign_lhs (stmt);
2936 tree simplified;
2938 /* Shortcut for copies. Simplifying copies is pointless,
2939 since we copy the expression and value they represent. */
2940 if (gimple_assign_copy_p (stmt)
2941 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2942 && TREE_CODE (lhs) == SSA_NAME)
2944 changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
2945 goto done;
2947 simplified = try_to_simplify (stmt);
2948 if (simplified)
2950 if (dump_file && (dump_flags & TDF_DETAILS))
2952 fprintf (dump_file, "RHS ");
2953 print_gimple_expr (dump_file, stmt, 0, 0);
2954 fprintf (dump_file, " simplified to ");
2955 print_generic_expr (dump_file, simplified, 0);
2956 if (TREE_CODE (lhs) == SSA_NAME)
2957 fprintf (dump_file, " has constants %d\n",
2958 expr_has_constants (simplified));
2959 else
2960 fprintf (dump_file, "\n");
2963 /* Setting value numbers to constants will occasionally
2964 screw up phi congruence because constants are not
2965 uniquely associated with a single ssa name that can be
2966 looked up. */
2967 if (simplified
2968 && is_gimple_min_invariant (simplified)
2969 && TREE_CODE (lhs) == SSA_NAME)
2971 VN_INFO (lhs)->expr = simplified;
2972 VN_INFO (lhs)->has_constants = true;
2973 changed = set_ssa_val_to (lhs, simplified);
2974 goto done;
2976 else if (simplified
2977 && TREE_CODE (simplified) == SSA_NAME
2978 && TREE_CODE (lhs) == SSA_NAME)
2980 changed = visit_copy (lhs, simplified);
2981 goto done;
2983 else if (simplified)
2985 if (TREE_CODE (lhs) == SSA_NAME)
2987 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
2988 /* We have to unshare the expression or else
2989 valueizing may change the IL stream. */
2990 VN_INFO (lhs)->expr = unshare_expr (simplified);
2993 else if (stmt_has_constants (stmt)
2994 && TREE_CODE (lhs) == SSA_NAME)
2995 VN_INFO (lhs)->has_constants = true;
2996 else if (TREE_CODE (lhs) == SSA_NAME)
2998 /* We reset expr and constantness here because we may
2999 have been value numbering optimistically while
3000 iterating. They may become non-constant in this case,
3001 even if they were optimistically constant. */
3003 VN_INFO (lhs)->has_constants = false;
3004 VN_INFO (lhs)->expr = NULL_TREE;
3007 if ((TREE_CODE (lhs) == SSA_NAME
3008 /* We can substitute SSA_NAMEs that are live over
3009 abnormal edges with their constant value. */
3010 && !(gimple_assign_copy_p (stmt)
3011 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
3012 && !(simplified
3013 && is_gimple_min_invariant (simplified))
3014 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3015 /* Stores or copies from SSA_NAMEs that are live over
3016 abnormal edges are a problem. */
3017 || (gimple_assign_single_p (stmt)
3018 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
3019 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
3020 changed = defs_to_varying (stmt);
3021 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
3023 changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
3025 else if (TREE_CODE (lhs) == SSA_NAME)
3027 if ((gimple_assign_copy_p (stmt)
3028 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
3029 || (simplified
3030 && is_gimple_min_invariant (simplified)))
3032 VN_INFO (lhs)->has_constants = true;
3033 if (simplified)
3034 changed = set_ssa_val_to (lhs, simplified);
3035 else
3036 changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
3038 else
3040 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
3042 case GIMPLE_UNARY_RHS:
3043 case GIMPLE_BINARY_RHS:
3044 case GIMPLE_TERNARY_RHS:
3045 changed = visit_nary_op (lhs, stmt);
3046 break;
3047 case GIMPLE_SINGLE_RHS:
3048 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
3050 case tcc_reference:
3051 /* VOP-less references can go through the unary case. */
3052 if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
3053 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
3054 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
3055 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
3057 changed = visit_nary_op (lhs, stmt);
3058 break;
3060 /* Fallthrough. */
3061 case tcc_declaration:
3062 changed = visit_reference_op_load
3063 (lhs, gimple_assign_rhs1 (stmt), stmt);
3064 break;
3065 case tcc_expression:
3066 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
3068 changed = visit_nary_op (lhs, stmt);
3069 break;
3071 /* Fallthrough. */
3072 default:
3073 changed = defs_to_varying (stmt);
3075 break;
3076 default:
3077 changed = defs_to_varying (stmt);
3078 break;
3082 else
3083 changed = defs_to_varying (stmt);
3085 else if (is_gimple_call (stmt))
3087 tree lhs = gimple_call_lhs (stmt);
3089 /* ??? We could try to simplify calls. */
3091 if (stmt_has_constants (stmt)
3092 && TREE_CODE (lhs) == SSA_NAME)
3093 VN_INFO (lhs)->has_constants = true;
3094 else if (TREE_CODE (lhs) == SSA_NAME)
3096 /* We reset expr and constantness here because we may
3097 have been value numbering optimistically while
3098 iterating. They may become non-constant in this case,
3099 even if they were optimistically constant. */
3100 VN_INFO (lhs)->has_constants = false;
3101 VN_INFO (lhs)->expr = NULL_TREE;
3104 if (TREE_CODE (lhs) == SSA_NAME
3105 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3106 changed = defs_to_varying (stmt);
3107 /* ??? We should handle stores from calls. */
3108 else if (TREE_CODE (lhs) == SSA_NAME)
3110 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
3111 changed = visit_reference_op_call (lhs, stmt);
3112 else
3113 changed = defs_to_varying (stmt);
3115 else
3116 changed = defs_to_varying (stmt);
3119 done:
3120 return changed;
3123 /* Compare two operands by reverse postorder index. */
3125 static int
3126 compare_ops (const void *pa, const void *pb)
3128 const tree opa = *((const tree *)pa);
3129 const tree opb = *((const tree *)pb);
3130 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3131 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3132 basic_block bba;
3133 basic_block bbb;
3135 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3136 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3137 else if (gimple_nop_p (opstmta))
3138 return -1;
3139 else if (gimple_nop_p (opstmtb))
3140 return 1;
3142 bba = gimple_bb (opstmta);
3143 bbb = gimple_bb (opstmtb);
3145 if (!bba && !bbb)
3146 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3147 else if (!bba)
3148 return -1;
3149 else if (!bbb)
3150 return 1;
3152 if (bba == bbb)
3154 if (gimple_code (opstmta) == GIMPLE_PHI
3155 && gimple_code (opstmtb) == GIMPLE_PHI)
3156 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3157 else if (gimple_code (opstmta) == GIMPLE_PHI)
3158 return -1;
3159 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3160 return 1;
3161 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3162 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3163 else
3164 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3166 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3169 /* Sort an array containing members of a strongly connected component
3170 SCC so that the members are ordered by RPO number.
3171 This means that when the sort is complete, iterating through the
3172 array will give you the members in RPO order. */
3174 static void
3175 sort_scc (VEC (tree, heap) *scc)
3177 VEC_qsort (tree, scc, compare_ops);
3180 /* Insert the no-longer-used nary ONARY into the hash tables of INFO. */
3182 static void
3183 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3185 size_t size = sizeof_vn_nary_op (onary->length);
3186 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3187 &info->nary_obstack);
3188 memcpy (nary, onary, size);
3189 vn_nary_op_insert_into (nary, info->nary, false);
3192 /* Insert the no-longer-used phi OPHI into the hash tables of INFO. */
3194 static void
3195 copy_phi (vn_phi_t ophi, vn_tables_t info)
3197 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3198 void **slot;
3199 memcpy (phi, ophi, sizeof (*phi));
3200 ophi->phiargs = NULL;
3201 slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
3202 gcc_assert (!*slot);
3203 *slot = phi;
3206 /* Insert the no-longer-used reference OREF into the hash tables of INFO. */
3208 static void
3209 copy_reference (vn_reference_t oref, vn_tables_t info)
3211 vn_reference_t ref;
3212 void **slot;
3213 ref = (vn_reference_t) pool_alloc (info->references_pool);
3214 memcpy (ref, oref, sizeof (*ref));
3215 oref->operands = NULL;
3216 slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
3217 INSERT);
3218 if (*slot)
3219 free_reference (*slot);
3220 *slot = ref;
3223 /* Process a strongly connected component in the SSA graph. */
3225 static void
3226 process_scc (VEC (tree, heap) *scc)
3228 tree var;
3229 unsigned int i;
3230 unsigned int iterations = 0;
3231 bool changed = true;
3232 htab_iterator hi;
3233 vn_nary_op_t nary;
3234 vn_phi_t phi;
3235 vn_reference_t ref;
3237 /* If the SCC has a single member, just visit it. */
3238 if (VEC_length (tree, scc) == 1)
3240 tree use = VEC_index (tree, scc, 0);
3241 if (VN_INFO (use)->use_processed)
3242 return;
3243 /* We need to make sure it doesn't form a cycle itself, which can
3244 happen for self-referential PHI nodes. In that case we would
3245 end up inserting an expression with VN_TOP operands into the
3246 valid table which makes us derive bogus equivalences later.
3247 The cheapest way to check this is to assume it for all PHI nodes. */
3248 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3249 /* Fallthru to iteration. */ ;
3250 else
3252 visit_use (use);
3253 return;
3257 /* Iterate over the SCC with the optimistic table until it stops
3258 changing. */
3259 current_info = optimistic_info;
3260 while (changed)
3262 changed = false;
3263 iterations++;
3264 /* As we are value-numbering optimistically we have to
3265 clear the expression tables and the simplified expressions
3266 in each iteration until we converge. */
3267 htab_empty (optimistic_info->nary);
3268 htab_empty (optimistic_info->phis);
3269 htab_empty (optimistic_info->references);
3270 obstack_free (&optimistic_info->nary_obstack, NULL);
3271 gcc_obstack_init (&optimistic_info->nary_obstack);
3272 empty_alloc_pool (optimistic_info->phis_pool);
3273 empty_alloc_pool (optimistic_info->references_pool);
3274 FOR_EACH_VEC_ELT (tree, scc, i, var)
3275 VN_INFO (var)->expr = NULL_TREE;
3276 FOR_EACH_VEC_ELT (tree, scc, i, var)
3277 changed |= visit_use (var);
3280 statistics_histogram_event (cfun, "SCC iterations", iterations);
3282 /* Finally, copy the contents of the no longer used optimistic
3283 table to the valid table. */
3284 FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
3285 copy_nary (nary, valid_info);
3286 FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
3287 copy_phi (phi, valid_info);
3288 FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
3289 copy_reference (ref, valid_info);
3291 current_info = valid_info;
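/* An illustrative example (editorial) of why the optimistic iteration
   pays off: for the cycle

     i_1 = PHI <0(2), i_2(3)>
     i_2 = i_1 + 0;

   the first iteration sees i_2 still at VN_TOP and values both names
   to 0; the second iteration reproduces the same result, so the SCC
   converges with i_1 and i_2 both equal to 0, which a pessimistic
   scheme would not discover.  */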
3294 DEF_VEC_O(ssa_op_iter);
3295 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
3297 /* Pop the components of the found SCC for NAME off the SCC stack
3298 and process them. Returns true if all went well, false if
3299 we run into resource limits. */
3301 static bool
3302 extract_and_process_scc_for_name (tree name)
3304 VEC (tree, heap) *scc = NULL;
3305 tree x;
3307 /* Found an SCC, pop the components off the SCC stack and
3308 process them. */
3309 do
3310 {
3311 x = VEC_pop (tree, sccstack);
3313 VN_INFO (x)->on_sccstack = false;
3314 VEC_safe_push (tree, heap, scc, x);
3315 } while (x != name);
3317 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
3318 if (VEC_length (tree, scc)
3319 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3321 if (dump_file)
3322 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3323 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
3324 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3325 return false;
3328 if (VEC_length (tree, scc) > 1)
3329 sort_scc (scc);
3331 if (dump_file && (dump_flags & TDF_DETAILS))
3332 print_scc (dump_file, scc);
3334 process_scc (scc);
3336 VEC_free (tree, heap, scc);
3338 return true;
3341 /* Depth first search on NAME to discover and process SCCs in the SSA
3342 graph.
3343 Execution of this algorithm relies on the fact that the SCCs are
3344 popped off the stack in topological order.
3345 Returns true if successful, false if we stopped processing SCCs due
3346 to resource constraints. */
3348 static bool
3349 DFS (tree name)
3351 VEC(ssa_op_iter, heap) *itervec = NULL;
3352 VEC(tree, heap) *namevec = NULL;
3353 use_operand_p usep = NULL;
3354 gimple defstmt;
3355 tree use;
3356 ssa_op_iter iter;
3358 start_over:
3359 /* SCC info */
3360 VN_INFO (name)->dfsnum = next_dfs_num++;
3361 VN_INFO (name)->visited = true;
3362 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3364 VEC_safe_push (tree, heap, sccstack, name);
3365 VN_INFO (name)->on_sccstack = true;
3366 defstmt = SSA_NAME_DEF_STMT (name);
3368 /* Recursively DFS on our operands, looking for SCC's. */
3369 if (!gimple_nop_p (defstmt))
3371 /* Push a new iterator. */
3372 if (gimple_code (defstmt) == GIMPLE_PHI)
3373 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3374 else
3375 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3377 else
3378 clear_and_done_ssa_iter (&iter);
3380 while (1)
3382 /* If we are done processing uses of a name, go up the stack
3383 of iterators and process SCCs as we found them. */
3384 if (op_iter_done (&iter))
3386 /* See if we found an SCC. */
3387 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3388 if (!extract_and_process_scc_for_name (name))
3390 VEC_free (tree, heap, namevec);
3391 VEC_free (ssa_op_iter, heap, itervec);
3392 return false;
3395 /* Check if we are done. */
3396 if (VEC_empty (tree, namevec))
3398 VEC_free (tree, heap, namevec);
3399 VEC_free (ssa_op_iter, heap, itervec);
3400 return true;
3403 /* Restore the last use walker and continue walking there. */
3404 use = name;
3405 name = VEC_pop (tree, namevec);
3406 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
3407 sizeof (ssa_op_iter));
3408 VEC_pop (ssa_op_iter, itervec);
3409 goto continue_walking;
3412 use = USE_FROM_PTR (usep);
3414 /* Since we handle phi nodes, we will sometimes get
3415 invariants in the use expression. */
3416 if (TREE_CODE (use) == SSA_NAME)
3418 if (! (VN_INFO (use)->visited))
3420 /* Recurse by pushing the current use walking state on
3421 the stack and starting over. */
3422 VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
3423 VEC_safe_push(tree, heap, namevec, name);
3424 name = use;
3425 goto start_over;
3427 continue_walking:
3428 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3429 VN_INFO (use)->low);
3431 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3432 && VN_INFO (use)->on_sccstack)
3434 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3435 VN_INFO (name)->low);
3439 usep = op_iter_next_use (&iter);
3443 /* Allocate a value number table. */
3445 static void
3446 allocate_vn_table (vn_tables_t table)
3448 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3449 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3450 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3451 free_reference);
3453 gcc_obstack_init (&table->nary_obstack);
3454 table->phis_pool = create_alloc_pool ("VN phis",
3455 sizeof (struct vn_phi_s),
3456 30);
3457 table->references_pool = create_alloc_pool ("VN references",
3458 sizeof (struct vn_reference_s),
3459 30);
3462 /* Free a value number table. */
3464 static void
3465 free_vn_table (vn_tables_t table)
3467 htab_delete (table->phis);
3468 htab_delete (table->nary);
3469 htab_delete (table->references);
3470 obstack_free (&table->nary_obstack, NULL);
3471 free_alloc_pool (table->phis_pool);
3472 free_alloc_pool (table->references_pool);
3475 static void
3476 init_scc_vn (void)
3478 size_t i;
3479 int j;
3480 int *rpo_numbers_temp;
3482 calculate_dominance_info (CDI_DOMINATORS);
3483 sccstack = NULL;
3484 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3485 free);
3487 constant_value_ids = BITMAP_ALLOC (NULL);
3489 next_dfs_num = 1;
3490 next_value_id = 1;
3492 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
3493 /* VEC_alloc doesn't actually grow it to the right size; it just
3494 preallocates the space to do so. */
3495 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
3496 gcc_obstack_init (&vn_ssa_aux_obstack);
3498 shared_lookup_phiargs = NULL;
3499 shared_lookup_references = NULL;
3500 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3501 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3502 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3504 /* RPO_NUMBERS_TEMP holds block indices in RPO order, i.e.
3505 rpo_numbers_temp[i] is the index of the i'th block in that order;
3506 invert it so rpo_numbers maps a block index to its RPO number. */
3507 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3508 rpo_numbers[rpo_numbers_temp[j]] = j;
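/* Editorial example: if rpo_numbers_temp were {2, 4, 3}, the loop
   above sets rpo_numbers[2] = 0, rpo_numbers[4] = 1 and
   rpo_numbers[3] = 2, i.e. each block index now maps to its position
   in the RPO walk.  */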
3510 XDELETE (rpo_numbers_temp);
3512 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3514 /* Create the VN_INFO structures, and initialize value numbers to
3515 TOP. */
3516 for (i = 0; i < num_ssa_names; i++)
3518 tree name = ssa_name (i);
3519 if (name)
3521 VN_INFO_GET (name)->valnum = VN_TOP;
3522 VN_INFO (name)->expr = NULL_TREE;
3523 VN_INFO (name)->value_id = 0;
3527 renumber_gimple_stmt_uids ();
3529 /* Create the valid and optimistic value numbering tables. */
3530 valid_info = XCNEW (struct vn_tables_s);
3531 allocate_vn_table (valid_info);
3532 optimistic_info = XCNEW (struct vn_tables_s);
3533 allocate_vn_table (optimistic_info);
3536 void
3537 free_scc_vn (void)
3539 size_t i;
3541 htab_delete (constant_to_value_id);
3542 BITMAP_FREE (constant_value_ids);
3543 VEC_free (tree, heap, shared_lookup_phiargs);
3544 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
3545 XDELETEVEC (rpo_numbers);
3547 for (i = 0; i < num_ssa_names; i++)
3549 tree name = ssa_name (i);
3550 if (name
3551 && VN_INFO (name)->needs_insertion)
3552 release_ssa_name (name);
3554 obstack_free (&vn_ssa_aux_obstack, NULL);
3555 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
3557 VEC_free (tree, heap, sccstack);
3558 free_vn_table (valid_info);
3559 XDELETE (valid_info);
3560 free_vn_table (optimistic_info);
3561 XDELETE (optimistic_info);
3564 /* If RESULT is something useful, set *ID to its value id. */
3566 static void
3567 set_value_id_for_result (tree result, unsigned int *id)
3569 if (result)
3571 if (TREE_CODE (result) == SSA_NAME)
3572 *id = VN_INFO (result)->value_id;
3573 else if (is_gimple_min_invariant (result))
3574 *id = get_or_alloc_constant_value_id (result);
3578 /* Set the value ids in the valid hash tables. */
3580 static void
3581 set_hashtable_value_ids (void)
3583 htab_iterator hi;
3584 vn_nary_op_t vno;
3585 vn_reference_t vr;
3586 vn_phi_t vp;
3588 /* Now set the value ids of the things we had put in the hash
3589 table. */
3591 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
3592 vno, vn_nary_op_t, hi)
3593 set_value_id_for_result (vno->result, &vno->value_id);
3595 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
3596 vp, vn_phi_t, hi)
3597 set_value_id_for_result (vp->result, &vp->value_id);
3599 FOR_EACH_HTAB_ELEMENT (valid_info->references,
3600 vr, vn_reference_t, hi)
3601 set_value_id_for_result (vr->result, &vr->value_id);
3604 /* Do SCCVN. Returns true if it finished, false if we bailed out
3605 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
3606 how we use the alias oracle walking during the VN process. */
3608 bool
3609 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
3611 size_t i;
3612 tree param;
3613 bool changed = true;
3615 default_vn_walk_kind = default_vn_walk_kind_;
3617 init_scc_vn ();
3618 current_info = valid_info;
3620 for (param = DECL_ARGUMENTS (current_function_decl);
3621 param;
3622 param = DECL_CHAIN (param))
3624 if (gimple_default_def (cfun, param) != NULL)
3626 tree def = gimple_default_def (cfun, param);
3627 VN_INFO (def)->valnum = def;
3631 for (i = 1; i < num_ssa_names; ++i)
3633 tree name = ssa_name (i);
3634 if (name
3635 && VN_INFO (name)->visited == false
3636 && !has_zero_uses (name))
3637 if (!DFS (name))
3639 free_scc_vn ();
3640 return false;
3644 /* Initialize the value ids. */
3646 for (i = 1; i < num_ssa_names; ++i)
3648 tree name = ssa_name (i);
3649 vn_ssa_aux_t info;
3650 if (!name)
3651 continue;
3652 info = VN_INFO (name);
3653 if (info->valnum == name
3654 || info->valnum == VN_TOP)
3655 info->value_id = get_next_value_id ();
3656 else if (is_gimple_min_invariant (info->valnum))
3657 info->value_id = get_or_alloc_constant_value_id (info->valnum);
3660 /* Propagate value ids until they stop changing. */
3661 while (changed)
3663 changed = false;
3664 for (i = 1; i < num_ssa_names; ++i)
3666 tree name = ssa_name (i);
3667 vn_ssa_aux_t info;
3668 if (!name)
3669 continue;
3670 info = VN_INFO (name);
3671 if (TREE_CODE (info->valnum) == SSA_NAME
3672 && info->valnum != name
3673 && info->value_id != VN_INFO (info->valnum)->value_id)
3675 changed = true;
3676 info->value_id = VN_INFO (info->valnum)->value_id;
3681 set_hashtable_value_ids ();
3683 if (dump_file && (dump_flags & TDF_DETAILS))
3685 fprintf (dump_file, "Value numbers:\n");
3686 for (i = 0; i < num_ssa_names; i++)
3688 tree name = ssa_name (i);
3689 if (name
3690 && VN_INFO (name)->visited
3691 && SSA_VAL (name) != name)
3693 print_generic_expr (dump_file, name, 0);
3694 fprintf (dump_file, " = ");
3695 print_generic_expr (dump_file, SSA_VAL (name), 0);
3696 fprintf (dump_file, "\n");
3701 return true;
3704 /* Return the maximum value id we have ever seen. */
3706 unsigned int
3707 get_max_value_id (void)
3709 return next_value_id;
3712 /* Return the next unique value id. */
3714 unsigned int
3715 get_next_value_id (void)
3717 return next_value_id++;
3721 /* Compare two expressions E1 and E2 and return true if they are equal. */
3723 bool
3724 expressions_equal_p (tree e1, tree e2)
3726 /* The obvious case. */
3727 if (e1 == e2)
3728 return true;
3730 /* If only one of them is null, they cannot be equal. */
3731 if (!e1 || !e2)
3732 return false;
3734 /* Now perform the actual comparison. */
3735 if (TREE_CODE (e1) == TREE_CODE (e2)
3736 && operand_equal_p (e1, e2, OEP_PURE_SAME))
3737 return true;
3739 return false;
3743 /* Return true if the nary operation NARY may trap. This is a copy
3744 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
3746 bool
3747 vn_nary_may_trap (vn_nary_op_t nary)
3749 tree type;
3750 tree rhs2 = NULL_TREE;
3751 bool honor_nans = false;
3752 bool honor_snans = false;
3753 bool fp_operation = false;
3754 bool honor_trapv = false;
3755 bool handled, ret;
3756 unsigned i;
3758 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
3759 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
3760 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
3762 type = nary->type;
3763 fp_operation = FLOAT_TYPE_P (type);
3764 if (fp_operation)
3766 honor_nans = flag_trapping_math && !flag_finite_math_only;
3767 honor_snans = flag_signaling_nans != 0;
3769 else if (INTEGRAL_TYPE_P (type)
3770 && TYPE_OVERFLOW_TRAPS (type))
3771 honor_trapv = true;
3773 if (nary->length >= 2)
3774 rhs2 = nary->op[1];
3775 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
3776 honor_trapv,
3777 honor_nans, honor_snans, rhs2,
3778 &handled);
3779 if (handled
3780 && ret)
3781 return true;
3783 for (i = 0; i < nary->length; ++i)
3784 if (tree_could_trap_p (nary->op[i]))
3785 return true;
3787 return false;