gcc/tree-ssa-sccvn.c
/* SCC value numbering for trees
   Copyright (C) 2006-2013 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "dumpfile.h"
#include "hashtab.h"
#include "alloc-pool.h"
#include "flags.h"
#include "bitmap.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "gimple-fold.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands); a small worked example of this scheme follows this comment.

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.
   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
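
/* As a small worked example of the optimistic SCC iteration described
   above (illustrative, not from the paper): for the cycle

       x_1 = PHI <0 (preheader), x_2 (latch)>
       x_2 = x_1 + 0;

   {x_1, x_2} forms one SCC.  On the first iteration x_2 is still
   VN_TOP, so the PHI optimistically gets the value 0 (TOP arguments
   are equivalent to everything and are ignored); x_2 = 0 + 0 then
   simplifies to 0 as well.  The second iteration re-visits the PHI as
   PHI <0, 0>, nothing changes, and the whole SCC is proven equal to
   the constant 0.  */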
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;

/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;

/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static vec<tree> sccstack;

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
    vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
  return newinfo;
}
/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;
  enum tree_code code;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is not an assignment use it directly.  */
  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  code = gimple_assign_rhs_code (def_stmt);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      if ((code == REALPART_EXPR
           || code == IMAGPART_EXPR
           || code == VIEW_CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
                                      0)) == SSA_NAME)
        expr = fold_build1 (code,
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (code,
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (code,
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    case tcc_exceptional:
      if (code == CONSTRUCTOR
          && TREE_CODE
               (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
        expr = gimple_assign_rhs1 (def_stmt);
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
        enum tree_code code = gimple_assign_rhs_code (stmt);
        tree rhs1 = gimple_assign_rhs1 (stmt);
        switch (get_gimple_rhs_class (code))
          {
          case GIMPLE_UNARY_RHS:
          case GIMPLE_BINARY_RHS:
          case GIMPLE_TERNARY_RHS:
            return VN_NARY;
          case GIMPLE_SINGLE_RHS:
            switch (TREE_CODE_CLASS (code))
              {
              case tcc_reference:
                /* VOP-less references can go through unary case.  */
                if ((code == REALPART_EXPR
                     || code == IMAGPART_EXPR
                     || code == VIEW_CONVERT_EXPR
                     || code == BIT_FIELD_REF)
                    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
                  return VN_NARY;

                /* Fallthrough.  */
              case tcc_declaration:
                return VN_REFERENCE;

              case tcc_constant:
                return VN_CONSTANT;

              default:
                if (code == ADDR_EXPR)
                  return (is_gimple_min_invariant (rhs1)
                          ? VN_CONSTANT : VN_REFERENCE);
                else if (code == CONSTRUCTOR)
                  return VN_NARY;
                return VN_NONE;
              }
          default:
            return VN_NONE;
          }
      }
    default:
      return VN_NONE;
    }
}
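
/* For instance (illustrative): a_1 = b_2 + 1 and a_1 = -b_2 are VN_NARY;
   a_1 = PHI <b_2, c_3> is VN_PHI; a load x_1 = MEM[p_2] and any call are
   VN_REFERENCE; a_1 = 42 is VN_CONSTANT; and a VOP-less
   x_1 = VIEW_CONVERT_EXPR<T>(y_2) is handled as VN_NARY even though its
   rhs code is a reference.  */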
/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  phi->phiargs.release ();
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  vr->operands.release ();
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, creating a new one if none exists
   yet, and return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, INSERT);
  if (*slot)
    return ((vn_constant_t)*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = (void *) vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
          /* We do not care for differences in type qualification.  */
          && (vro1->type == vro2->type
              || (vro1->type && vro2->type
                  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
                                         TYPE_MAIN_VARIANT (vro2->type))))
          && expressions_equal_p (vro1->op0, vro2->op0)
          && expressions_equal_p (vro1->op1, vro2->op1)
          && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}
/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
        deref = true;
      else if (vro->opcode != ADDR_EXPR)
        deref = false;
      if (vro->off != -1)
        {
          if (off == -1)
            off = 0;
          off += vro->off;
        }
      else
        {
          if (off != -1
              && off != 0)
            result = iterative_hash_hashval_t (off, result);
          off = -1;
          if (deref
              && vro->opcode == ADDR_EXPR)
            {
              if (vro->op0)
                {
                  tree op = TREE_OPERAND (vro->op0, 0);
                  result = iterative_hash_hashval_t (TREE_CODE (op), result);
                  result = iterative_hash_expr (op, result);
                }
            }
          else
            result = vn_reference_op_compute_hash (vro, result);
        }
    }
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
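
/* Note how the hash above canonicalizes equivalent spellings of an
   access (an illustrative observation): runs of operands with known
   constant offsets are accumulated and hashed as a single value, and
   an ADDR_EXPR of a decl that feeds a dereference contributes the
   underlying decl itself.  Thus e.g. MEM[&a + 4] and a COMPONENT_REF
   of 'a' at byte offset 4 receive the same hash, which is what lets
   the equality function and the lookups below common them.  */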
/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  unsigned i, j;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
        return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
           && (TYPE_PRECISION (vr1->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
           && (TYPE_PRECISION (vr2->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
        {
          if (vro1->opcode == MEM_REF)
            deref1 = true;
          if (vro1->off == -1)
            break;
          off1 += vro1->off;
        }
      for (; vr2->operands.iterate (j, &vro2); j++)
        {
          if (vro2->opcode == MEM_REF)
            deref2 = true;
          if (vro2->off == -1)
            break;
          off2 += vro2->off;
        }
      if (off1 != off2)
        return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
        {
          memset (&tem1, 0, sizeof (tem1));
          tem1.op0 = TREE_OPERAND (vro1->op0, 0);
          tem1.type = TREE_TYPE (tem1.op0);
          tem1.opcode = TREE_CODE (tem1.op0);
          vro1 = &tem1;
          deref1 = false;
        }
      if (deref2 && vro2->opcode == ADDR_EXPR)
        {
          memset (&tem2, 0, sizeof (tem2));
          tem2.op0 = TREE_OPERAND (vro2->op0, 0);
          tem2.type = TREE_TYPE (tem2.op0);
          tem2.opcode = TREE_CODE (tem2.op0);
          vro2 = &tem2;
          deref2 = false;
        }
      if (deref1 != deref2)
        return false;
      if (!vn_reference_op_eq (vro1, vro2))
        return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
         || vr2->operands.length () != j);

  return true;
}
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      result->safe_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      result->safe_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      result->safe_push (temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
        {
        case MODIFY_EXPR:
          temp.op0 = TREE_OPERAND (ref, 1);
          break;
        case WITH_SIZE_EXPR:
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.off = 0;
          break;
        case MEM_REF:
          /* The base address gets its own vn_reference_op_s structure.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          if (host_integerp (TREE_OPERAND (ref, 1), 0))
            temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          {
            tree this_offset = component_ref_field_offset (ref);
            if (this_offset
                && TREE_CODE (this_offset) == INTEGER_CST)
              {
                tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
                if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
                  {
                    double_int off
                      = tree_to_double_int (this_offset)
                        + tree_to_double_int (bit_offset)
                          .arshift (BITS_PER_UNIT == 8
                                    ? 3 : exact_log2 (BITS_PER_UNIT),
                                    HOST_BITS_PER_DOUBLE_INT);
                    if (off.fits_shwi ()
                        /* Prohibit value-numbering zero offset components
                           of addresses the same before the pass folding
                           __builtin_object_size had a chance to run
                           (checking cfun->after_inlining does the
                           trick here).  */
                        && (TREE_CODE (orig) != ADDR_EXPR
                            || !off.is_zero ()
                            || cfun->after_inlining))
                      temp.off = off.low;
                  }
              }
          }
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
          if (TREE_CODE (temp.op0) == INTEGER_CST
              && TREE_CODE (temp.op1) == INTEGER_CST
              && TREE_CODE (temp.op2) == INTEGER_CST)
            {
              double_int off = tree_to_double_int (temp.op0);
              off += -tree_to_double_int (temp.op1);
              off *= tree_to_double_int (temp.op2);
              if (off.fits_shwi ())
                temp.off = off.low;
            }
          break;
        case VAR_DECL:
          if (DECL_HARD_REGISTER (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthru.  */
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
          /* Canonicalize decls to MEM[&decl] which is what we end up with
             when valueizing MEM[ptr] with ptr = &decl.  */
          temp.opcode = MEM_REF;
          temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
          temp.off = 0;
          result->safe_push (temp);
          temp.opcode = ADDR_EXPR;
          temp.op0 = build_fold_addr_expr (ref);
          temp.type = TREE_TYPE (temp.op0);
          temp.off = -1;
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case FIXED_CST:
        case CONSTRUCTOR:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (IE they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration  */
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          temp.off = 0;
          break;
        case IMAGPART_EXPR:
          /* This is only interesting for its constant offset.  */
          temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
          break;
        default:
          gcc_unreachable ();
        }
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
          || TREE_CODE (ref) == MODIFY_EXPR
          || TREE_CODE (ref) == WITH_SIZE_EXPR
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
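
/* For example (illustrative): for a load from s.f[i_1], where 's' is a
   structure variable, the loop above pushes, outermost first,

     ARRAY_REF      op0 = i_1, op1 = lower bound, op2 = element size
     COMPONENT_REF  op0 = FIELD_DECL for f
     MEM_REF        op0 = 0   (the decl canonicalized to MEM[&s])
     ADDR_EXPR      op0 = &s

   with each operand's 'off' field set to its constant byte offset where
   known and to -1 otherwise.  */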
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
        size = -1;
      else
        size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case ADDR_EXPR:
          /* Apart from ADDR_EXPR arguments to MEM_REF.  */
          if (base != NULL_TREE
              && TREE_CODE (base) == MEM_REF
              && op->op0
              && DECL_P (TREE_OPERAND (op->op0, 0)))
            {
              vn_reference_op_t pop = &ops[i-1];
              base = TREE_OPERAND (op->op0, 0);
              if (pop->off == -1)
                {
                  max_size = -1;
                  offset = 0;
                }
              else
                offset += pop->off * BITS_PER_UNIT;
              op0_p = NULL;
              break;
            }
          /* Fallthru.  */
        case CALL_EXPR:
          return false;

        /* Record the base objects.  */
        case MEM_REF:
          base_alias_set = get_deref_alias_set (op->op0);
          *op0_p = build2 (MEM_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          op0_p = NULL;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += tree_low_cst (op->op1, 0);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */

            if (op->op1
                || !host_integerp (DECL_FIELD_OFFSET (field), 1))
              max_size = -1;
            else
              {
                offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
                           * BITS_PER_UNIT);
                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (!host_integerp (op->op0, 0)
              || !host_integerp (op->op1, 0)
              || !host_integerp (op->op2, 0))
            max_size = -1;
          else
            {
              HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
              hindex -= TREE_INT_CST_LOW (op->op1);
              hindex *= TREE_INT_CST_LOW (op->op2);
              hindex *= BITS_PER_UNIT;
              offset += hindex;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  return true;
}
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  temp.off = -1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static vec<vn_reference_op_s>
create_reference_ops_from_ref (tree ref)
{
  vec<vn_reference_op_s> result = vNULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static vec<vn_reference_op_s>
create_reference_ops_from_call (gimple call)
{
  vec<vn_reference_op_s> result = vNULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
                            unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  HOST_WIDE_INT addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
                                             &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      double_int off = tree_to_double_int (mem_op->op0);
      off = off.sext (TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
      off += double_int::from_shwi (addr_offset);
      mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (host_integerp (mem_op->op0, 0))
        mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
      else
        mem_op->off = -1;
    }
}

/* Optimize the MEM_REF preceding position *I_P in a vn_reference_op_s
   vector *OPS by propagating the definition of its SSA-name address
   operand (an ADDR_EXPR or POINTER_PLUS_EXPR) into the MEM_REF offset.
   Updates *I_P to point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
                                     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple def_stmt;
  enum tree_code code;
  double_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = tree_to_double_int (mem_op->op0);
  off = off.sext (TYPE_PRECISION (TREE_TYPE (mem_op->op0)));

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
                                                 &addr_offset);
      if (!addr_base
          || TREE_CODE (addr_base) != MEM_REF)
        return;

      off += double_int::from_shwi (addr_offset);
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
          || TREE_CODE (ptroff) != INTEGER_CST)
        return;

      off += tree_to_double_int (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (host_integerp (mem_op->op0, 0))
    mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}
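
/* An illustrative example of the two folders above: given
   p_1 = q_2 + 8 in the IL, the operand pair { MEM_REF off 4, SSA p_1 }
   becomes { MEM_REF off 12, SSA q_2 } via the POINTER_PLUS_EXPR path;
   and once an address operand is an ADDR_EXPR like &s.f next to a
   MEM_REF, vn_reference_fold_indirect moves the field offset of .f
   into the MEM_REF and rewrites the address to plain &s.  */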
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
        arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
          || (arg0->opcode == ADDR_EXPR
              && is_gimple_min_invariant (arg0->op0)))
        anyconst = true;
      if (arg1
          && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
              || (arg1->opcode == ADDR_EXPR
                  && is_gimple_min_invariant (arg1->op0))))
        anyconst = true;
      if (anyconst)
        {
          tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
                                         arg1 ? 2 : 1,
                                         arg0->op0,
                                         arg1 ? arg1->op0 : NULL);
          if (folded
              && TREE_CODE (folded) == NOP_EXPR)
            folded = TREE_OPERAND (folded, 0);
          if (folded
              && is_gimple_min_invariant (folded))
            return folded;
        }
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
           && TREE_CODE (op->op0) == INTEGER_CST
           && integer_zerop (op->op1)
           && operands.length () == 2)
    {
      vn_reference_op_t arg0;
      arg0 = &operands[1];
      if (arg0->opcode == STRING_CST
          && (TYPE_MODE (op->type)
              == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
          && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
          && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
          && tree_int_cst_sgn (op->op0) >= 0
          && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
        return build_int_cst_type (op->type,
                                   (TREE_STRING_POINTER (arg0->op0)
                                    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}
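
/* E.g. (illustrative) a read of "abc"[1] - an ARRAY_REF with constant
   index over a STRING_CST - is folded by the above to the character
   constant 'b', and a builtin call with constant arguments such as
   sqrt (4.0) can fold to 2.0.  */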
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static vec<vn_reference_op_s>
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          tree tem = SSA_VAL (vro->op0);
          if (tem != vro->op0)
            {
              *valueized_anything = true;
              vro->op0 = tem;
            }
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        {
          tree tem = SSA_VAL (vro->op1);
          if (tem != vro->op1)
            {
              *valueized_anything = true;
              vro->op1 = tem;
            }
        }
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        {
          tree tem = SSA_VAL (vro->op2);
          if (tem != vro->op2)
            {
              *valueized_anything = true;
              vro->op2 = tem;
            }
        }
      /* If it transforms from an SSA_NAME to an address, fold with
         a preceding indirect reference.  */
      if (i > 0
          && vro->op0
          && TREE_CODE (vro->op0) == ADDR_EXPR
          && orig[i - 1].opcode == MEM_REF)
        vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
               && vro->opcode == SSA_NAME
               && orig[i - 1].opcode == MEM_REF)
        vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
         one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
               && vro->off == -1
               && TREE_CODE (vro->op0) == INTEGER_CST
               && TREE_CODE (vro->op1) == INTEGER_CST
               && TREE_CODE (vro->op2) == INTEGER_CST)
        {
          double_int off = tree_to_double_int (vro->op0);
          off += -tree_to_double_int (vro->op1);
          off *= tree_to_double_int (vro->op2);
          if (off.fits_shwi ())
            vro->off = off.low;
        }
    }

  return orig;
}

/* Likewise, but discard the information about whether anything
   was valueized.  */

static vec<vn_reference_op_s>
valueize_refs (vec<vn_reference_op_s> orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}
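
/* Illustrative: if i_2 has been value-numbered to the constant 3, an
   ARRAY_REF operand recorded as op0 = i_2 with off = -1 is rewritten
   above to op0 = 3 and its constant byte offset (index minus lower
   bound, times element size) is filled in, so hashing and equality
   treat a[i_2] exactly like a[3].  */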
/* Shared vector of vn_reference_op_s structures, reused by the lookup
   routines below to avoid repeated allocations.  */

static vec<vn_reference_op_s> shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
                                              valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

/* If non-NULL, points to where the last VUSE seen during a reference
   lookup walk should be recorded.  */

static tree *last_vuse_ptr;

static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;
1381 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1382 with the current VUSE and performs the expression lookup. */
1384 static void *
1385 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1386 unsigned int cnt, void *vr_)
1388 vn_reference_t vr = (vn_reference_t)vr_;
1389 void **slot;
1390 hashval_t hash;
1392 /* This bounds the stmt walks we perform on reference lookups
1393 to O(1) instead of O(N) where N is the number of dominating
1394 stores. */
1395 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1396 return (void *)-1;
1398 if (last_vuse_ptr)
1399 *last_vuse_ptr = vuse;
1401 /* Fixup vuse and hash. */
1402 if (vr->vuse)
1403 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1404 vr->vuse = SSA_VAL (vuse);
1405 if (vr->vuse)
1406 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1408 hash = vr->hashcode;
1409 slot = htab_find_slot_with_hash (current_info->references, vr,
1410 hash, NO_INSERT);
1411 if (!slot && current_info == optimistic_info)
1412 slot = htab_find_slot_with_hash (valid_info->references, vr,
1413 hash, NO_INSERT);
1414 if (slot)
1415 return *slot;
1417 return NULL;
/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
                                          alias_set_type set,
                                          tree type,
                                          vec<vn_reference_op_s,
                                              va_heap> operands,
                                          tree value)
{
  struct vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
                                     operands.copy (), value, value_id);
}
/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static vec<vn_reference_op_s> lhs_ops = vNULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definition's
     LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      vec<vn_reference_op_s> tem;
      tree lhs = gimple_assign_lhs (def_stmt);
      bool valueized_anything = false;
      /* Avoid re-allocation overhead.  */
      lhs_ops.truncate (0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      tem = lhs_ops;
      lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
      gcc_assert (lhs_ops == tem);
      if (valueized_anything)
        {
          lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
                                                      get_alias_set (lhs),
                                                      TREE_TYPE (lhs), lhs_ops);
          if (lhs_ref_ok
              && !refs_may_alias_p_1 (ref, &lhs_ref, true))
            return NULL;
        }
      else
        {
          ao_ref_init (&lhs_ref, lhs);
          lhs_ref_ok = true;
        }
    }

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* We can't deduce anything useful from clobbers.  */
  if (gimple_clobber_p (def_stmt))
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
          == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
          && maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          return vn_reference_lookup_or_insert_for_pieces
                   (vuse, vr->set, vr->type, vr->operands, val);
        }
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          return vn_reference_lookup_or_insert_for_pieces
                   (vuse, vr->set, vr->type, vr->operands, val);
        }
    }

  /* 3) Assignment from a constant.  We can use fold's native encode/interpret
     routines to extract the assigned bits.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && CHAR_BIT == 8 && BITS_PER_UNIT == 8
           && ref->size == maxsize
           && maxsize % BITS_PER_UNIT == 0
           && offset % BITS_PER_UNIT == 0
           && is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
          && maxsize2 == size2
          && size2 % BITS_PER_UNIT == 0
          && offset2 % BITS_PER_UNIT == 0
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          /* We support up to 512-bit values (for V8DFmode).  */
          unsigned char buffer[64];
          int len;

          len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
                                    buffer, sizeof (buffer));
          if (len > 0)
            {
              tree val = native_interpret_expr (vr->type,
                                                buffer
                                                + ((offset - offset2)
                                                   / BITS_PER_UNIT),
                                                ref->size / BITS_PER_UNIT);
              if (val)
                return vn_reference_lookup_or_insert_for_pieces
                         (vuse, vr->set, vr->type, vr->operands, val);
            }
        }
    }

  /* 4) Assignment from an SSA name whose definition we may be able
     to access pieces from.  */
  else if (ref->size == maxsize
           && is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
    {
      tree rhs1 = gimple_assign_rhs1 (def_stmt);
      gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt2)
          && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
              || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
          && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
        {
          tree base2;
          HOST_WIDE_INT offset2, size2, maxsize2, off;
          base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                           &offset2, &size2, &maxsize2);
          off = offset - offset2;
          if (maxsize2 != -1
              && maxsize2 == size2
              && operand_equal_p (base, base2, 0)
              && offset2 <= offset
              && offset2 + size2 >= offset + maxsize)
            {
              tree val = NULL_TREE;
              HOST_WIDE_INT elsz
                = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
              if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
                {
                  if (off == 0)
                    val = gimple_assign_rhs1 (def_stmt2);
                  else if (off == elsz)
                    val = gimple_assign_rhs2 (def_stmt2);
                }
              else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
                       && off % elsz == 0)
                {
                  tree ctor = gimple_assign_rhs1 (def_stmt2);
                  unsigned i = off / elsz;
                  if (i < CONSTRUCTOR_NELTS (ctor))
                    {
                      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
                      if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
                        {
                          if (TREE_CODE (TREE_TYPE (elt->value))
                              != VECTOR_TYPE)
                            val = elt->value;
                        }
                    }
                }
              if (val)
                return vn_reference_lookup_or_insert_for_pieces
                         (vuse, vr->set, vr->type, vr->operands, val);
            }
        }
    }

  /* 5) For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      vec<vn_reference_op_s> rhs = vNULL;
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
        return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      if (maxsize2 == -1
          || (base != base2 && !operand_equal_p (base, base2, 0))
          || offset2 > offset
          || offset2 + size2 < offset + maxsize)
        return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
         contains valueized operands for the lhs.  */
      i = vr->operands.length () - 1;
      j = lhs_ops.length () - 1;
      while (j >= 0 && i >= 0
             && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
        {
          i--;
          j--;
        }

      /* ??? The innermost op should always be a MEM_REF and we already
         checked that the assignment to the lhs kills vr.  Thus for
         aggregate copies using char[] types the vn_reference_op_eq
         may fail when comparing types for compatibility.  But we really
         don't care here - further lookups with the rewritten operands
         will simply fail if we messed up types too badly.  */
      if (j == 0 && i >= 0
          && lhs_ops[0].opcode == MEM_REF
          && lhs_ops[0].off != -1
          && (lhs_ops[0].off == vr->operands[i].off))
        i--, j--;

      /* i now points to the first additional op.
         ??? LHS may not be completely contained in VR, one or more
         VIEW_CONVERT_EXPRs could be in its way.  We could at least
         try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
        return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + rhs.length () > vr->operands.length ())
        {
          vec<vn_reference_op_s> old = vr->operands;
          vr->operands.safe_grow (i + 1 + rhs.length ());
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = vNULL;
        }
      else
        vr->operands.truncate (i + 1 + rhs.length ());
      FOR_EACH_VEC_ELT (rhs, j, vro)
        vr->operands[i + 1 + j] = *vro;
      rhs.release ();
      vr->operands = valueize_refs (vr->operands);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* 6) For memcpy copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && is_gimple_reg_type (vr->type)
           /* ??? Handle BCOPY as well.  */
           && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
               || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
               || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
           && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
               || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
           && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
               || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
           && host_integerp (gimple_call_arg (def_stmt, 2), 1))
    {
      tree lhs, rhs;
      ao_ref r;
      HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
      vn_reference_op_s op;
      HOST_WIDE_INT at;

      /* Only handle non-variable, addressable refs.  */
      if (ref->size != maxsize
          || offset % BITS_PER_UNIT != 0
          || ref->size % BITS_PER_UNIT != 0)
        return (void *)-1;

      /* Extract a pointer base and an offset for the destination.  */
      lhs = gimple_call_arg (def_stmt, 0);
      lhs_offset = 0;
      if (TREE_CODE (lhs) == SSA_NAME)
        lhs = SSA_VAL (lhs);
      if (TREE_CODE (lhs) == ADDR_EXPR)
        {
          tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
                                                    &lhs_offset);
          if (!tem)
            return (void *)-1;
          if (TREE_CODE (tem) == MEM_REF
              && host_integerp (TREE_OPERAND (tem, 1), 1))
            {
              lhs = TREE_OPERAND (tem, 0);
              lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
            }
          else if (DECL_P (tem))
            lhs = build_fold_addr_expr (tem);
          else
            return (void *)-1;
        }
      if (TREE_CODE (lhs) != SSA_NAME
          && TREE_CODE (lhs) != ADDR_EXPR)
        return (void *)-1;

      /* Extract a pointer base and an offset for the source.  */
      rhs = gimple_call_arg (def_stmt, 1);
      rhs_offset = 0;
      if (TREE_CODE (rhs) == SSA_NAME)
        rhs = SSA_VAL (rhs);
      if (TREE_CODE (rhs) == ADDR_EXPR)
        {
          tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
                                                    &rhs_offset);
          if (!tem)
            return (void *)-1;
          if (TREE_CODE (tem) == MEM_REF
              && host_integerp (TREE_OPERAND (tem, 1), 1))
            {
              rhs = TREE_OPERAND (tem, 0);
              rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
            }
          else if (DECL_P (tem))
            rhs = build_fold_addr_expr (tem);
          else
            return (void *)-1;
        }
      if (TREE_CODE (rhs) != SSA_NAME
          && TREE_CODE (rhs) != ADDR_EXPR)
        return (void *)-1;

      copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));

      /* The bases of the destination and the references have to agree.  */
      if ((TREE_CODE (base) != MEM_REF
           && !DECL_P (base))
          || (TREE_CODE (base) == MEM_REF
              && (TREE_OPERAND (base, 0) != lhs
                  || !host_integerp (TREE_OPERAND (base, 1), 1)))
          || (DECL_P (base)
              && (TREE_CODE (lhs) != ADDR_EXPR
                  || TREE_OPERAND (lhs, 0) != base)))
        return (void *)-1;

      /* And the access has to be contained within the memcpy destination.  */
      at = offset / BITS_PER_UNIT;
      if (TREE_CODE (base) == MEM_REF)
        at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
      if (lhs_offset > at
          || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
        return (void *)-1;

      /* Make room for 2 operands in the new reference.  */
      if (vr->operands.length () < 2)
        {
          vec<vn_reference_op_s> old = vr->operands;
          vr->operands.safe_grow_cleared (2);
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references.create (0);
        }
      else
        vr->operands.truncate (2);

      /* The looked-through reference is a simple MEM_REF.  */
      memset (&op, 0, sizeof (op));
      op.type = vr->type;
      op.opcode = MEM_REF;
      op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
      op.off = at - lhs_offset + rhs_offset;
      vr->operands[0] = op;
      op.type = TREE_TYPE (rhs);
      op.opcode = TREE_CODE (rhs);
      op.op0 = rhs;
      op.off = -1;
      vr->operands[1] = op;
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
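
/* A sketch of what the translations above achieve (illustrative): after

     memset (&a, 0, sizeof (a));
     ...
     tem_1 = a.x;

   case 1 proves the load reads zeroed memory and yields 0, and after

     b = a;
     ...
     tem_2 = b.y;

   case 5 rewrites the lookup of b.y into a lookup of a.y and continues
   the walk, so the load can reuse whatever value is recorded for a.y.  */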
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
                            vec<vn_reference_op_s> operands,
                            vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  shared_lookup_references.truncate (0);
  shared_lookup_references.safe_grow (operands.length ());
  memcpy (shared_lookup_references.address (),
          operands.address (),
          sizeof (vn_reference_op_s)
          * operands.length ());
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      vn_walk_kind = kind;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
        *vnresult =
          (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        vr1.operands.release ();
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}
1951 /* Lookup OP in the current hash table, and return the resulting value
1952 number if it exists in the hash table. Return NULL_TREE if it does
1953 not exist in the hash table or if the result field of the structure
1954 was NULL.. VNRESULT will be filled in with the vn_reference_t
1955 stored in the hashtable if one exists. */
1957 tree
1958 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
1959 vn_reference_t *vnresult)
1961 vec<vn_reference_op_s> operands;
1962 struct vn_reference_s vr1;
1963 tree cst;
1964 bool valueized_anything;
1966 if (vnresult)
1967 *vnresult = NULL;
1969 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1970 vr1.operands = operands
1971 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
1972 vr1.type = TREE_TYPE (op);
1973 vr1.set = get_alias_set (op);
1974 vr1.hashcode = vn_reference_compute_hash (&vr1);
1975 if ((cst = fully_constant_vn_reference_p (&vr1)))
1976 return cst;
1978 if (kind != VN_NOWALK
1979 && vr1.vuse)
1981 vn_reference_t wvnresult;
1982 ao_ref r;
1983 /* Make sure to use a valueized reference if we valueized anything.
1984 Otherwise preserve the full reference for advanced TBAA. */
1985 if (!valueized_anything
1986 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
1987 vr1.operands))
1988 ao_ref_init (&r, op);
1989 vn_walk_kind = kind;
1990 wvnresult =
1991 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1992 vn_reference_lookup_2,
1993 vn_reference_lookup_3, &vr1);
1994 if (vr1.operands != operands)
1995 vr1.operands.release ();
1996 if (wvnresult)
1998 if (vnresult)
1999 *vnresult = wvnresult;
2000 return wvnresult->result;
2003 return NULL_TREE;
2006 return vn_reference_lookup_1 (&vr1, vnresult);
2010 /* Insert OP into the current hash table with a value number of
2011 RESULT, and return the resulting reference structure we created. */
2013 vn_reference_t
2014 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2016 void **slot;
2017 vn_reference_t vr1;
2019 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2020 if (TREE_CODE (result) == SSA_NAME)
2021 vr1->value_id = VN_INFO (result)->value_id;
2022 else
2023 vr1->value_id = get_or_alloc_constant_value_id (result);
2024 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2025 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
2026 vr1->type = TREE_TYPE (op);
2027 vr1->set = get_alias_set (op);
2028 vr1->hashcode = vn_reference_compute_hash (vr1);
2029 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2030 vr1->result_vdef = vdef;
2032 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
2033 INSERT);
2035 /* Because we lookup stores using vuses, and value number failures
2036 using the vdefs (see visit_reference_op_store for how and why),
2037 it's possible that on failure we may try to insert an already
2038 inserted store. This is not wrong, there is no ssa name for a
2039 store that we could use as a differentiator anyway. Thus, unlike
2040 the other insert functions, you cannot gcc_assert (!*slot)
2041 here. */
2043 /* But free the old entry in case of a collision. */
2044 if (*slot)
2045 free_reference (*slot);
2047 *slot = vr1;
2048 return vr1;
2051 /* Insert a reference by its pieces into the current hash table with
2052 a value number of RESULT. Return the resulting reference
2053 structure we created. */
2055 vn_reference_t
2056 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2057 vec<vn_reference_op_s> operands,
2058 tree result, unsigned int value_id)
2061 void **slot;
2062 vn_reference_t vr1;
2064 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2065 vr1->value_id = value_id;
2066 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2067 vr1->operands = valueize_refs (operands);
2068 vr1->type = type;
2069 vr1->set = set;
2070 vr1->hashcode = vn_reference_compute_hash (vr1);
2071 if (result && TREE_CODE (result) == SSA_NAME)
2072 result = SSA_VAL (result);
2073 vr1->result = result;
2075 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
2076 INSERT);
2078 /* At this point we should have all the things inserted that we have
2079 seen before, and we should never try inserting something that
2080 already exists. */
2081 gcc_assert (!*slot);
2082 if (*slot)
2083 free_reference (*slot);
2085 *slot = vr1;
2086 return vr1;
2089 /* Compute and return the hash value for nary operation VNO1. */
2091 hashval_t
2092 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2094 hashval_t hash;
2095 unsigned i;
2097 for (i = 0; i < vno1->length; ++i)
2098 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2099 vno1->op[i] = SSA_VAL (vno1->op[i]);
2101 if (vno1->length == 2
2102 && commutative_tree_code (vno1->opcode)
2103 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2105 tree temp = vno1->op[0];
2106 vno1->op[0] = vno1->op[1];
2107 vno1->op[1] = temp;
2110 hash = iterative_hash_hashval_t (vno1->opcode, 0);
2111 for (i = 0; i < vno1->length; ++i)
2112 hash = iterative_hash_expr (vno1->op[i], hash);
2114 return hash;
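/* For example (an illustrative sketch; SSA names are made up), because
   commutative operands are put into a canonical order above, the two
   expressions

     x_1 = a_2 + b_3;
     y_4 = b_3 + a_2;

   hash identically and are later recognized as the same n-ary
   operation, so x_1 and y_4 receive the same value number.  */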
2117 /* Return the computed hashcode for nary operation P1. */
2119 static hashval_t
2120 vn_nary_op_hash (const void *p1)
2122 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2123 return vno1->hashcode;
2126 /* Compare nary operations P1 and P2 and return true if they are
2127 equivalent. */
2130 vn_nary_op_eq (const void *p1, const void *p2)
2132 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2133 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
2134 unsigned i;
2136 if (vno1->hashcode != vno2->hashcode)
2137 return false;
2139 if (vno1->length != vno2->length)
2140 return false;
2142 if (vno1->opcode != vno2->opcode
2143 || !types_compatible_p (vno1->type, vno2->type))
2144 return false;
2146 for (i = 0; i < vno1->length; ++i)
2147 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2148 return false;
2150 return true;
2153 /* Initialize VNO from the pieces provided. */
2155 static void
2156 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2157 enum tree_code code, tree type, tree *ops)
2159 vno->opcode = code;
2160 vno->length = length;
2161 vno->type = type;
2162 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2165 /* Initialize VNO from OP. */
2167 static void
2168 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2170 unsigned i;
2172 vno->opcode = TREE_CODE (op);
2173 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2174 vno->type = TREE_TYPE (op);
2175 for (i = 0; i < vno->length; ++i)
2176 vno->op[i] = TREE_OPERAND (op, i);
2179 /* Return the number of operands for a vn_nary ops structure from STMT. */
2181 static unsigned int
2182 vn_nary_length_from_stmt (gimple stmt)
2184 switch (gimple_assign_rhs_code (stmt))
2186 case REALPART_EXPR:
2187 case IMAGPART_EXPR:
2188 case VIEW_CONVERT_EXPR:
2189 return 1;
2191 case BIT_FIELD_REF:
2192 return 3;
2194 case CONSTRUCTOR:
2195 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2197 default:
2198 return gimple_num_ops (stmt) - 1;
2202 /* Initialize VNO from STMT. */
2204 static void
2205 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2207 unsigned i;
2209 vno->opcode = gimple_assign_rhs_code (stmt);
2210 vno->type = gimple_expr_type (stmt);
2211 switch (vno->opcode)
2213 case REALPART_EXPR:
2214 case IMAGPART_EXPR:
2215 case VIEW_CONVERT_EXPR:
2216 vno->length = 1;
2217 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2218 break;
2220 case BIT_FIELD_REF:
2221 vno->length = 3;
2222 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2223 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2224 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2225 break;
2227 case CONSTRUCTOR:
2228 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2229 for (i = 0; i < vno->length; ++i)
2230 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2231 break;
2233 default:
2234 gcc_checking_assert (!gimple_assign_single_p (stmt));
2235 vno->length = gimple_num_ops (stmt) - 1;
2236 for (i = 0; i < vno->length; ++i)
2237 vno->op[i] = gimple_op (stmt, i + 1);
2241 /* Compute the hashcode for VNO and look for it in the hash table;
2242 return the resulting value number if it exists in the hash table.
2243 Return NULL_TREE if it does not exist in the hash table or if the
2244 result field of the operation is NULL. VNRESULT will contain the
2245 vn_nary_op_t from the hashtable if it exists. */
2247 static tree
2248 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2250 void **slot;
2252 if (vnresult)
2253 *vnresult = NULL;
2255 vno->hashcode = vn_nary_op_compute_hash (vno);
2256 slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
2257 NO_INSERT);
2258 if (!slot && current_info == optimistic_info)
2259 slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
2260 NO_INSERT);
2261 if (!slot)
2262 return NULL_TREE;
2263 if (vnresult)
2264 *vnresult = (vn_nary_op_t)*slot;
2265 return ((vn_nary_op_t)*slot)->result;
2268 /* Lookup an n-ary operation by its pieces and return the resulting value
2269 number if it exists in the hash table. Return NULL_TREE if it does
2270 not exist in the hash table or if the result field of the operation
2271 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2272 if it exists. */
2274 tree
2275 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2276 tree type, tree *ops, vn_nary_op_t *vnresult)
2278 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2279 sizeof_vn_nary_op (length));
2280 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2281 return vn_nary_op_lookup_1 (vno1, vnresult);
2284 /* Lookup OP in the current hash table, and return the resulting value
2285 number if it exists in the hash table. Return NULL_TREE if it does
2286 not exist in the hash table or if the result field of the operation
2287 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2288 if it exists. */
2290 tree
2291 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2293 vn_nary_op_t vno1
2294 = XALLOCAVAR (struct vn_nary_op_s,
2295 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2296 init_vn_nary_op_from_op (vno1, op);
2297 return vn_nary_op_lookup_1 (vno1, vnresult);
2300 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2301 value number if it exists in the hash table. Return NULL_TREE if
2302 it does not exist in the hash table. VNRESULT will contain the
2303 vn_nary_op_t from the hashtable if it exists. */
2305 tree
2306 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2308 vn_nary_op_t vno1
2309 = XALLOCAVAR (struct vn_nary_op_s,
2310 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2311 init_vn_nary_op_from_stmt (vno1, stmt);
2312 return vn_nary_op_lookup_1 (vno1, vnresult);
2315 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2317 static vn_nary_op_t
2318 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2320 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2323 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2324 obstack. */
2326 static vn_nary_op_t
2327 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2329 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2330 &current_info->nary_obstack);
2332 vno1->value_id = value_id;
2333 vno1->length = length;
2334 vno1->result = result;
2336 return vno1;
2339 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2340 VNO->HASHCODE first. */
2342 static vn_nary_op_t
2343 vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash)
2345 void **slot;
2347 if (compute_hash)
2348 vno->hashcode = vn_nary_op_compute_hash (vno);
2350 slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT);
2351 gcc_assert (!*slot);
2353 *slot = vno;
2354 return vno;
2357 /* Insert an n-ary operation into the current hash table using its
2358 pieces. Return the vn_nary_op_t structure we created and put in
2359 the hashtable. */
2361 vn_nary_op_t
2362 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2363 tree type, tree *ops,
2364 tree result, unsigned int value_id)
2366 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2367 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2368 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2371 /* Insert OP into the current hash table with a value number of
2372 RESULT. Return the vn_nary_op_t structure we created and put in
2373 the hashtable. */
2375 vn_nary_op_t
2376 vn_nary_op_insert (tree op, tree result)
2378 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2379 vn_nary_op_t vno1;
2381 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2382 init_vn_nary_op_from_op (vno1, op);
2383 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2386 /* Insert the rhs of STMT into the current hash table with a value number of
2387 RESULT. */
2389 vn_nary_op_t
2390 vn_nary_op_insert_stmt (gimple stmt, tree result)
2392 vn_nary_op_t vno1
2393 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2394 result, VN_INFO (result)->value_id);
2395 init_vn_nary_op_from_stmt (vno1, stmt);
2396 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2399 /* Compute a hashcode for PHI operation VP1 and return it. */
2401 static inline hashval_t
2402 vn_phi_compute_hash (vn_phi_t vp1)
2404 hashval_t result;
2405 int i;
2406 tree phi1op;
2407 tree type;
2409 result = vp1->block->index;
2411 /* If all PHI arguments are constants we need to distinguish
2412 the PHI node via its type. */
2413 type = vp1->type;
2414 result += vn_hash_type (type);
2416 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2418 if (phi1op == VN_TOP)
2419 continue;
2420 result = iterative_hash_expr (phi1op, result);
2423 return result;
2426 /* Return the computed hashcode for phi operation P1. */
2428 static hashval_t
2429 vn_phi_hash (const void *p1)
2431 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2432 return vp1->hashcode;
2435 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2437 static int
2438 vn_phi_eq (const void *p1, const void *p2)
2440 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2441 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
2443 if (vp1->hashcode != vp2->hashcode)
2444 return false;
2446 if (vp1->block == vp2->block)
2448 int i;
2449 tree phi1op;
2451 /* If the PHI nodes do not have compatible types
2452 they are not the same. */
2453 if (!types_compatible_p (vp1->type, vp2->type))
2454 return false;
2456 /* Any phi in the same block will have its arguments in the
2457 same edge order, because of how we store phi nodes. */
2458 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2460 tree phi2op = vp2->phiargs[i];
2461 if (phi1op == VN_TOP || phi2op == VN_TOP)
2462 continue;
2463 if (!expressions_equal_p (phi1op, phi2op))
2464 return false;
2466 return true;
2468 return false;
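/* For example (illustrative), the two PHIs

     x_1 = PHI <a_2 (3), b_4 (5)>
     y_6 = PHI <a_2 (3), b_4 (5)>

   in the same block compare equal and thus get the same value number,
   while an argument that is still VN_TOP (not yet visited during
   optimistic iteration) compares equal to anything.  */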
2471 static vec<tree> shared_lookup_phiargs;
2473 /* Lookup PHI in the current hash table, and return the resulting
2474 value number if it exists in the hash table. Return NULL_TREE if
2475 it does not exist in the hash table. */
2477 static tree
2478 vn_phi_lookup (gimple phi)
2480 void **slot;
2481 struct vn_phi_s vp1;
2482 unsigned i;
2484 shared_lookup_phiargs.truncate (0);
2486 /* Canonicalize the SSA_NAME's to their value number. */
2487 for (i = 0; i < gimple_phi_num_args (phi); i++)
2489 tree def = PHI_ARG_DEF (phi, i);
2490 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2491 shared_lookup_phiargs.safe_push (def);
2493 vp1.type = TREE_TYPE (gimple_phi_result (phi));
2494 vp1.phiargs = shared_lookup_phiargs;
2495 vp1.block = gimple_bb (phi);
2496 vp1.hashcode = vn_phi_compute_hash (&vp1);
2497 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
2498 NO_INSERT);
2499 if (!slot && current_info == optimistic_info)
2500 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
2501 NO_INSERT);
2502 if (!slot)
2503 return NULL_TREE;
2504 return ((vn_phi_t)*slot)->result;
2507 /* Insert PHI into the current hash table with a value number of
2508 RESULT. */
2510 static vn_phi_t
2511 vn_phi_insert (gimple phi, tree result)
2513 void **slot;
2514 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2515 unsigned i;
2516 vec<tree> args = vNULL;
2518 /* Canonicalize the SSA_NAME's to their value number. */
2519 for (i = 0; i < gimple_phi_num_args (phi); i++)
2521 tree def = PHI_ARG_DEF (phi, i);
2522 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2523 args.safe_push (def);
2525 vp1->value_id = VN_INFO (result)->value_id;
2526 vp1->type = TREE_TYPE (gimple_phi_result (phi));
2527 vp1->phiargs = args;
2528 vp1->block = gimple_bb (phi);
2529 vp1->result = result;
2530 vp1->hashcode = vn_phi_compute_hash (vp1);
2532 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
2533 INSERT);
2535 /* Because we iterate over phi operations more than once, it's
2536 possible the slot might already exist here, hence no assert.  */
2537 *slot = vp1;
2538 return vp1;
2542 /* Print the components of strongly connected component SCC to OUT. */
2544 static void
2545 print_scc (FILE *out, vec<tree> scc)
2547 tree var;
2548 unsigned int i;
2550 fprintf (out, "SCC consists of:");
2551 FOR_EACH_VEC_ELT (scc, i, var)
2553 fprintf (out, " ");
2554 print_generic_expr (out, var, 0);
2556 fprintf (out, "\n");
2559 /* Set the value number of FROM to TO, return true if it has changed
2560 as a result. */
2562 static inline bool
2563 set_ssa_val_to (tree from, tree to)
2565 tree currval = SSA_VAL (from);
2566 HOST_WIDE_INT toff, coff;
2568 if (from != to)
2570 if (currval == from)
2572 if (dump_file && (dump_flags & TDF_DETAILS))
2574 fprintf (dump_file, "Not changing value number of ");
2575 print_generic_expr (dump_file, from, 0);
2576 fprintf (dump_file, " from VARYING to ");
2577 print_generic_expr (dump_file, to, 0);
2578 fprintf (dump_file, "\n");
2580 return false;
2582 else if (TREE_CODE (to) == SSA_NAME
2583 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2584 to = from;
2587 /* The only things we allow as value numbers are VN_TOP, SSA names
2588 and invariants, so assert that here. */
2589 gcc_assert (to != NULL_TREE
2590 && (to == VN_TOP
2591 || TREE_CODE (to) == SSA_NAME
2592 || is_gimple_min_invariant (to)));
2594 if (dump_file && (dump_flags & TDF_DETAILS))
2596 fprintf (dump_file, "Setting value number of ");
2597 print_generic_expr (dump_file, from, 0);
2598 fprintf (dump_file, " to ");
2599 print_generic_expr (dump_file, to, 0);
2602 if (currval != to
2603 && !operand_equal_p (currval, to, 0)
2604 /* ??? For addresses involving volatile objects or types operand_equal_p
2605 does not reliably detect ADDR_EXPRs as equal. We know we are only
2606 getting invariant gimple addresses here, so can use
2607 get_addr_base_and_unit_offset to do this comparison. */
2608 && !(TREE_CODE (currval) == ADDR_EXPR
2609 && TREE_CODE (to) == ADDR_EXPR
2610 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
2611 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
2612 && coff == toff))
2614 VN_INFO (from)->valnum = to;
2615 if (dump_file && (dump_flags & TDF_DETAILS))
2616 fprintf (dump_file, " (changed)\n");
2617 return true;
2619 if (dump_file && (dump_flags & TDF_DETAILS))
2620 fprintf (dump_file, "\n");
2621 return false;
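/* For example (illustrative), during optimistic iteration x_1 may first
   be value numbered to the constant 0 and later, if the SCC does not
   converge on that value, be lowered to x_1 itself (VARYING); the
   currval == from check above then refuses to move it back up the
   lattice on subsequent visits.  */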
2624 /* Mark as processed all the definitions in the defining stmt of USE, or
2625 the USE itself. */
2627 static void
2628 mark_use_processed (tree use)
2630 ssa_op_iter iter;
2631 def_operand_p defp;
2632 gimple stmt = SSA_NAME_DEF_STMT (use);
2634 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
2636 VN_INFO (use)->use_processed = true;
2637 return;
2640 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2642 tree def = DEF_FROM_PTR (defp);
2644 VN_INFO (def)->use_processed = true;
2648 /* Set all definitions in STMT to value number to themselves.
2649 Return true if a value number changed. */
2651 static bool
2652 defs_to_varying (gimple stmt)
2654 bool changed = false;
2655 ssa_op_iter iter;
2656 def_operand_p defp;
2658 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2660 tree def = DEF_FROM_PTR (defp);
2661 changed |= set_ssa_val_to (def, def);
2663 return changed;
2666 static bool expr_has_constants (tree expr);
2667 static tree valueize_expr (tree expr);
2669 /* Visit a copy between LHS and RHS, return true if the value number
2670 changed. */
2672 static bool
2673 visit_copy (tree lhs, tree rhs)
2675 /* Follow chains of copies to their destination. */
2676 while (TREE_CODE (rhs) == SSA_NAME
2677 && SSA_VAL (rhs) != rhs)
2678 rhs = SSA_VAL (rhs);
2680 /* The copy may have a more interesting constant-filled expression
2681 (we don't, since we know our RHS is just an SSA name). */
2682 if (TREE_CODE (rhs) == SSA_NAME)
2684 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2685 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2688 return set_ssa_val_to (lhs, rhs);
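/* For example (illustrative), with b_2 = a_1 and c_3 = b_2, the loop
   above follows SSA_VAL from b_2 to a_1, so c_3 is value numbered
   directly to a_1 instead of to the intermediate copy b_2.  */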
2691 /* Visit a nary operator RHS, value number it, and return true if the
2692 value number of LHS has changed as a result. */
2694 static bool
2695 visit_nary_op (tree lhs, gimple stmt)
2697 bool changed = false;
2698 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2700 if (result)
2701 changed = set_ssa_val_to (lhs, result);
2702 else
2704 changed = set_ssa_val_to (lhs, lhs);
2705 vn_nary_op_insert_stmt (stmt, lhs);
2708 return changed;
2711 /* Visit a call STMT storing into LHS. Return true if the value number
2712 of the LHS has changed as a result. */
2714 static bool
2715 visit_reference_op_call (tree lhs, gimple stmt)
2717 bool changed = false;
2718 struct vn_reference_s vr1;
2719 vn_reference_t vnresult = NULL;
2720 tree vuse = gimple_vuse (stmt);
2721 tree vdef = gimple_vdef (stmt);
2723 /* Non-SSA lhs is handled in copy_reference_ops_from_call. */
2724 if (lhs && TREE_CODE (lhs) != SSA_NAME)
2725 lhs = NULL_TREE;
2727 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2728 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2729 vr1.type = gimple_expr_type (stmt);
2730 vr1.set = 0;
2731 vr1.hashcode = vn_reference_compute_hash (&vr1);
2732 vn_reference_lookup_1 (&vr1, &vnresult);
2734 if (vnresult)
2736 if (vnresult->result_vdef)
2737 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
2739 if (!vnresult->result && lhs)
2740 vnresult->result = lhs;
2742 if (vnresult->result && lhs)
2744 changed |= set_ssa_val_to (lhs, vnresult->result);
2746 if (VN_INFO (vnresult->result)->has_constants)
2747 VN_INFO (lhs)->has_constants = true;
2750 else
2752 void **slot;
2753 vn_reference_t vr2;
2754 if (vdef)
2755 changed |= set_ssa_val_to (vdef, vdef);
2756 if (lhs)
2757 changed |= set_ssa_val_to (lhs, lhs);
2758 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2759 vr2->vuse = vr1.vuse;
2760 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2761 vr2->type = vr1.type;
2762 vr2->set = vr1.set;
2763 vr2->hashcode = vr1.hashcode;
2764 vr2->result = lhs;
2765 vr2->result_vdef = vdef;
2766 slot = htab_find_slot_with_hash (current_info->references,
2767 vr2, vr2->hashcode, INSERT);
2768 if (*slot)
2769 free_reference (*slot);
2770 *slot = vr2;
2773 return changed;
2776 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2777 and return true if the value number of the LHS has changed as a result. */
2779 static bool
2780 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2782 bool changed = false;
2783 tree last_vuse;
2784 tree result;
2786 last_vuse = gimple_vuse (stmt);
2787 last_vuse_ptr = &last_vuse;
2788 result = vn_reference_lookup (op, gimple_vuse (stmt),
2789 default_vn_walk_kind, NULL);
2790 last_vuse_ptr = NULL;
2792 /* If we have a VCE, try looking up its operand as it might be stored in
2793 a different type. */
2794 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2795 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2796 default_vn_walk_kind, NULL);
2798 /* We handle type-punning through unions by value-numbering based
2799 on offset and size of the access. Be prepared to handle a
2800 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
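/* For example (an illustrative sketch):

     union { int i; float f; } u;
     u.f = x_1;
     ... = u.i;

   the load from u.i matches the store to u.f because offset and size
   agree, and the type mismatch is repaired by valuing the load as
   VIEW_CONVERT_EXPR <int> (x_1).  */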
2801 if (result
2802 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2804 /* We will be setting the value number of lhs to the value number
2805 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2806 So first simplify and lookup this expression to see if it
2807 is already available. */
2808 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2809 if ((CONVERT_EXPR_P (val)
2810 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2811 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2813 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2814 if ((CONVERT_EXPR_P (tem)
2815 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2816 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2817 TREE_TYPE (val), tem)))
2818 val = tem;
2820 result = val;
2821 if (!is_gimple_min_invariant (val)
2822 && TREE_CODE (val) != SSA_NAME)
2823 result = vn_nary_op_lookup (val, NULL);
2824 /* If the expression is not yet available, value-number lhs to
2825 a new SSA_NAME we create. */
2826 if (!result)
2828 result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (),
2829 "vntemp");
2830 /* Initialize value-number information properly. */
2831 VN_INFO_GET (result)->valnum = result;
2832 VN_INFO (result)->value_id = get_next_value_id ();
2833 VN_INFO (result)->expr = val;
2834 VN_INFO (result)->has_constants = expr_has_constants (val);
2835 VN_INFO (result)->needs_insertion = true;
2836 /* As all "inserted" statements are singleton SCCs, insert
2837 into the valid table. This is strictly needed to
2838 avoid re-generating new value SSA_NAMEs for the same
2839 expression during SCC iteration over and over (the
2840 optimistic table gets cleared after each iteration).
2841 We do not need to insert into the optimistic table, as
2842 lookups there will fall back to the valid table. */
2843 if (current_info == optimistic_info)
2845 current_info = valid_info;
2846 vn_nary_op_insert (val, result);
2847 current_info = optimistic_info;
2849 else
2850 vn_nary_op_insert (val, result);
2851 if (dump_file && (dump_flags & TDF_DETAILS))
2853 fprintf (dump_file, "Inserting name ");
2854 print_generic_expr (dump_file, result, 0);
2855 fprintf (dump_file, " for expression ");
2856 print_generic_expr (dump_file, val, 0);
2857 fprintf (dump_file, "\n");
2862 if (result)
2864 changed = set_ssa_val_to (lhs, result);
2865 if (TREE_CODE (result) == SSA_NAME
2866 && VN_INFO (result)->has_constants)
2868 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2869 VN_INFO (lhs)->has_constants = true;
2872 else
2874 changed = set_ssa_val_to (lhs, lhs);
2875 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
2878 return changed;
2882 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2883 and return true if the value number of the LHS has changed as a result. */
2885 static bool
2886 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2888 bool changed = false;
2889 vn_reference_t vnresult = NULL;
2890 tree result, assign;
2891 bool resultsame = false;
2892 tree vuse = gimple_vuse (stmt);
2893 tree vdef = gimple_vdef (stmt);
2895 /* First we want to do a lookup using the *vuses* from the store and
2896 see whether the last store to this location with the same address
2897 stored the same value.
2899 The vuses represent the memory state before the store. If the
2900 memory state, address, and value of the store is the same as the
2901 last store to this location, then this store will produce the
2902 same memory state as that store.
2904 In this case the vdef versions for this store are value numbered to those
2905 vuse versions, since they represent the same memory state after
2906 this store.
2908 Otherwise, the vdefs for the store are used when inserting into
2909 the table, since the store generates a new memory state. */
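/* For example (illustrative):

     # .MEM_3 = VDEF <.MEM_2>
     a.x = t_1;
     ...
     # .MEM_5 = VDEF <.MEM_3>
     a.x = t_1;

   the second store stores the value the location already holds, so
   .MEM_5 is value numbered to .MEM_3 and the store can later be
   recognized as redundant.  */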
2911 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);
2913 if (result)
2915 if (TREE_CODE (result) == SSA_NAME)
2916 result = SSA_VAL (result);
2917 if (TREE_CODE (op) == SSA_NAME)
2918 op = SSA_VAL (op);
2919 resultsame = expressions_equal_p (result, op);
2922 if (!result || !resultsame)
2924 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
2925 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
2926 if (vnresult)
2928 VN_INFO (vdef)->use_processed = true;
2929 return set_ssa_val_to (vdef, vnresult->result_vdef);
2933 if (!result || !resultsame)
2935 if (dump_file && (dump_flags & TDF_DETAILS))
2937 fprintf (dump_file, "No store match\n");
2938 fprintf (dump_file, "Value numbering store ");
2939 print_generic_expr (dump_file, lhs, 0);
2940 fprintf (dump_file, " to ");
2941 print_generic_expr (dump_file, op, 0);
2942 fprintf (dump_file, "\n");
2944 /* Have to set value numbers before insert, since insert is
2945 going to valueize the references in-place. */
2946 if (vdef)
2948 changed |= set_ssa_val_to (vdef, vdef);
2951 /* Do not insert structure copies into the tables. */
2952 if (is_gimple_min_invariant (op)
2953 || is_gimple_reg (op))
2954 vn_reference_insert (lhs, op, vdef, NULL);
2956 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
2957 vn_reference_insert (assign, lhs, vuse, vdef);
2959 else
2961 /* We had a match, so value number the vdef to have the value
2962 number of the vuse it came from. */
2964 if (dump_file && (dump_flags & TDF_DETAILS))
2965 fprintf (dump_file, "Store matched earlier value, "
2966 "value numbering store vdefs to matching vuses.\n");
2968 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
2971 return changed;
2974 /* Visit and value number PHI, return true if the value number
2975 changed. */
2977 static bool
2978 visit_phi (gimple phi)
2980 bool changed = false;
2981 tree result;
2982 tree sameval = VN_TOP;
2983 bool allsame = true;
2984 unsigned i;
2986 /* TODO: We could check for this in init_scc_vn, and replace this
2987 with a gcc_assert. */
2988 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2989 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2991 /* See if all non-TOP arguments have the same value. TOP is
2992 equivalent to everything, so we can ignore it. */
2993 for (i = 0; i < gimple_phi_num_args (phi); i++)
2995 tree def = PHI_ARG_DEF (phi, i);
2997 if (TREE_CODE (def) == SSA_NAME)
2998 def = SSA_VAL (def);
2999 if (def == VN_TOP)
3000 continue;
3001 if (sameval == VN_TOP)
3003 sameval = def;
3005 else
3007 if (!expressions_equal_p (def, sameval))
3009 allsame = false;
3010 break;
3015 /* If all value numbered to the same value, the phi node has that
3016 value. */
3017 if (allsame)
3018 return set_ssa_val_to (PHI_RESULT (phi), sameval);
3020 /* Otherwise, see if it is equivalent to a phi node in this block. */
3021 result = vn_phi_lookup (phi);
3022 if (result)
3023 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3024 else
3026 vn_phi_insert (phi, PHI_RESULT (phi));
3027 VN_INFO (PHI_RESULT (phi))->has_constants = false;
3028 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
3029 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3032 return changed;
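/* For example (illustrative), x_1 = PHI <y_2 (3), y_2 (4)> is value
   numbered to y_2, and x_5 = PHI <7 (6), VN_TOP (7)> is optimistically
   value numbered to 7 because VN_TOP is equivalent to everything; if
   the VN_TOP argument later turns out to differ, iteration over the
   SCC corrects the value.  */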
3035 /* Return true if EXPR contains constants. */
3037 static bool
3038 expr_has_constants (tree expr)
3040 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3042 case tcc_unary:
3043 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
3045 case tcc_binary:
3046 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
3047 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
3048 /* Constants inside reference ops are rarely interesting, but
3049 it can take a lot of looking to find them. */
3050 case tcc_reference:
3051 case tcc_declaration:
3052 return false;
3053 default:
3054 return is_gimple_min_invariant (expr);
3056 return false;
3059 /* Return true if STMT contains constants. */
3061 static bool
3062 stmt_has_constants (gimple stmt)
3064 if (gimple_code (stmt) != GIMPLE_ASSIGN)
3065 return false;
3067 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
3069 case GIMPLE_UNARY_RHS:
3070 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
3072 case GIMPLE_BINARY_RHS:
3073 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
3074 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
3075 case GIMPLE_TERNARY_RHS:
3076 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
3077 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
3078 || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
3079 case GIMPLE_SINGLE_RHS:
3080 /* Constants inside reference ops are rarely interesting, but
3081 it can take a lot of looking to find them. */
3082 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
3083 default:
3084 gcc_unreachable ();
3086 return false;
3089 /* Replace SSA_NAMES in expr with their value numbers, and return the
3090 result.
3091 This is performed in place. */
3093 static tree
3094 valueize_expr (tree expr)
3096 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3098 case tcc_binary:
3099 TREE_OPERAND (expr, 1) = vn_valueize (TREE_OPERAND (expr, 1));
3100 /* Fallthru. */
3101 case tcc_unary:
3102 TREE_OPERAND (expr, 0) = vn_valueize (TREE_OPERAND (expr, 0));
3103 break;
3104 default:;
3106 return expr;
3109 /* Simplify the binary expression RHS, and return the result if
3110 simplified. */
3112 static tree
3113 simplify_binary_expression (gimple stmt)
3115 tree result = NULL_TREE;
3116 tree op0 = gimple_assign_rhs1 (stmt);
3117 tree op1 = gimple_assign_rhs2 (stmt);
3118 enum tree_code code = gimple_assign_rhs_code (stmt);
3120 /* This will not catch every single case we could combine, but will
3121 catch those with constants. The goal here is to combine constants
3122 between expressions while avoiding infinite expansion of
3123 expressions during simplification. */
3124 op0 = vn_valueize (op0);
3125 if (TREE_CODE (op0) == SSA_NAME
3126 && (VN_INFO (op0)->has_constants
3127 || TREE_CODE_CLASS (code) == tcc_comparison
3128 || code == COMPLEX_EXPR))
3129 op0 = valueize_expr (vn_get_expr_for (op0));
3131 op1 = vn_valueize (op1);
3132 if (TREE_CODE (op1) == SSA_NAME
3133 && (VN_INFO (op1)->has_constants
3134 || code == COMPLEX_EXPR))
3135 op1 = valueize_expr (vn_get_expr_for (op1));
3137 /* Pointer plus constant can be represented as an invariant address.
3138 Do so to allow further propagation; see also tree forwprop. */
3139 if (code == POINTER_PLUS_EXPR
3140 && host_integerp (op1, 1)
3141 && TREE_CODE (op0) == ADDR_EXPR
3142 && is_gimple_min_invariant (op0))
3143 return build_invariant_address (TREE_TYPE (op0),
3144 TREE_OPERAND (op0, 0),
3145 TREE_INT_CST_LOW (op1));
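/* For example (illustrative), with a_1 = &s and p_2 = a_1 + 4, the
   addition is represented as the invariant address of s at offset 4,
   which later lookups can match against other accesses to s.  */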
3147 /* Avoid folding if nothing changed. */
3148 if (op0 == gimple_assign_rhs1 (stmt)
3149 && op1 == gimple_assign_rhs2 (stmt))
3150 return NULL_TREE;
3152 fold_defer_overflow_warnings ();
3154 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3155 if (result)
3156 STRIP_USELESS_TYPE_CONVERSION (result);
3158 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3159 stmt, 0);
3161 /* Make sure result is not a complex expression consisting
3162 of operators applied to operators (i.e. (a + b) + (a + c)).
3163 Otherwise, we will end up with unbounded expressions if
3164 fold does anything at all. */
3165 if (result && valid_gimple_rhs_p (result))
3166 return result;
3168 return NULL_TREE;
3171 /* Simplify the unary expression RHS, and return the result if
3172 simplified. */
3174 static tree
3175 simplify_unary_expression (gimple stmt)
3177 tree result = NULL_TREE;
3178 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3179 enum tree_code code = gimple_assign_rhs_code (stmt);
3181 /* We handle some tcc_reference codes here that are all
3182 GIMPLE_ASSIGN_SINGLE codes. */
3183 if (code == REALPART_EXPR
3184 || code == IMAGPART_EXPR
3185 || code == VIEW_CONVERT_EXPR
3186 || code == BIT_FIELD_REF)
3187 op0 = TREE_OPERAND (op0, 0);
3189 if (TREE_CODE (op0) != SSA_NAME)
3190 return NULL_TREE;
3192 orig_op0 = op0;
3193 op0 = vn_valueize (op0);
3194 if (TREE_CODE (op0) == SSA_NAME)
3196 if (VN_INFO (op0)->has_constants)
3197 op0 = valueize_expr (vn_get_expr_for (op0));
3198 else if (CONVERT_EXPR_CODE_P (code)
3199 || code == REALPART_EXPR
3200 || code == IMAGPART_EXPR
3201 || code == VIEW_CONVERT_EXPR
3202 || code == BIT_FIELD_REF)
3204 /* We want to do tree-combining on conversion-like expressions.
3205 Make sure we feed only SSA_NAMEs or constants to fold though. */
3206 tree tem = valueize_expr (vn_get_expr_for (op0));
3207 if (UNARY_CLASS_P (tem)
3208 || BINARY_CLASS_P (tem)
3209 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3210 || TREE_CODE (tem) == SSA_NAME
3211 || TREE_CODE (tem) == CONSTRUCTOR
3212 || is_gimple_min_invariant (tem))
3213 op0 = tem;
3217 /* Avoid folding if nothing changed. */
3218 if (op0 == orig_op0)
3219 return NULL_TREE;
3221 if (code == BIT_FIELD_REF)
3223 tree rhs = gimple_assign_rhs1 (stmt);
3224 result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3225 op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3227 else
3228 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3229 if (result)
3231 STRIP_USELESS_TYPE_CONVERSION (result);
3232 if (valid_gimple_rhs_p (result))
3233 return result;
3236 return NULL_TREE;
3239 /* Try to simplify RHS using equivalences and constant folding. */
3241 static tree
3242 try_to_simplify (gimple stmt)
3244 enum tree_code code = gimple_assign_rhs_code (stmt);
3245 tree tem;
3247 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
3248 in this case, there is no point in doing extra work. */
3249 if (code == SSA_NAME)
3250 return NULL_TREE;
3252 /* First try constant folding based on our current lattice. */
3253 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize);
3254 if (tem
3255 && (TREE_CODE (tem) == SSA_NAME
3256 || is_gimple_min_invariant (tem)))
3257 return tem;
3259 /* If that didn't work try combining multiple statements. */
3260 switch (TREE_CODE_CLASS (code))
3262 case tcc_reference:
3263 /* Fallthrough for some unary codes that can operate on registers. */
3264 if (!(code == REALPART_EXPR
3265 || code == IMAGPART_EXPR
3266 || code == VIEW_CONVERT_EXPR
3267 || code == BIT_FIELD_REF))
3268 break;
3269 /* We could do a little more with unary ops, if they expand
3270 into binary ops, but it's debatable whether it is worth it. */
3271 case tcc_unary:
3272 return simplify_unary_expression (stmt);
3274 case tcc_comparison:
3275 case tcc_binary:
3276 return simplify_binary_expression (stmt);
3278 default:
3279 break;
3282 return NULL_TREE;
3285 /* Visit and value number USE, return true if the value number
3286 changed. */
3288 static bool
3289 visit_use (tree use)
3291 bool changed = false;
3292 gimple stmt = SSA_NAME_DEF_STMT (use);
3294 mark_use_processed (use);
3296 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3297 if (dump_file && (dump_flags & TDF_DETAILS)
3298 && !SSA_NAME_IS_DEFAULT_DEF (use))
3300 fprintf (dump_file, "Value numbering ");
3301 print_generic_expr (dump_file, use, 0);
3302 fprintf (dump_file, " stmt = ");
3303 print_gimple_stmt (dump_file, stmt, 0, 0);
3306 /* Handle uninitialized uses. */
3307 if (SSA_NAME_IS_DEFAULT_DEF (use))
3308 changed = set_ssa_val_to (use, use);
3309 else
3311 if (gimple_code (stmt) == GIMPLE_PHI)
3312 changed = visit_phi (stmt);
3313 else if (gimple_has_volatile_ops (stmt))
3314 changed = defs_to_varying (stmt);
3315 else if (is_gimple_assign (stmt))
3317 enum tree_code code = gimple_assign_rhs_code (stmt);
3318 tree lhs = gimple_assign_lhs (stmt);
3319 tree rhs1 = gimple_assign_rhs1 (stmt);
3320 tree simplified;
3322 /* Shortcut for copies. Simplifying copies is pointless,
3323 since we copy the expression and value they represent. */
3324 if (code == SSA_NAME
3325 && TREE_CODE (lhs) == SSA_NAME)
3327 changed = visit_copy (lhs, rhs1);
3328 goto done;
3330 simplified = try_to_simplify (stmt);
3331 if (simplified)
3333 if (dump_file && (dump_flags & TDF_DETAILS))
3335 fprintf (dump_file, "RHS ");
3336 print_gimple_expr (dump_file, stmt, 0, 0);
3337 fprintf (dump_file, " simplified to ");
3338 print_generic_expr (dump_file, simplified, 0);
3339 if (TREE_CODE (lhs) == SSA_NAME)
3340 fprintf (dump_file, " has constants %d\n",
3341 expr_has_constants (simplified));
3342 else
3343 fprintf (dump_file, "\n");
3346 /* Setting value numbers to constants will occasionally
3347 screw up phi congruence because constants are not
3348 uniquely associated with a single ssa name that can be
3349 looked up. */
3350 if (simplified
3351 && is_gimple_min_invariant (simplified)
3352 && TREE_CODE (lhs) == SSA_NAME)
3354 VN_INFO (lhs)->expr = simplified;
3355 VN_INFO (lhs)->has_constants = true;
3356 changed = set_ssa_val_to (lhs, simplified);
3357 goto done;
3359 else if (simplified
3360 && TREE_CODE (simplified) == SSA_NAME
3361 && TREE_CODE (lhs) == SSA_NAME)
3363 changed = visit_copy (lhs, simplified);
3364 goto done;
3366 else if (simplified)
3368 if (TREE_CODE (lhs) == SSA_NAME)
3370 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3371 /* We have to unshare the expression or else
3372 valueizing may change the IL stream. */
3373 VN_INFO (lhs)->expr = unshare_expr (simplified);
3376 else if (stmt_has_constants (stmt)
3377 && TREE_CODE (lhs) == SSA_NAME)
3378 VN_INFO (lhs)->has_constants = true;
3379 else if (TREE_CODE (lhs) == SSA_NAME)
3381 /* We reset expr and constantness here because we may
3382 have been value numbering optimistically, and
3383 iterating. They may become non-constant in this case,
3384 even if they were optimistically constant. */
3386 VN_INFO (lhs)->has_constants = false;
3387 VN_INFO (lhs)->expr = NULL_TREE;
3390 if ((TREE_CODE (lhs) == SSA_NAME
3391 /* We can substitute SSA_NAMEs that are live over
3392 abnormal edges with their constant value. */
3393 && !(gimple_assign_copy_p (stmt)
3394 && is_gimple_min_invariant (rhs1))
3395 && !(simplified
3396 && is_gimple_min_invariant (simplified))
3397 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3398 /* Stores or copies from SSA_NAMEs that are live over
3399 abnormal edges are a problem. */
3400 || (code == SSA_NAME
3401 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3402 changed = defs_to_varying (stmt);
3403 else if (REFERENCE_CLASS_P (lhs)
3404 || DECL_P (lhs))
3405 changed = visit_reference_op_store (lhs, rhs1, stmt);
3406 else if (TREE_CODE (lhs) == SSA_NAME)
3408 if ((gimple_assign_copy_p (stmt)
3409 && is_gimple_min_invariant (rhs1))
3410 || (simplified
3411 && is_gimple_min_invariant (simplified)))
3413 VN_INFO (lhs)->has_constants = true;
3414 if (simplified)
3415 changed = set_ssa_val_to (lhs, simplified);
3416 else
3417 changed = set_ssa_val_to (lhs, rhs1);
3419 else
3421 /* First try to lookup the simplified expression. */
3422 if (simplified)
3424 enum gimple_rhs_class rhs_class;
3427 rhs_class = get_gimple_rhs_class (TREE_CODE (simplified));
3428 if ((rhs_class == GIMPLE_UNARY_RHS
3429 || rhs_class == GIMPLE_BINARY_RHS
3430 || rhs_class == GIMPLE_TERNARY_RHS)
3431 && valid_gimple_rhs_p (simplified))
3433 tree result = vn_nary_op_lookup (simplified, NULL);
3434 if (result)
3436 changed = set_ssa_val_to (lhs, result);
3437 goto done;
3442 /* Otherwise visit the original statement. */
3443 switch (vn_get_stmt_kind (stmt))
3445 case VN_NARY:
3446 changed = visit_nary_op (lhs, stmt);
3447 break;
3448 case VN_REFERENCE:
3449 changed = visit_reference_op_load (lhs, rhs1, stmt);
3450 break;
3451 default:
3452 changed = defs_to_varying (stmt);
3453 break;
3457 else
3458 changed = defs_to_varying (stmt);
3460 else if (is_gimple_call (stmt))
3462 tree lhs = gimple_call_lhs (stmt);
3464 /* ??? We could try to simplify calls. */
3466 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3468 if (stmt_has_constants (stmt))
3469 VN_INFO (lhs)->has_constants = true;
3470 else
3472 /* We reset expr and constantness here because we may
3473 have been value numbering optimistically, and
3474 iterating. They may become non-constant in this case,
3475 even if they were optimistically constant. */
3476 VN_INFO (lhs)->has_constants = false;
3477 VN_INFO (lhs)->expr = NULL_TREE;
3480 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3482 changed = defs_to_varying (stmt);
3483 goto done;
3487 if (!gimple_call_internal_p (stmt)
3488 && (/* Calls to the same function with the same vuse
3489 and the same operands do not necessarily return the same
3490 value, unless they're pure or const. */
3491 gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
3492 /* If calls have a vdef, subsequent calls won't have
3493 the same incoming vuse. So, if 2 calls with vdef have the
3494 same vuse, we know they're not subsequent.
3495 We can give two calls to the same function with the same
3496 vuse and the same operands, which are thus not subsequent,
3497 the same value number, because there is no code in the
3498 program that could compare the two values... */
3499 || (gimple_vdef (stmt)
3500 /* ... unless the call returns a pointer which does
3501 not alias with anything else. In which case the
3502 information that the values are distinct are encoded
3503 in the IL. */
3504 && !(gimple_call_return_flags (stmt) & ERF_NOALIAS))))
3505 changed = visit_reference_op_call (lhs, stmt);
3506 else
3507 changed = defs_to_varying (stmt);
3509 else
3510 changed = defs_to_varying (stmt);
3512 done:
3513 return changed;
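/* For example (illustrative), two calls

     x_1 = f (a_2);   # VUSE <.MEM_4>
     ...
     x_3 = f (a_2);   # VUSE <.MEM_4>

   to a pure function f with the same arguments and the same incoming
   memory state receive the same value number via
   visit_reference_op_call, while a call with unknown side effects
   sends all its definitions to varying.  */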
3516 /* Compare two operands by reverse postorder index. */
3518 static int
3519 compare_ops (const void *pa, const void *pb)
3521 const tree opa = *((const tree *)pa);
3522 const tree opb = *((const tree *)pb);
3523 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3524 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3525 basic_block bba;
3526 basic_block bbb;
3528 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3529 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3530 else if (gimple_nop_p (opstmta))
3531 return -1;
3532 else if (gimple_nop_p (opstmtb))
3533 return 1;
3535 bba = gimple_bb (opstmta);
3536 bbb = gimple_bb (opstmtb);
3538 if (!bba && !bbb)
3539 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3540 else if (!bba)
3541 return -1;
3542 else if (!bbb)
3543 return 1;
3545 if (bba == bbb)
3547 if (gimple_code (opstmta) == GIMPLE_PHI
3548 && gimple_code (opstmtb) == GIMPLE_PHI)
3549 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3550 else if (gimple_code (opstmta) == GIMPLE_PHI)
3551 return -1;
3552 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3553 return 1;
3554 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3555 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3556 else
3557 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3559 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3562 /* Sort an array containing members of a strongly connected component
3563 SCC so that the members are ordered by RPO number.
3564 This means that when the sort is complete, iterating through the
3565 array will give you the members in RPO order. */
3567 static void
3568 sort_scc (vec<tree> scc)
3570 scc.qsort (compare_ops);
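/* For example (illustrative), for the loop SCC

     i_1 = PHI <0 (2), i_3 (4)>
     i_3 = i_1 + 1;

   sorting by RPO visits i_1 before i_3 in every iteration, so most
   operands already have up-to-date value numbers when a statement is
   visited and the SCC converges in fewer iterations.  */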
3573 /* Insert the no longer used nary ONARY into the hash table INFO. */
3575 static void
3576 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3578 size_t size = sizeof_vn_nary_op (onary->length);
3579 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3580 &info->nary_obstack);
3581 memcpy (nary, onary, size);
3582 vn_nary_op_insert_into (nary, info->nary, false);
3585 /* Insert the no longer used phi OPHI into the hash table INFO. */
3587 static void
3588 copy_phi (vn_phi_t ophi, vn_tables_t info)
3590 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3591 void **slot;
3592 memcpy (phi, ophi, sizeof (*phi));
3593 ophi->phiargs.create (0);
3594 slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
3595 gcc_assert (!*slot);
3596 *slot = phi;
3599 /* Insert the no longer used reference OREF into the hash table INFO. */
3601 static void
3602 copy_reference (vn_reference_t oref, vn_tables_t info)
3604 vn_reference_t ref;
3605 void **slot;
3606 ref = (vn_reference_t) pool_alloc (info->references_pool);
3607 memcpy (ref, oref, sizeof (*ref));
3608 oref->operands.create (0);
3609 slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
3610 INSERT);
3611 if (*slot)
3612 free_reference (*slot);
3613 *slot = ref;
3616 /* Process a strongly connected component in the SSA graph. */
3618 static void
3619 process_scc (vec<tree> scc)
3621 tree var;
3622 unsigned int i;
3623 unsigned int iterations = 0;
3624 bool changed = true;
3625 htab_iterator hi;
3626 vn_nary_op_t nary;
3627 vn_phi_t phi;
3628 vn_reference_t ref;
3630 /* If the SCC has a single member, just visit it. */
3631 if (scc.length () == 1)
3633 tree use = scc[0];
3634 if (VN_INFO (use)->use_processed)
3635 return;
3636 /* We need to make sure it doesn't form a cycle itself, which can
3637 happen for self-referential PHI nodes. In that case we would
3638 end up inserting an expression with VN_TOP operands into the
3639 valid table which makes us derive bogus equivalences later.
3640 The cheapest way to check this is to assume it for all PHI nodes. */
3641 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3642 /* Fallthru to iteration. */ ;
3643 else
3645 visit_use (use);
3646 return;
3650 /* Iterate over the SCC with the optimistic table until it stops
3651 changing. */
3652 current_info = optimistic_info;
3653 while (changed)
3655 changed = false;
3656 iterations++;
3657 if (dump_file && (dump_flags & TDF_DETAILS))
3658 fprintf (dump_file, "Starting iteration %d\n", iterations);
3659 /* As we are value-numbering optimistically we have to
3660 clear the expression tables and the simplified expressions
3661 in each iteration until we converge. */
3662 htab_empty (optimistic_info->nary);
3663 htab_empty (optimistic_info->phis);
3664 htab_empty (optimistic_info->references);
3665 obstack_free (&optimistic_info->nary_obstack, NULL);
3666 gcc_obstack_init (&optimistic_info->nary_obstack);
3667 empty_alloc_pool (optimistic_info->phis_pool);
3668 empty_alloc_pool (optimistic_info->references_pool);
3669 FOR_EACH_VEC_ELT (scc, i, var)
3670 VN_INFO (var)->expr = NULL_TREE;
3671 FOR_EACH_VEC_ELT (scc, i, var)
3672 changed |= visit_use (var);
3675 statistics_histogram_event (cfun, "SCC iterations", iterations);
3677 /* Finally, copy the contents of the no longer used optimistic
3678 table to the valid table. */
3679 FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
3680 copy_nary (nary, valid_info);
3681 FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
3682 copy_phi (phi, valid_info);
3683 FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
3684 copy_reference (ref, valid_info);
3686 current_info = valid_info;
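/* As an illustrative sketch of the optimistic iteration: for

     x_1 = PHI <0 (2), x_3 (4)>
     x_3 = x_1 + 0;

   the first iteration sets x_1 to 0 (the x_3 argument is still VN_TOP)
   and then simplifies x_3 to 0 as well; the second iteration
   reproduces the same values, nothing changes, and the SCC converges
   with x_1 = x_3 = 0.  */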
3690 /* Pop the components of the found SCC for NAME off the SCC stack
3691 and process them. Returns true if all went well, false if
3692 we run into resource limits. */
3694 static bool
3695 extract_and_process_scc_for_name (tree name)
3697 vec<tree> scc = vNULL;
3698 tree x;
3700 /* Found an SCC, pop the components off the SCC stack and
3701 process them. */
3704 x = sccstack.pop ();
3706 VN_INFO (x)->on_sccstack = false;
3707 scc.safe_push (x);
3708 } while (x != name);
3710 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
3711 if (scc.length ()
3712 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3714 if (dump_file)
3715 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3716 "SCC size %u exceeding %u\n", scc.length (),
3717 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3719 scc.release ();
3720 return false;
3723 if (scc.length () > 1)
3724 sort_scc (scc);
3726 if (dump_file && (dump_flags & TDF_DETAILS))
3727 print_scc (dump_file, scc);
3729 process_scc (scc);
3731 scc.release ();
3733 return true;
3736 /* Depth-first search on NAME to discover and process SCCs in the SSA
3737 graph.
3738 Execution of this algorithm relies on the fact that the SCCs are
3739 popped off the stack in topological order.
3740 Returns true if successful, false if we stopped processing SCCs due
3741 to resource constraints. */
3743 static bool
3744 DFS (tree name)
3746 vec<ssa_op_iter> itervec = vNULL;
3747 vec<tree> namevec = vNULL;
3748 use_operand_p usep = NULL;
3749 gimple defstmt;
3750 tree use;
3751 ssa_op_iter iter;
3753 start_over:
3754 /* SCC info */
3755 VN_INFO (name)->dfsnum = next_dfs_num++;
3756 VN_INFO (name)->visited = true;
3757 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3759 sccstack.safe_push (name);
3760 VN_INFO (name)->on_sccstack = true;
3761 defstmt = SSA_NAME_DEF_STMT (name);
3763 /* Recursively DFS on our operands, looking for SCCs. */
3764 if (!gimple_nop_p (defstmt))
3766 /* Push a new iterator. */
3767 if (gimple_code (defstmt) == GIMPLE_PHI)
3768 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3769 else
3770 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3772 else
3773 clear_and_done_ssa_iter (&iter);
3775 while (1)
3777 /* If we are done processing uses of a name, go up the stack
3778 of iterators and process SCCs as we found them. */
3779 if (op_iter_done (&iter))
3781 /* See if we found an SCC. */
3782 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3783 if (!extract_and_process_scc_for_name (name))
3785 namevec.release ();
3786 itervec.release ();
3787 return false;
3790 /* Check if we are done. */
3791 if (namevec.is_empty ())
3793 namevec.release ();
3794 itervec.release ();
3795 return true;
3798 /* Restore the last use walker and continue walking there. */
3799 use = name;
3800 name = namevec.pop ();
3801 memcpy (&iter, &itervec.last (),
3802 sizeof (ssa_op_iter));
3803 itervec.pop ();
3804 goto continue_walking;
3807 use = USE_FROM_PTR (usep);
3809 /* Since we handle phi nodes, we will sometimes get
3810 invariants in the use expression. */
3811 if (TREE_CODE (use) == SSA_NAME)
3813 if (! (VN_INFO (use)->visited))
3815 /* Recurse by pushing the current use walking state on
3816 the stack and starting over. */
3817 itervec.safe_push (iter);
3818 namevec.safe_push (name);
3819 name = use;
3820 goto start_over;
3822 continue_walking:
3823 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3824 VN_INFO (use)->low);
3826 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3827 && VN_INFO (use)->on_sccstack)
3829 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3830 VN_INFO (name)->low);
3834 usep = op_iter_next_use (&iter);
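/* As an illustrative sketch of the Tarjan walk above: for

     a_1 = PHI <0 (2), b_2 (4)>
     b_2 = a_1 + 1;

   the DFS from a_1 descends into b_2, sees the back edge to a_1 (which
   is still on the SCC stack), propagates the smaller dfsnum through
   ->low, and once a_1's low equals its dfsnum both names are popped
   off the stack as a single SCC.  */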
3838 /* Allocate a value number table. */
3840 static void
3841 allocate_vn_table (vn_tables_t table)
3843 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3844 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3845 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3846 free_reference);
3848 gcc_obstack_init (&table->nary_obstack);
3849 table->phis_pool = create_alloc_pool ("VN phis",
3850 sizeof (struct vn_phi_s),
3851 30);
3852 table->references_pool = create_alloc_pool ("VN references",
3853 sizeof (struct vn_reference_s),
3854 30);
3857 /* Free a value number table. */
3859 static void
3860 free_vn_table (vn_tables_t table)
3862 htab_delete (table->phis);
3863 htab_delete (table->nary);
3864 htab_delete (table->references);
3865 obstack_free (&table->nary_obstack, NULL);
3866 free_alloc_pool (table->phis_pool);
3867 free_alloc_pool (table->references_pool);
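/* Initialize the data structures used by SCC value numbering:
   dominance info, the SCC stack, per-SSA-name info, RPO numbers and
   the valid and optimistic hash tables.  */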
3870 static void
3871 init_scc_vn (void)
3873 size_t i;
3874 int j;
3875 int *rpo_numbers_temp;
3877 calculate_dominance_info (CDI_DOMINATORS);
3878 sccstack.create (0);
3879 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3880 free);
3882 constant_value_ids = BITMAP_ALLOC (NULL);
3884 next_dfs_num = 1;
3885 next_value_id = 1;
3887 vn_ssa_aux_table.create (num_ssa_names + 1);
3888 /* The create call above doesn't actually grow the vector to the
3889 right size; it just preallocates the space to do so. */
3890 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
3891 gcc_obstack_init (&vn_ssa_aux_obstack);
3893 shared_lookup_phiargs.create (0);
3894 shared_lookup_references.create (0);
3895 rpo_numbers = XNEWVEC (int, last_basic_block);
3896 rpo_numbers_temp = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
3897 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3899 /* rpo_numbers_temp holds the RPO ordering: rpo_numbers_temp[i] = bb
3900 means that the i'th block in RPO order is bb. We want to map
3901 bb indices to RPO numbers, so we need to invert this array. */
3902 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3903 rpo_numbers[rpo_numbers_temp[j]] = j;
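/* For example (illustrative), if rpo_numbers_temp is {2, 4, 3}, then
   block 2 comes first in RPO, so we end up with rpo_numbers[2] = 0,
   rpo_numbers[4] = 1 and rpo_numbers[3] = 2.  */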
3905 XDELETE (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
	{
	  VN_INFO_GET (name)->valnum = VN_TOP;
	  VN_INFO (name)->expr = NULL_TREE;
	  VN_INFO (name)->value_id = 0;
	}
    }

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}

void
free_scc_vn (void)
{
  size_t i;

  htab_delete (constant_to_value_id);
  BITMAP_FREE (constant_value_ids);
  shared_lookup_phiargs.release ();
  shared_lookup_references.release ();
  XDELETEVEC (rpo_numbers);

  /* Names flagged needs_insertion were created during value
     numbering itself and are released again here.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->needs_insertion)
	release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  vn_ssa_aux_table.release ();

  sccstack.release ();
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}

/* Set *ID according to RESULT.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result && TREE_CODE (result) == SSA_NAME)
    *id = VN_INFO (result)->value_id;
  else if (result && is_gimple_min_invariant (result))
    *id = get_or_alloc_constant_value_id (result);
  else
    *id = get_next_value_id ();
}
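
/* So, e.g., a result that is the (hypothetical) SSA name t_3 reuses
   t_3's value id, a constant result such as 42 gets the shared id of
   that constant, and a NULL or otherwise unhandled result still
   receives a fresh, unique id.  */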

/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  htab_iterator hi;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HTAB_ELEMENT (valid_info->nary,
			 vno, vn_nary_op_t, hi)
    set_value_id_for_result (vno->result, &vno->value_id);

  FOR_EACH_HTAB_ELEMENT (valid_info->phis,
			 vp, vn_phi_t, hi)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HTAB_ELEMENT (valid_info->references,
			 vr, vn_reference_t, hi)
    set_value_id_for_result (vr->result, &vr->value_id);
}
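
/* E.g. a nary entry recording a_1 + 1 with result b_2 (hypothetical
   names) picks up b_2's value id here, so later consumers of the
   valid table can compare entries by value id instead of by tree.  */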

/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
   how the alias oracle is used to walk memory references during
   the VN process.  */

bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  size_t i;
  tree param;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();
  current_info = valid_info;

  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    {
      tree def = ssa_default_def (cfun, param);
      if (def)
	VN_INFO (def)->valnum = def;
    }

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->visited == false
	  && !has_zero_uses (name))
	if (!DFS (name))
	  {
	    free_scc_vn ();
	    return false;
	  }
    }
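
  /* DFS bails out when a single SCC grows beyond the limit set by
     --param sccvn-max-scc-size; in that case all value numbering
     state has already been torn down and the caller must proceed
     without value numbers.  */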

  /* Initialize the value ids.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (info->valnum == name
	  || info->valnum == VN_TOP)
	info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }
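
  /* After this loop a name that leads its equivalence class
     (info->valnum == name) or never got a value (VN_TOP) owns a
     fresh id, and a name valued to a constant, say 7, shares that
     constant's id.  Names valued to another SSA name are picked up
     by the propagation below.  */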

  /* Propagate.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
	  && info->valnum != name
	  && info->value_id != VN_INFO (info->valnum)->value_id)
	info->value_id = VN_INFO (info->valnum)->value_id;
    }
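
  /* For example (hypothetical names): if b_2 and c_3 both have a_1
     as their value number, both inherit a_1's id here, so the whole
     class {a_1, b_2, c_3} ends up with a single value id.  */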

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);
	  if (name
	      && VN_INFO (name)->visited
	      && SSA_VAL (name) != name)
	    {
	      print_generic_expr (dump_file, name, 0);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, SSA_VAL (name), 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  return true;
}

/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}
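
/* Note the relation between the two accessors above:
   get_max_value_id returns the id that get_next_value_id would hand
   out next, so every id handed out so far is strictly smaller than
   it, which lets consumers size structures indexed by value id.  */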

/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
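
/* For example, two distinct INTEGER_CST nodes that both represent 42
   in the same type compare equal through operand_equal_p, while two
   distinct SSA names never do; an SSA name is only equal to itself
   via the pointer check.  */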

/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (type)
	       && TYPE_OVERFLOW_TRAPS (type))
	honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
				       honor_trapv,
				       honor_nans, honor_snans, rhs2,
				       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}
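
/* For instance, a signed addition compiled with -ftrapv has
   honor_trapv set and is reported as possibly trapping, as is an
   integer division whose divisor is not a known-nonzero constant; a
   bitwise AND of two registers is not.  */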