Update count_scale for AutoFDO to prevent over-scale.
[official-gcc.git] / gcc-4_8 / gcc / tree-ssa-sccvn.c
blob3cbc97024e410f16e6b0793ba6940293d7abc860
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2013 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "basic-block.h"
27 #include "gimple-pretty-print.h"
28 #include "tree-inline.h"
29 #include "tree-flow.h"
30 #include "gimple.h"
31 #include "dumpfile.h"
32 #include "hashtab.h"
33 #include "alloc-pool.h"
34 #include "flags.h"
35 #include "bitmap.h"
36 #include "cfgloop.h"
37 #include "params.h"
38 #include "tree-ssa-propagate.h"
39 #include "tree-ssa-sccvn.h"
40 #include "gimple-fold.h"
42 /* This algorithm is based on the SCC algorithm presented by Keith
43 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
44 (http://citeseer.ist.psu.edu/41805.html). In
45 straight line code, it is equivalent to a regular hash based value
46 numbering that is performed in reverse postorder.
48 For code with cycles, there are two alternatives, both of which
49 require keeping the hashtables separate from the actual list of
50 value numbers for SSA names.
52 1. Iterate value numbering in an RPO walk of the blocks, removing
53 all the entries from the hashtable after each iteration (but
54 keeping the SSA name->value number mapping between iterations).
55 Iterate until it does not change.
57 2. Perform value numbering as part of an SCC walk on the SSA graph,
58 iterating only the cycles in the SSA graph until they do not change
59 (using a separate, optimistic hashtable for value numbering the SCC
60 operands).
62 The second is not just faster in practice (because most SSA graph
63 cycles do not involve all the variables in the graph), it also has
64 some nice properties.
66 One of these nice properties is that when we pop an SCC off the
67 stack, we are guaranteed to have processed all the operands coming from
68 *outside of that SCC*, so we do not need to do anything special to
69 ensure they have value numbers.
71 Another nice property is that the SCC walk is done as part of a DFS
72 of the SSA graph, which makes it easy to perform combining and
73 simplifying operations at the same time.
75 The code below is deliberately written in a way that makes it easy
76 to separate the SCC walk from the other work it does.
78 In order to propagate constants through the code, we track which
79 expressions contain constants, and use those while folding. In
80 theory, we could also track expressions whose value numbers are
81 replaced, in case we end up folding based on expression
82 identities.
84 In order to value number memory, we assign value numbers to vuses.
85 This enables us to note that, for example, stores to the same
86 address of the same value from the same starting memory states are
87 equivalent.
88 TODO:
90 1. We can iterate only the changing portions of the SCC's, but
91 I have not seen an SCC big enough for this to be a win.
92 2. If you differentiate between phi nodes for loops and phi nodes
93 for if-then-else, you can properly consider phi nodes in different
94 blocks for equivalence.
95 3. We could value number vuses in more cases, particularly, whole
96 structure copies.
99 /* The set of hashtables and alloc_pool's for their items. */
101 typedef struct vn_tables_s
103 htab_t nary;
104 htab_t phis;
105 htab_t references;
106 struct obstack nary_obstack;
107 alloc_pool phis_pool;
108 alloc_pool references_pool;
109 } *vn_tables_t;
111 static htab_t constant_to_value_id;
112 static bitmap constant_value_ids;
115 /* Valid hashtables storing information we have proven to be
116 correct. */
118 static vn_tables_t valid_info;
120 /* Optimistic hashtables storing information we are making assumptions about
121 during iterations. */
123 static vn_tables_t optimistic_info;
125 /* Pointer to the set of hashtables that is currently being used.
126 Should always point to either the optimistic_info, or the
127 valid_info. */
129 static vn_tables_t current_info;
132 /* Reverse post order index for each basic block. */
134 static int *rpo_numbers;
136 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
138 /* This represents the top of the VN lattice, which is the universal
139 value. */
141 tree VN_TOP;
143 /* Unique counter for our value ids. */
145 static unsigned int next_value_id;
147 /* Next DFS number and the stack for strongly connected component
148 detection. */
150 static unsigned int next_dfs_num;
151 static vec<tree> sccstack;
155 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
156 are allocated on an obstack for locality reasons, and to free them
157 without looping over the vec. */
159 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
160 static struct obstack vn_ssa_aux_obstack;
162 /* Return the value numbering information for a given SSA name. */
164 vn_ssa_aux_t
165 VN_INFO (tree name)
167 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
168 gcc_checking_assert (res);
169 return res;
172 /* Set the value numbering info for a given SSA name to a given
173 value. */
175 static inline void
176 VN_INFO_SET (tree name, vn_ssa_aux_t value)
178 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
181 /* Initialize the value numbering info for a given SSA name.
182 This should be called just once for every SSA name. */
184 vn_ssa_aux_t
185 VN_INFO_GET (tree name)
187 vn_ssa_aux_t newinfo;
189 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
190 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
191 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
192 vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
193 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
194 return newinfo;
198 /* Get the representative expression for the SSA_NAME NAME. Returns
199 the representative SSA_NAME if there is no expression associated with it. */
201 tree
202 vn_get_expr_for (tree name)
204 vn_ssa_aux_t vn = VN_INFO (name);
205 gimple def_stmt;
206 tree expr = NULL_TREE;
207 enum tree_code code;
209 if (vn->valnum == VN_TOP)
210 return name;
212 /* If the value-number is a constant it is the representative
213 expression. */
214 if (TREE_CODE (vn->valnum) != SSA_NAME)
215 return vn->valnum;
217 /* Get to the information of the value of this SSA_NAME. */
218 vn = VN_INFO (vn->valnum);
220 /* If the value-number is a constant it is the representative
221 expression. */
222 if (TREE_CODE (vn->valnum) != SSA_NAME)
223 return vn->valnum;
225 /* Else if we have an expression, return it. */
226 if (vn->expr != NULL_TREE)
227 return vn->expr;
229 /* Otherwise use the defining statement to build the expression. */
230 def_stmt = SSA_NAME_DEF_STMT (vn->valnum);
232 /* If the value number is not an assignment use it directly. */
233 if (!is_gimple_assign (def_stmt))
234 return vn->valnum;
236 /* FIXME tuples. This is incomplete and likely will miss some
237 simplifications. */
238 code = gimple_assign_rhs_code (def_stmt);
239 switch (TREE_CODE_CLASS (code))
241 case tcc_reference:
242 if ((code == REALPART_EXPR
243 || code == IMAGPART_EXPR
244 || code == VIEW_CONVERT_EXPR)
245 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
246 0)) == SSA_NAME)
247 expr = fold_build1 (code,
248 gimple_expr_type (def_stmt),
249 TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
250 break;
252 case tcc_unary:
253 expr = fold_build1 (code,
254 gimple_expr_type (def_stmt),
255 gimple_assign_rhs1 (def_stmt));
256 break;
258 case tcc_binary:
259 expr = fold_build2 (code,
260 gimple_expr_type (def_stmt),
261 gimple_assign_rhs1 (def_stmt),
262 gimple_assign_rhs2 (def_stmt));
263 break;
265 case tcc_exceptional:
266 if (code == CONSTRUCTOR
267 && TREE_CODE
268 (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
269 expr = gimple_assign_rhs1 (def_stmt);
270 break;
272 default:;
274 if (expr == NULL_TREE)
275 return vn->valnum;
277 /* Cache the expression. */
278 vn->expr = expr;
280 return expr;
283 /* Return the vn_kind the expression computed by the stmt should be
284 associated with. */
286 enum vn_kind
287 vn_get_stmt_kind (gimple stmt)
289 switch (gimple_code (stmt))
291 case GIMPLE_CALL:
292 return VN_REFERENCE;
293 case GIMPLE_PHI:
294 return VN_PHI;
295 case GIMPLE_ASSIGN:
297 enum tree_code code = gimple_assign_rhs_code (stmt);
298 tree rhs1 = gimple_assign_rhs1 (stmt);
299 switch (get_gimple_rhs_class (code))
301 case GIMPLE_UNARY_RHS:
302 case GIMPLE_BINARY_RHS:
303 case GIMPLE_TERNARY_RHS:
304 return VN_NARY;
305 case GIMPLE_SINGLE_RHS:
306 switch (TREE_CODE_CLASS (code))
308 case tcc_reference:
309 /* VOP-less references can go through unary case. */
310 if ((code == REALPART_EXPR
311 || code == IMAGPART_EXPR
312 || code == VIEW_CONVERT_EXPR
313 || code == BIT_FIELD_REF)
314 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
315 return VN_NARY;
317 /* Fallthrough. */
318 case tcc_declaration:
319 return VN_REFERENCE;
321 case tcc_constant:
322 return VN_CONSTANT;
324 default:
325 if (code == ADDR_EXPR)
326 return (is_gimple_min_invariant (rhs1)
327 ? VN_CONSTANT : VN_REFERENCE);
328 else if (code == CONSTRUCTOR)
329 return VN_NARY;
330 return VN_NONE;
332 default:
333 return VN_NONE;
336 default:
337 return VN_NONE;
341 /* Free a phi operation structure VP. */
343 static void
344 free_phi (void *vp)
346 vn_phi_t phi = (vn_phi_t) vp;
347 phi->phiargs.release ();
350 /* Free a reference operation structure VP. */
352 static void
353 free_reference (void *vp)
355 vn_reference_t vr = (vn_reference_t) vp;
356 vr->operands.release ();
359 /* Hash table equality function for vn_constant_t. */
361 static int
362 vn_constant_eq (const void *p1, const void *p2)
364 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
365 const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;
367 if (vc1->hashcode != vc2->hashcode)
368 return false;
370 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
373 /* Hash table hash function for vn_constant_t. */
375 static hashval_t
376 vn_constant_hash (const void *p1)
378 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
379 return vc1->hashcode;
382 /* Lookup a value id for CONSTANT and return it. If it does not
383 exist returns 0. */
385 unsigned int
386 get_constant_value_id (tree constant)
388 void **slot;
389 struct vn_constant_s vc;
391 vc.hashcode = vn_hash_constant_with_type (constant);
392 vc.constant = constant;
393 slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
394 vc.hashcode, NO_INSERT);
395 if (slot)
396 return ((vn_constant_t)*slot)->value_id;
397 return 0;
400 /* Lookup a value id for CONSTANT, and if it does not exist, create a
401 new one and return it. If it does exist, return it. */
403 unsigned int
404 get_or_alloc_constant_value_id (tree constant)
406 void **slot;
407 struct vn_constant_s vc;
408 vn_constant_t vcp;
410 vc.hashcode = vn_hash_constant_with_type (constant);
411 vc.constant = constant;
412 slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
413 vc.hashcode, INSERT);
414 if (*slot)
415 return ((vn_constant_t)*slot)->value_id;
417 vcp = XNEW (struct vn_constant_s);
418 vcp->hashcode = vc.hashcode;
419 vcp->constant = constant;
420 vcp->value_id = get_next_value_id ();
421 *slot = (void *) vcp;
422 bitmap_set_bit (constant_value_ids, vcp->value_id);
423 return vcp->value_id;
426 /* Return true if V is a value id for a constant. */
428 bool
429 value_id_constant_p (unsigned int v)
431 return bitmap_bit_p (constant_value_ids, v);
434 /* Compare two reference operands P1 and P2 for equality. Return true if
435 they are equal, and false otherwise. */
437 static int
438 vn_reference_op_eq (const void *p1, const void *p2)
440 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
441 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
443 return (vro1->opcode == vro2->opcode
444 /* We do not care for differences in type qualification. */
445 && (vro1->type == vro2->type
446 || (vro1->type && vro2->type
447 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
448 TYPE_MAIN_VARIANT (vro2->type))))
449 && expressions_equal_p (vro1->op0, vro2->op0)
450 && expressions_equal_p (vro1->op1, vro2->op1)
451 && expressions_equal_p (vro1->op2, vro2->op2));
454 /* Compute the hash for a reference operand VRO1. */
456 static hashval_t
457 vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
459 result = iterative_hash_hashval_t (vro1->opcode, result);
460 if (vro1->op0)
461 result = iterative_hash_expr (vro1->op0, result);
462 if (vro1->op1)
463 result = iterative_hash_expr (vro1->op1, result);
464 if (vro1->op2)
465 result = iterative_hash_expr (vro1->op2, result);
466 return result;
469 /* Return the hashcode for a given reference operation P1. */
471 static hashval_t
472 vn_reference_hash (const void *p1)
474 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
475 return vr1->hashcode;
478 /* Compute a hash for the reference operation VR1 and return it. */
480 hashval_t
481 vn_reference_compute_hash (const vn_reference_t vr1)
483 hashval_t result = 0;
484 int i;
485 vn_reference_op_t vro;
486 HOST_WIDE_INT off = -1;
487 bool deref = false;
489 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
491 if (vro->opcode == MEM_REF)
492 deref = true;
493 else if (vro->opcode != ADDR_EXPR)
494 deref = false;
495 if (vro->off != -1)
497 if (off == -1)
498 off = 0;
499 off += vro->off;
501 else
503 if (off != -1
504 && off != 0)
505 result = iterative_hash_hashval_t (off, result);
506 off = -1;
507 if (deref
508 && vro->opcode == ADDR_EXPR)
510 if (vro->op0)
512 tree op = TREE_OPERAND (vro->op0, 0);
513 result = iterative_hash_hashval_t (TREE_CODE (op), result);
514 result = iterative_hash_expr (op, result);
517 else
518 result = vn_reference_op_compute_hash (vro, result);
521 if (vr1->vuse)
522 result += SSA_NAME_VERSION (vr1->vuse);
524 return result;
527 /* Return true if reference operations P1 and P2 are equivalent. This
528 means they have the same set of operands and vuses. */
531 vn_reference_eq (const void *p1, const void *p2)
533 unsigned i, j;
535 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
536 const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
537 if (vr1->hashcode != vr2->hashcode)
538 return false;
540 /* Early out if this is not a hash collision. */
541 if (vr1->hashcode != vr2->hashcode)
542 return false;
544 /* The VOP needs to be the same. */
545 if (vr1->vuse != vr2->vuse)
546 return false;
548 /* If the operands are the same we are done. */
549 if (vr1->operands == vr2->operands)
550 return true;
552 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
553 return false;
555 if (INTEGRAL_TYPE_P (vr1->type)
556 && INTEGRAL_TYPE_P (vr2->type))
558 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
559 return false;
561 else if (INTEGRAL_TYPE_P (vr1->type)
562 && (TYPE_PRECISION (vr1->type)
563 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
564 return false;
565 else if (INTEGRAL_TYPE_P (vr2->type)
566 && (TYPE_PRECISION (vr2->type)
567 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
568 return false;
570 i = 0;
571 j = 0;
574 HOST_WIDE_INT off1 = 0, off2 = 0;
575 vn_reference_op_t vro1, vro2;
576 vn_reference_op_s tem1, tem2;
577 bool deref1 = false, deref2 = false;
578 for (; vr1->operands.iterate (i, &vro1); i++)
580 if (vro1->opcode == MEM_REF)
581 deref1 = true;
582 if (vro1->off == -1)
583 break;
584 off1 += vro1->off;
586 for (; vr2->operands.iterate (j, &vro2); j++)
588 if (vro2->opcode == MEM_REF)
589 deref2 = true;
590 if (vro2->off == -1)
591 break;
592 off2 += vro2->off;
594 if (off1 != off2)
595 return false;
596 if (deref1 && vro1->opcode == ADDR_EXPR)
598 memset (&tem1, 0, sizeof (tem1));
599 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
600 tem1.type = TREE_TYPE (tem1.op0);
601 tem1.opcode = TREE_CODE (tem1.op0);
602 vro1 = &tem1;
603 deref1 = false;
605 if (deref2 && vro2->opcode == ADDR_EXPR)
607 memset (&tem2, 0, sizeof (tem2));
608 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
609 tem2.type = TREE_TYPE (tem2.op0);
610 tem2.opcode = TREE_CODE (tem2.op0);
611 vro2 = &tem2;
612 deref2 = false;
614 if (deref1 != deref2)
615 return false;
616 if (!vn_reference_op_eq (vro1, vro2))
617 return false;
618 ++j;
619 ++i;
621 while (vr1->operands.length () != i
622 || vr2->operands.length () != j);
624 return true;
627 /* Copy the operations present in load/store REF into RESULT, a vector of
628 vn_reference_op_s's. */
630 void
631 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
633 if (TREE_CODE (ref) == TARGET_MEM_REF)
635 vn_reference_op_s temp;
637 memset (&temp, 0, sizeof (temp));
638 temp.type = TREE_TYPE (ref);
639 temp.opcode = TREE_CODE (ref);
640 temp.op0 = TMR_INDEX (ref);
641 temp.op1 = TMR_STEP (ref);
642 temp.op2 = TMR_OFFSET (ref);
643 temp.off = -1;
644 result->safe_push (temp);
646 memset (&temp, 0, sizeof (temp));
647 temp.type = NULL_TREE;
648 temp.opcode = ERROR_MARK;
649 temp.op0 = TMR_INDEX2 (ref);
650 temp.off = -1;
651 result->safe_push (temp);
653 memset (&temp, 0, sizeof (temp));
654 temp.type = NULL_TREE;
655 temp.opcode = TREE_CODE (TMR_BASE (ref));
656 temp.op0 = TMR_BASE (ref);
657 temp.off = -1;
658 result->safe_push (temp);
659 return;
662 /* For non-calls, store the information that makes up the address. */
664 while (ref)
666 vn_reference_op_s temp;
668 memset (&temp, 0, sizeof (temp));
669 temp.type = TREE_TYPE (ref);
670 temp.opcode = TREE_CODE (ref);
671 temp.off = -1;
673 switch (temp.opcode)
675 case MODIFY_EXPR:
676 temp.op0 = TREE_OPERAND (ref, 1);
677 break;
678 case WITH_SIZE_EXPR:
679 temp.op0 = TREE_OPERAND (ref, 1);
680 temp.off = 0;
681 break;
682 case MEM_REF:
683 /* The base address gets its own vn_reference_op_s structure. */
684 temp.op0 = TREE_OPERAND (ref, 1);
685 if (host_integerp (TREE_OPERAND (ref, 1), 0))
686 temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
687 break;
688 case BIT_FIELD_REF:
689 /* Record bits and position. */
690 temp.op0 = TREE_OPERAND (ref, 1);
691 temp.op1 = TREE_OPERAND (ref, 2);
692 break;
693 case COMPONENT_REF:
694 /* The field decl is enough to unambiguously specify the field,
695 a matching type is not necessary and a mismatching type
696 is always a spurious difference. */
697 temp.type = NULL_TREE;
698 temp.op0 = TREE_OPERAND (ref, 1);
699 temp.op1 = TREE_OPERAND (ref, 2);
701 tree this_offset = component_ref_field_offset (ref);
702 if (this_offset
703 && TREE_CODE (this_offset) == INTEGER_CST)
705 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
706 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
708 double_int off
709 = tree_to_double_int (this_offset)
710 + tree_to_double_int (bit_offset)
711 .arshift (BITS_PER_UNIT == 8
712 ? 3 : exact_log2 (BITS_PER_UNIT),
713 HOST_BITS_PER_DOUBLE_INT);
714 if (off.fits_shwi ())
715 temp.off = off.low;
719 break;
720 case ARRAY_RANGE_REF:
721 case ARRAY_REF:
722 /* Record index as operand. */
723 temp.op0 = TREE_OPERAND (ref, 1);
724 /* Always record lower bounds and element size. */
725 temp.op1 = array_ref_low_bound (ref);
726 temp.op2 = array_ref_element_size (ref);
727 if (TREE_CODE (temp.op0) == INTEGER_CST
728 && TREE_CODE (temp.op1) == INTEGER_CST
729 && TREE_CODE (temp.op2) == INTEGER_CST)
731 double_int off = tree_to_double_int (temp.op0);
732 off += -tree_to_double_int (temp.op1);
733 off *= tree_to_double_int (temp.op2);
734 if (off.fits_shwi ())
735 temp.off = off.low;
737 break;
738 case VAR_DECL:
739 if (DECL_HARD_REGISTER (ref))
741 temp.op0 = ref;
742 break;
744 /* Fallthru. */
745 case PARM_DECL:
746 case CONST_DECL:
747 case RESULT_DECL:
748 /* Canonicalize decls to MEM[&decl] which is what we end up with
749 when valueizing MEM[ptr] with ptr = &decl. */
750 temp.opcode = MEM_REF;
751 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
752 temp.off = 0;
753 result->safe_push (temp);
754 temp.opcode = ADDR_EXPR;
755 temp.op0 = build_fold_addr_expr (ref);
756 temp.type = TREE_TYPE (temp.op0);
757 temp.off = -1;
758 break;
759 case STRING_CST:
760 case INTEGER_CST:
761 case COMPLEX_CST:
762 case VECTOR_CST:
763 case REAL_CST:
764 case FIXED_CST:
765 case CONSTRUCTOR:
766 case SSA_NAME:
767 temp.op0 = ref;
768 break;
769 case ADDR_EXPR:
770 if (is_gimple_min_invariant (ref))
772 temp.op0 = ref;
773 break;
775 /* Fallthrough. */
776 /* These are only interesting for their operands, their
777 existence, and their type. They will never be the last
778 ref in the chain of references (IE they require an
779 operand), so we don't have to put anything
780 for op* as it will be handled by the iteration */
781 case REALPART_EXPR:
782 case VIEW_CONVERT_EXPR:
783 temp.off = 0;
784 break;
785 case IMAGPART_EXPR:
786 /* This is only interesting for its constant offset. */
787 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
788 break;
789 default:
790 gcc_unreachable ();
792 result->safe_push (temp);
794 if (REFERENCE_CLASS_P (ref)
795 || TREE_CODE (ref) == MODIFY_EXPR
796 || TREE_CODE (ref) == WITH_SIZE_EXPR
797 || (TREE_CODE (ref) == ADDR_EXPR
798 && !is_gimple_min_invariant (ref)))
799 ref = TREE_OPERAND (ref, 0);
800 else
801 ref = NULL_TREE;
805 /* Build a alias-oracle reference abstraction in *REF from the vn_reference
806 operands in *OPS, the reference alias set SET and the reference type TYPE.
807 Return true if something useful was produced. */
809 bool
810 ao_ref_init_from_vn_reference (ao_ref *ref,
811 alias_set_type set, tree type,
812 vec<vn_reference_op_s> ops)
814 vn_reference_op_t op;
815 unsigned i;
816 tree base = NULL_TREE;
817 tree *op0_p = &base;
818 HOST_WIDE_INT offset = 0;
819 HOST_WIDE_INT max_size;
820 HOST_WIDE_INT size = -1;
821 tree size_tree = NULL_TREE;
822 alias_set_type base_alias_set = -1;
824 /* First get the final access size from just the outermost expression. */
825 op = &ops[0];
826 if (op->opcode == COMPONENT_REF)
827 size_tree = DECL_SIZE (op->op0);
828 else if (op->opcode == BIT_FIELD_REF)
829 size_tree = op->op0;
830 else
832 enum machine_mode mode = TYPE_MODE (type);
833 if (mode == BLKmode)
834 size_tree = TYPE_SIZE (type);
835 else
836 size = GET_MODE_BITSIZE (mode);
838 if (size_tree != NULL_TREE)
840 if (!host_integerp (size_tree, 1))
841 size = -1;
842 else
843 size = TREE_INT_CST_LOW (size_tree);
846 /* Initially, maxsize is the same as the accessed element size.
847 In the following it will only grow (or become -1). */
848 max_size = size;
850 /* Compute cumulative bit-offset for nested component-refs and array-refs,
851 and find the ultimate containing object. */
852 FOR_EACH_VEC_ELT (ops, i, op)
854 switch (op->opcode)
856 /* These may be in the reference ops, but we cannot do anything
857 sensible with them here. */
858 case ADDR_EXPR:
859 /* Apart from ADDR_EXPR arguments to MEM_REF. */
860 if (base != NULL_TREE
861 && TREE_CODE (base) == MEM_REF
862 && op->op0
863 && DECL_P (TREE_OPERAND (op->op0, 0)))
865 vn_reference_op_t pop = &ops[i-1];
866 base = TREE_OPERAND (op->op0, 0);
867 if (pop->off == -1)
869 max_size = -1;
870 offset = 0;
872 else
873 offset += pop->off * BITS_PER_UNIT;
874 op0_p = NULL;
875 break;
877 /* Fallthru. */
878 case CALL_EXPR:
879 return false;
881 /* Record the base objects. */
882 case MEM_REF:
883 base_alias_set = get_deref_alias_set (op->op0);
884 *op0_p = build2 (MEM_REF, op->type,
885 NULL_TREE, op->op0);
886 op0_p = &TREE_OPERAND (*op0_p, 0);
887 break;
889 case VAR_DECL:
890 case PARM_DECL:
891 case RESULT_DECL:
892 case SSA_NAME:
893 *op0_p = op->op0;
894 op0_p = NULL;
895 break;
897 /* And now the usual component-reference style ops. */
898 case BIT_FIELD_REF:
899 offset += tree_low_cst (op->op1, 0);
900 break;
902 case COMPONENT_REF:
904 tree field = op->op0;
905 /* We do not have a complete COMPONENT_REF tree here so we
906 cannot use component_ref_field_offset. Do the interesting
907 parts manually. */
909 if (op->op1
910 || !host_integerp (DECL_FIELD_OFFSET (field), 1))
911 max_size = -1;
912 else
914 offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
915 * BITS_PER_UNIT);
916 offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
918 break;
921 case ARRAY_RANGE_REF:
922 case ARRAY_REF:
923 /* We recorded the lower bound and the element size. */
924 if (!host_integerp (op->op0, 0)
925 || !host_integerp (op->op1, 0)
926 || !host_integerp (op->op2, 0))
927 max_size = -1;
928 else
930 HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
931 hindex -= TREE_INT_CST_LOW (op->op1);
932 hindex *= TREE_INT_CST_LOW (op->op2);
933 hindex *= BITS_PER_UNIT;
934 offset += hindex;
936 break;
938 case REALPART_EXPR:
939 break;
941 case IMAGPART_EXPR:
942 offset += size;
943 break;
945 case VIEW_CONVERT_EXPR:
946 break;
948 case STRING_CST:
949 case INTEGER_CST:
950 case COMPLEX_CST:
951 case VECTOR_CST:
952 case REAL_CST:
953 case CONSTRUCTOR:
954 case CONST_DECL:
955 return false;
957 default:
958 return false;
962 if (base == NULL_TREE)
963 return false;
965 ref->ref = NULL_TREE;
966 ref->base = base;
967 ref->offset = offset;
968 ref->size = size;
969 ref->max_size = max_size;
970 ref->ref_alias_set = set;
971 if (base_alias_set != -1)
972 ref->base_alias_set = base_alias_set;
973 else
974 ref->base_alias_set = get_alias_set (base);
975 /* We discount volatiles from value-numbering elsewhere. */
976 ref->volatile_p = false;
978 return true;
981 /* Copy the operations present in load/store/call REF into RESULT, a vector of
982 vn_reference_op_s's. */
984 void
985 copy_reference_ops_from_call (gimple call,
986 vec<vn_reference_op_s> *result)
988 vn_reference_op_s temp;
989 unsigned i;
990 tree lhs = gimple_call_lhs (call);
992 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
993 different. By adding the lhs here in the vector, we ensure that the
994 hashcode is different, guaranteeing a different value number. */
995 if (lhs && TREE_CODE (lhs) != SSA_NAME)
997 memset (&temp, 0, sizeof (temp));
998 temp.opcode = MODIFY_EXPR;
999 temp.type = TREE_TYPE (lhs);
1000 temp.op0 = lhs;
1001 temp.off = -1;
1002 result->safe_push (temp);
1005 /* Copy the type, opcode, function being called and static chain. */
1006 memset (&temp, 0, sizeof (temp));
1007 temp.type = gimple_call_return_type (call);
1008 temp.opcode = CALL_EXPR;
1009 temp.op0 = gimple_call_fn (call);
1010 temp.op1 = gimple_call_chain (call);
1011 temp.off = -1;
1012 result->safe_push (temp);
1014 /* Copy the call arguments. As they can be references as well,
1015 just chain them together. */
1016 for (i = 0; i < gimple_call_num_args (call); ++i)
1018 tree callarg = gimple_call_arg (call, i);
1019 copy_reference_ops_from_ref (callarg, result);
1023 /* Create a vector of vn_reference_op_s structures from REF, a
1024 REFERENCE_CLASS_P tree. The vector is not shared. */
1026 static vec<vn_reference_op_s>
1027 create_reference_ops_from_ref (tree ref)
1029 vec<vn_reference_op_s> result = vNULL;
1031 copy_reference_ops_from_ref (ref, &result);
1032 return result;
1035 /* Create a vector of vn_reference_op_s structures from CALL, a
1036 call statement. The vector is not shared. */
1038 static vec<vn_reference_op_s>
1039 create_reference_ops_from_call (gimple call)
1041 vec<vn_reference_op_s> result = vNULL;
1043 copy_reference_ops_from_call (call, &result);
1044 return result;
1047 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1048 *I_P to point to the last element of the replacement. */
1049 void
1050 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1051 unsigned int *i_p)
1053 unsigned int i = *i_p;
1054 vn_reference_op_t op = &(*ops)[i];
1055 vn_reference_op_t mem_op = &(*ops)[i - 1];
1056 tree addr_base;
1057 HOST_WIDE_INT addr_offset = 0;
1059 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1060 from .foo.bar to the preceding MEM_REF offset and replace the
1061 address with &OBJ. */
1062 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1063 &addr_offset);
1064 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1065 if (addr_base != TREE_OPERAND (op->op0, 0))
1067 double_int off = tree_to_double_int (mem_op->op0);
1068 off = off.sext (TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
1069 off += double_int::from_shwi (addr_offset);
1070 mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
1071 op->op0 = build_fold_addr_expr (addr_base);
1072 if (host_integerp (mem_op->op0, 0))
1073 mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
1074 else
1075 mem_op->off = -1;
1079 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1080 *I_P to point to the last element of the replacement. */
1081 static void
1082 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1083 unsigned int *i_p)
1085 unsigned int i = *i_p;
1086 vn_reference_op_t op = &(*ops)[i];
1087 vn_reference_op_t mem_op = &(*ops)[i - 1];
1088 gimple def_stmt;
1089 enum tree_code code;
1090 double_int off;
1092 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1093 if (!is_gimple_assign (def_stmt))
1094 return;
1096 code = gimple_assign_rhs_code (def_stmt);
1097 if (code != ADDR_EXPR
1098 && code != POINTER_PLUS_EXPR)
1099 return;
1101 off = tree_to_double_int (mem_op->op0);
1102 off = off.sext (TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
1104 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1105 from .foo.bar to the preceding MEM_REF offset and replace the
1106 address with &OBJ. */
1107 if (code == ADDR_EXPR)
1109 tree addr, addr_base;
1110 HOST_WIDE_INT addr_offset;
1112 addr = gimple_assign_rhs1 (def_stmt);
1113 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1114 &addr_offset);
1115 if (!addr_base
1116 || TREE_CODE (addr_base) != MEM_REF)
1117 return;
1119 off += double_int::from_shwi (addr_offset);
1120 off += mem_ref_offset (addr_base);
1121 op->op0 = TREE_OPERAND (addr_base, 0);
1123 else
1125 tree ptr, ptroff;
1126 ptr = gimple_assign_rhs1 (def_stmt);
1127 ptroff = gimple_assign_rhs2 (def_stmt);
1128 if (TREE_CODE (ptr) != SSA_NAME
1129 || TREE_CODE (ptroff) != INTEGER_CST)
1130 return;
1132 off += tree_to_double_int (ptroff);
1133 op->op0 = ptr;
1136 mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
1137 if (host_integerp (mem_op->op0, 0))
1138 mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
1139 else
1140 mem_op->off = -1;
1141 if (TREE_CODE (op->op0) == SSA_NAME)
1142 op->op0 = SSA_VAL (op->op0);
1143 if (TREE_CODE (op->op0) != SSA_NAME)
1144 op->opcode = TREE_CODE (op->op0);
1146 /* And recurse. */
1147 if (TREE_CODE (op->op0) == SSA_NAME)
1148 vn_reference_maybe_forwprop_address (ops, i_p);
1149 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1150 vn_reference_fold_indirect (ops, i_p);
1153 /* Optimize the reference REF to a constant if possible or return
1154 NULL_TREE if not. */
1156 tree
1157 fully_constant_vn_reference_p (vn_reference_t ref)
1159 vec<vn_reference_op_s> operands = ref->operands;
1160 vn_reference_op_t op;
1162 /* Try to simplify the translated expression if it is
1163 a call to a builtin function with at most two arguments. */
1164 op = &operands[0];
1165 if (op->opcode == CALL_EXPR
1166 && TREE_CODE (op->op0) == ADDR_EXPR
1167 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1168 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1169 && operands.length () >= 2
1170 && operands.length () <= 3)
1172 vn_reference_op_t arg0, arg1 = NULL;
1173 bool anyconst = false;
1174 arg0 = &operands[1];
1175 if (operands.length () > 2)
1176 arg1 = &operands[2];
1177 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1178 || (arg0->opcode == ADDR_EXPR
1179 && is_gimple_min_invariant (arg0->op0)))
1180 anyconst = true;
1181 if (arg1
1182 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1183 || (arg1->opcode == ADDR_EXPR
1184 && is_gimple_min_invariant (arg1->op0))))
1185 anyconst = true;
1186 if (anyconst)
1188 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1189 arg1 ? 2 : 1,
1190 arg0->op0,
1191 arg1 ? arg1->op0 : NULL);
1192 if (folded
1193 && TREE_CODE (folded) == NOP_EXPR)
1194 folded = TREE_OPERAND (folded, 0);
1195 if (folded
1196 && is_gimple_min_invariant (folded))
1197 return folded;
1201 /* Simplify reads from constant strings. */
1202 else if (op->opcode == ARRAY_REF
1203 && TREE_CODE (op->op0) == INTEGER_CST
1204 && integer_zerop (op->op1)
1205 && operands.length () == 2)
1207 vn_reference_op_t arg0;
1208 arg0 = &operands[1];
1209 if (arg0->opcode == STRING_CST
1210 && (TYPE_MODE (op->type)
1211 == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
1212 && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
1213 && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
1214 && tree_int_cst_sgn (op->op0) >= 0
1215 && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
1216 return build_int_cst_type (op->type,
1217 (TREE_STRING_POINTER (arg0->op0)
1218 [TREE_INT_CST_LOW (op->op0)]));
1221 return NULL_TREE;
1224 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1225 structures into their value numbers. This is done in-place, and
1226 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1227 whether any operands were valueized. */
1229 static vec<vn_reference_op_s>
1230 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1232 vn_reference_op_t vro;
1233 unsigned int i;
1235 *valueized_anything = false;
1237 FOR_EACH_VEC_ELT (orig, i, vro)
1239 if (vro->opcode == SSA_NAME
1240 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1242 tree tem = SSA_VAL (vro->op0);
1243 if (tem != vro->op0)
1245 *valueized_anything = true;
1246 vro->op0 = tem;
1248 /* If it transforms from an SSA_NAME to a constant, update
1249 the opcode. */
1250 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1251 vro->opcode = TREE_CODE (vro->op0);
1253 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1255 tree tem = SSA_VAL (vro->op1);
1256 if (tem != vro->op1)
1258 *valueized_anything = true;
1259 vro->op1 = tem;
1262 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1264 tree tem = SSA_VAL (vro->op2);
1265 if (tem != vro->op2)
1267 *valueized_anything = true;
1268 vro->op2 = tem;
1271 /* If it transforms from an SSA_NAME to an address, fold with
1272 a preceding indirect reference. */
1273 if (i > 0
1274 && vro->op0
1275 && TREE_CODE (vro->op0) == ADDR_EXPR
1276 && orig[i - 1].opcode == MEM_REF)
1277 vn_reference_fold_indirect (&orig, &i);
1278 else if (i > 0
1279 && vro->opcode == SSA_NAME
1280 && orig[i - 1].opcode == MEM_REF)
1281 vn_reference_maybe_forwprop_address (&orig, &i);
1282 /* If it transforms a non-constant ARRAY_REF into a constant
1283 one, adjust the constant offset. */
1284 else if (vro->opcode == ARRAY_REF
1285 && vro->off == -1
1286 && TREE_CODE (vro->op0) == INTEGER_CST
1287 && TREE_CODE (vro->op1) == INTEGER_CST
1288 && TREE_CODE (vro->op2) == INTEGER_CST)
1290 double_int off = tree_to_double_int (vro->op0);
1291 off += -tree_to_double_int (vro->op1);
1292 off *= tree_to_double_int (vro->op2);
1293 if (off.fits_shwi ())
1294 vro->off = off.low;
1298 return orig;
1301 static vec<vn_reference_op_s>
1302 valueize_refs (vec<vn_reference_op_s> orig)
1304 bool tem;
1305 return valueize_refs_1 (orig, &tem);
1308 static vec<vn_reference_op_s> shared_lookup_references;
1310 /* Create a vector of vn_reference_op_s structures from REF, a
1311 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1312 this function. *VALUEIZED_ANYTHING will specify whether any
1313 operands were valueized. */
1315 static vec<vn_reference_op_s>
1316 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1318 if (!ref)
1319 return vNULL;
1320 shared_lookup_references.truncate (0);
1321 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1322 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1323 valueized_anything);
1324 return shared_lookup_references;
1327 /* Create a vector of vn_reference_op_s structures from CALL, a
1328 call statement. The vector is shared among all callers of
1329 this function. */
1331 static vec<vn_reference_op_s>
1332 valueize_shared_reference_ops_from_call (gimple call)
1334 if (!call)
1335 return vNULL;
1336 shared_lookup_references.truncate (0);
1337 copy_reference_ops_from_call (call, &shared_lookup_references);
1338 shared_lookup_references = valueize_refs (shared_lookup_references);
1339 return shared_lookup_references;
1342 /* Lookup a SCCVN reference operation VR in the current hash table.
1343 Returns the resulting value number if it exists in the hash table,
1344 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1345 vn_reference_t stored in the hashtable if something is found. */
1347 static tree
1348 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1350 void **slot;
1351 hashval_t hash;
1353 hash = vr->hashcode;
1354 slot = htab_find_slot_with_hash (current_info->references, vr,
1355 hash, NO_INSERT);
1356 if (!slot && current_info == optimistic_info)
1357 slot = htab_find_slot_with_hash (valid_info->references, vr,
1358 hash, NO_INSERT);
1359 if (slot)
1361 if (vnresult)
1362 *vnresult = (vn_reference_t)*slot;
1363 return ((vn_reference_t)*slot)->result;
1366 return NULL_TREE;
1369 static tree *last_vuse_ptr;
1370 static vn_lookup_kind vn_walk_kind;
1371 static vn_lookup_kind default_vn_walk_kind;
1373 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1374 with the current VUSE and performs the expression lookup. */
1376 static void *
1377 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1378 unsigned int cnt, void *vr_)
1380 vn_reference_t vr = (vn_reference_t)vr_;
1381 void **slot;
1382 hashval_t hash;
1384 /* This bounds the stmt walks we perform on reference lookups
1385 to O(1) instead of O(N) where N is the number of dominating
1386 stores. */
1387 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1388 return (void *)-1;
1390 if (last_vuse_ptr)
1391 *last_vuse_ptr = vuse;
1393 /* Fixup vuse and hash. */
1394 if (vr->vuse)
1395 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1396 vr->vuse = SSA_VAL (vuse);
1397 if (vr->vuse)
1398 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1400 hash = vr->hashcode;
1401 slot = htab_find_slot_with_hash (current_info->references, vr,
1402 hash, NO_INSERT);
1403 if (!slot && current_info == optimistic_info)
1404 slot = htab_find_slot_with_hash (valid_info->references, vr,
1405 hash, NO_INSERT);
1406 if (slot)
1407 return *slot;
1409 return NULL;
1412 /* Lookup an existing or insert a new vn_reference entry into the
1413 value table for the VUSE, SET, TYPE, OPERANDS reference which
1414 has the value VALUE which is either a constant or an SSA name. */
1416 static vn_reference_t
1417 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1418 alias_set_type set,
1419 tree type,
1420 vec<vn_reference_op_s,
1421 va_heap> operands,
1422 tree value)
1424 struct vn_reference_s vr1;
1425 vn_reference_t result;
1426 unsigned value_id;
1427 vr1.vuse = vuse;
1428 vr1.operands = operands;
1429 vr1.type = type;
1430 vr1.set = set;
1431 vr1.hashcode = vn_reference_compute_hash (&vr1);
1432 if (vn_reference_lookup_1 (&vr1, &result))
1433 return result;
1434 if (TREE_CODE (value) == SSA_NAME)
1435 value_id = VN_INFO (value)->value_id;
1436 else
1437 value_id = get_or_alloc_constant_value_id (value);
1438 return vn_reference_insert_pieces (vuse, set, type,
1439 operands.copy (), value, value_id);
1442 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1443 from the statement defining VUSE and if not successful tries to
1444 translate *REFP and VR_ through an aggregate copy at the definition
1445 of VUSE. */
1447 static void *
1448 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
1450 vn_reference_t vr = (vn_reference_t)vr_;
1451 gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
1452 tree base;
1453 HOST_WIDE_INT offset, maxsize;
1454 static vec<vn_reference_op_s>
1455 lhs_ops = vNULL;
1456 ao_ref lhs_ref;
1457 bool lhs_ref_ok = false;
1459 /* First try to disambiguate after value-replacing in the definitions LHS. */
1460 if (is_gimple_assign (def_stmt))
1462 vec<vn_reference_op_s> tem;
1463 tree lhs = gimple_assign_lhs (def_stmt);
1464 bool valueized_anything = false;
1465 /* Avoid re-allocation overhead. */
1466 lhs_ops.truncate (0);
1467 copy_reference_ops_from_ref (lhs, &lhs_ops);
1468 tem = lhs_ops;
1469 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1470 gcc_assert (lhs_ops == tem);
1471 if (valueized_anything)
1473 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1474 get_alias_set (lhs),
1475 TREE_TYPE (lhs), lhs_ops);
1476 if (lhs_ref_ok
1477 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1478 return NULL;
1480 else
1482 ao_ref_init (&lhs_ref, lhs);
1483 lhs_ref_ok = true;
1487 base = ao_ref_base (ref);
1488 offset = ref->offset;
1489 maxsize = ref->max_size;
1491 /* If we cannot constrain the size of the reference we cannot
1492 test if anything kills it. */
1493 if (maxsize == -1)
1494 return (void *)-1;
1496 /* We can't deduce anything useful from clobbers. */
1497 if (gimple_clobber_p (def_stmt))
1498 return (void *)-1;
1500 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1501 from that definition.
1502 1) Memset. */
1503 if (is_gimple_reg_type (vr->type)
1504 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1505 && integer_zerop (gimple_call_arg (def_stmt, 1))
1506 && host_integerp (gimple_call_arg (def_stmt, 2), 1)
1507 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1509 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1510 tree base2;
1511 HOST_WIDE_INT offset2, size2, maxsize2;
1512 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
1513 size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
1514 if ((unsigned HOST_WIDE_INT)size2 / 8
1515 == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
1516 && maxsize2 != -1
1517 && operand_equal_p (base, base2, 0)
1518 && offset2 <= offset
1519 && offset2 + size2 >= offset + maxsize)
1521 tree val = build_zero_cst (vr->type);
1522 return vn_reference_lookup_or_insert_for_pieces
1523 (vuse, vr->set, vr->type, vr->operands, val);
1527 /* 2) Assignment from an empty CONSTRUCTOR. */
1528 else if (is_gimple_reg_type (vr->type)
1529 && gimple_assign_single_p (def_stmt)
1530 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1531 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1533 tree base2;
1534 HOST_WIDE_INT offset2, size2, maxsize2;
1535 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1536 &offset2, &size2, &maxsize2);
1537 if (maxsize2 != -1
1538 && operand_equal_p (base, base2, 0)
1539 && offset2 <= offset
1540 && offset2 + size2 >= offset + maxsize)
1542 tree val = build_zero_cst (vr->type);
1543 return vn_reference_lookup_or_insert_for_pieces
1544 (vuse, vr->set, vr->type, vr->operands, val);
1548 /* 3) Assignment from a constant. We can use folds native encode/interpret
1549 routines to extract the assigned bits. */
1550 else if (vn_walk_kind == VN_WALKREWRITE
1551 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1552 && ref->size == maxsize
1553 && maxsize % BITS_PER_UNIT == 0
1554 && offset % BITS_PER_UNIT == 0
1555 && is_gimple_reg_type (vr->type)
1556 && gimple_assign_single_p (def_stmt)
1557 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
1559 tree base2;
1560 HOST_WIDE_INT offset2, size2, maxsize2;
1561 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1562 &offset2, &size2, &maxsize2);
1563 if (maxsize2 != -1
1564 && maxsize2 == size2
1565 && size2 % BITS_PER_UNIT == 0
1566 && offset2 % BITS_PER_UNIT == 0
1567 && operand_equal_p (base, base2, 0)
1568 && offset2 <= offset
1569 && offset2 + size2 >= offset + maxsize)
1571 /* We support up to 512-bit values (for V8DFmode). */
1572 unsigned char buffer[64];
1573 int len;
1575 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1576 buffer, sizeof (buffer));
1577 if (len > 0)
1579 tree val = native_interpret_expr (vr->type,
1580 buffer
1581 + ((offset - offset2)
1582 / BITS_PER_UNIT),
1583 ref->size / BITS_PER_UNIT);
1584 if (val)
1585 return vn_reference_lookup_or_insert_for_pieces
1586 (vuse, vr->set, vr->type, vr->operands, val);
1591 /* 4) Assignment from an SSA name which definition we may be able
1592 to access pieces from. */
1593 else if (ref->size == maxsize
1594 && is_gimple_reg_type (vr->type)
1595 && gimple_assign_single_p (def_stmt)
1596 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1598 tree rhs1 = gimple_assign_rhs1 (def_stmt);
1599 gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
1600 if (is_gimple_assign (def_stmt2)
1601 && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1602 || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1603 && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1605 tree base2;
1606 HOST_WIDE_INT offset2, size2, maxsize2, off;
1607 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1608 &offset2, &size2, &maxsize2);
1609 off = offset - offset2;
1610 if (maxsize2 != -1
1611 && maxsize2 == size2
1612 && operand_equal_p (base, base2, 0)
1613 && offset2 <= offset
1614 && offset2 + size2 >= offset + maxsize)
1616 tree val = NULL_TREE;
1617 HOST_WIDE_INT elsz
1618 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
1619 if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
1621 if (off == 0)
1622 val = gimple_assign_rhs1 (def_stmt2);
1623 else if (off == elsz)
1624 val = gimple_assign_rhs2 (def_stmt2);
1626 else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1627 && off % elsz == 0)
1629 tree ctor = gimple_assign_rhs1 (def_stmt2);
1630 unsigned i = off / elsz;
1631 if (i < CONSTRUCTOR_NELTS (ctor))
1633 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
1634 if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
1636 if (TREE_CODE (TREE_TYPE (elt->value))
1637 != VECTOR_TYPE)
1638 val = elt->value;
1642 if (val)
1643 return vn_reference_lookup_or_insert_for_pieces
1644 (vuse, vr->set, vr->type, vr->operands, val);
1649 /* 5) For aggregate copies translate the reference through them if
1650 the copy kills ref. */
1651 else if (vn_walk_kind == VN_WALKREWRITE
1652 && gimple_assign_single_p (def_stmt)
1653 && (DECL_P (gimple_assign_rhs1 (def_stmt))
1654 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
1655 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1657 tree base2;
1658 HOST_WIDE_INT offset2, size2, maxsize2;
1659 int i, j;
1660 vec<vn_reference_op_s>
1661 rhs = vNULL;
1662 vn_reference_op_t vro;
1663 ao_ref r;
1665 if (!lhs_ref_ok)
1666 return (void *)-1;
1668 /* See if the assignment kills REF. */
1669 base2 = ao_ref_base (&lhs_ref);
1670 offset2 = lhs_ref.offset;
1671 size2 = lhs_ref.size;
1672 maxsize2 = lhs_ref.max_size;
1673 if (maxsize2 == -1
1674 || (base != base2 && !operand_equal_p (base, base2, 0))
1675 || offset2 > offset
1676 || offset2 + size2 < offset + maxsize)
1677 return (void *)-1;
1679 /* Find the common base of ref and the lhs. lhs_ops already
1680 contains valueized operands for the lhs. */
1681 i = vr->operands.length () - 1;
1682 j = lhs_ops.length () - 1;
1683 while (j >= 0 && i >= 0
1684 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
1686 i--;
1687 j--;
1690 /* ??? The innermost op should always be a MEM_REF and we already
1691 checked that the assignment to the lhs kills vr. Thus for
1692 aggregate copies using char[] types the vn_reference_op_eq
1693 may fail when comparing types for compatibility. But we really
1694 don't care here - further lookups with the rewritten operands
1695 will simply fail if we messed up types too badly. */
1696 if (j == 0 && i >= 0
1697 && lhs_ops[0].opcode == MEM_REF
1698 && lhs_ops[0].off != -1
1699 && (lhs_ops[0].off == vr->operands[i].off))
1700 i--, j--;
1702 /* i now points to the first additional op.
1703 ??? LHS may not be completely contained in VR, one or more
1704 VIEW_CONVERT_EXPRs could be in its way. We could at least
1705 try handling outermost VIEW_CONVERT_EXPRs. */
1706 if (j != -1)
1707 return (void *)-1;
1709 /* Now re-write REF to be based on the rhs of the assignment. */
1710 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1711 /* We need to pre-pend vr->operands[0..i] to rhs. */
1712 if (i + 1 + rhs.length () > vr->operands.length ())
1714 vec<vn_reference_op_s> old = vr->operands;
1715 vr->operands.safe_grow (i + 1 + rhs.length ());
1716 if (old == shared_lookup_references
1717 && vr->operands != old)
1718 shared_lookup_references = vNULL;
1720 else
1721 vr->operands.truncate (i + 1 + rhs.length ());
1722 FOR_EACH_VEC_ELT (rhs, j, vro)
1723 vr->operands[i + 1 + j] = *vro;
1724 rhs.release ();
1725 vr->operands = valueize_refs (vr->operands);
1726 vr->hashcode = vn_reference_compute_hash (vr);
1728 /* Adjust *ref from the new operands. */
1729 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1730 return (void *)-1;
1731 /* This can happen with bitfields. */
1732 if (ref->size != r.size)
1733 return (void *)-1;
1734 *ref = r;
1736 /* Do not update last seen VUSE after translating. */
1737 last_vuse_ptr = NULL;
1739 /* Keep looking for the adjusted *REF / VR pair. */
1740 return NULL;
1743 /* 6) For memcpy copies translate the reference through them if
1744 the copy kills ref. */
1745 else if (vn_walk_kind == VN_WALKREWRITE
1746 && is_gimple_reg_type (vr->type)
1747 /* ??? Handle BCOPY as well. */
1748 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
1749 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
1750 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
1751 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
1752 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
1753 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
1754 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
1755 && host_integerp (gimple_call_arg (def_stmt, 2), 1))
1757 tree lhs, rhs;
1758 ao_ref r;
1759 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
1760 vn_reference_op_s op;
1761 HOST_WIDE_INT at;
1764 /* Only handle non-variable, addressable refs. */
1765 if (ref->size != maxsize
1766 || offset % BITS_PER_UNIT != 0
1767 || ref->size % BITS_PER_UNIT != 0)
1768 return (void *)-1;
1770 /* Extract a pointer base and an offset for the destination. */
1771 lhs = gimple_call_arg (def_stmt, 0);
1772 lhs_offset = 0;
1773 if (TREE_CODE (lhs) == SSA_NAME)
1774 lhs = SSA_VAL (lhs);
1775 if (TREE_CODE (lhs) == ADDR_EXPR)
1777 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
1778 &lhs_offset);
1779 if (!tem)
1780 return (void *)-1;
1781 if (TREE_CODE (tem) == MEM_REF
1782 && host_integerp (TREE_OPERAND (tem, 1), 1))
1784 lhs = TREE_OPERAND (tem, 0);
1785 lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
1787 else if (DECL_P (tem))
1788 lhs = build_fold_addr_expr (tem);
1789 else
1790 return (void *)-1;
1792 if (TREE_CODE (lhs) != SSA_NAME
1793 && TREE_CODE (lhs) != ADDR_EXPR)
1794 return (void *)-1;
1796 /* Extract a pointer base and an offset for the source. */
1797 rhs = gimple_call_arg (def_stmt, 1);
1798 rhs_offset = 0;
1799 if (TREE_CODE (rhs) == SSA_NAME)
1800 rhs = SSA_VAL (rhs);
1801 if (TREE_CODE (rhs) == ADDR_EXPR)
1803 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
1804 &rhs_offset);
1805 if (!tem)
1806 return (void *)-1;
1807 if (TREE_CODE (tem) == MEM_REF
1808 && host_integerp (TREE_OPERAND (tem, 1), 1))
1810 rhs = TREE_OPERAND (tem, 0);
1811 rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
1813 else if (DECL_P (tem))
1814 rhs = build_fold_addr_expr (tem);
1815 else
1816 return (void *)-1;
1818 if (TREE_CODE (rhs) != SSA_NAME
1819 && TREE_CODE (rhs) != ADDR_EXPR)
1820 return (void *)-1;
1822 copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));
1824 /* The bases of the destination and the references have to agree. */
1825 if ((TREE_CODE (base) != MEM_REF
1826 && !DECL_P (base))
1827 || (TREE_CODE (base) == MEM_REF
1828 && (TREE_OPERAND (base, 0) != lhs
1829 || !host_integerp (TREE_OPERAND (base, 1), 1)))
1830 || (DECL_P (base)
1831 && (TREE_CODE (lhs) != ADDR_EXPR
1832 || TREE_OPERAND (lhs, 0) != base)))
1833 return (void *)-1;
1835 /* And the access has to be contained within the memcpy destination. */
1836 at = offset / BITS_PER_UNIT;
1837 if (TREE_CODE (base) == MEM_REF)
1838 at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
1839 if (lhs_offset > at
1840 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
1841 return (void *)-1;
1843 /* Make room for 2 operands in the new reference. */
1844 if (vr->operands.length () < 2)
1846 vec<vn_reference_op_s> old = vr->operands;
1847 vr->operands.safe_grow_cleared (2);
1848 if (old == shared_lookup_references
1849 && vr->operands != old)
1850 shared_lookup_references.create (0);
1852 else
1853 vr->operands.truncate (2);
1855 /* The looked-through reference is a simple MEM_REF. */
1856 memset (&op, 0, sizeof (op));
1857 op.type = vr->type;
1858 op.opcode = MEM_REF;
1859 op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
1860 op.off = at - lhs_offset + rhs_offset;
1861 vr->operands[0] = op;
1862 op.type = TREE_TYPE (rhs);
1863 op.opcode = TREE_CODE (rhs);
1864 op.op0 = rhs;
1865 op.off = -1;
1866 vr->operands[1] = op;
1867 vr->hashcode = vn_reference_compute_hash (vr);
1869 /* Adjust *ref from the new operands. */
1870 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1871 return (void *)-1;
1872 /* This can happen with bitfields. */
1873 if (ref->size != r.size)
1874 return (void *)-1;
1875 *ref = r;
1877 /* Do not update last seen VUSE after translating. */
1878 last_vuse_ptr = NULL;
1880 /* Keep looking for the adjusted *REF / VR pair. */
1881 return NULL;
1884 /* Bail out and stop walking. */
1885 return (void *)-1;
1888 /* Lookup a reference operation by it's parts, in the current hash table.
1889 Returns the resulting value number if it exists in the hash table,
1890 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1891 vn_reference_t stored in the hashtable if something is found. */
1893 tree
1894 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
1895 vec<vn_reference_op_s> operands,
1896 vn_reference_t *vnresult, vn_lookup_kind kind)
1898 struct vn_reference_s vr1;
1899 vn_reference_t tmp;
1900 tree cst;
1902 if (!vnresult)
1903 vnresult = &tmp;
1904 *vnresult = NULL;
1906 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1907 shared_lookup_references.truncate (0);
1908 shared_lookup_references.safe_grow (operands.length ());
1909 memcpy (shared_lookup_references.address (),
1910 operands.address (),
1911 sizeof (vn_reference_op_s)
1912 * operands.length ());
1913 vr1.operands = operands = shared_lookup_references
1914 = valueize_refs (shared_lookup_references);
1915 vr1.type = type;
1916 vr1.set = set;
1917 vr1.hashcode = vn_reference_compute_hash (&vr1);
1918 if ((cst = fully_constant_vn_reference_p (&vr1)))
1919 return cst;
1921 vn_reference_lookup_1 (&vr1, vnresult);
1922 if (!*vnresult
1923 && kind != VN_NOWALK
1924 && vr1.vuse)
1926 ao_ref r;
1927 vn_walk_kind = kind;
1928 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
1929 *vnresult =
1930 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1931 vn_reference_lookup_2,
1932 vn_reference_lookup_3, &vr1);
1933 if (vr1.operands != operands)
1934 vr1.operands.release ();
1937 if (*vnresult)
1938 return (*vnresult)->result;
1940 return NULL_TREE;
1943 /* Lookup OP in the current hash table, and return the resulting value
1944 number if it exists in the hash table. Return NULL_TREE if it does
1945 not exist in the hash table or if the result field of the structure
1946 was NULL.. VNRESULT will be filled in with the vn_reference_t
1947 stored in the hashtable if one exists. */
1949 tree
1950 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
1951 vn_reference_t *vnresult)
1953 vec<vn_reference_op_s> operands;
1954 struct vn_reference_s vr1;
1955 tree cst;
1956 bool valuezied_anything;
1958 if (vnresult)
1959 *vnresult = NULL;
1961 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1962 vr1.operands = operands
1963 = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
1964 vr1.type = TREE_TYPE (op);
1965 vr1.set = get_alias_set (op);
1966 vr1.hashcode = vn_reference_compute_hash (&vr1);
1967 if ((cst = fully_constant_vn_reference_p (&vr1)))
1968 return cst;
1970 if (kind != VN_NOWALK
1971 && vr1.vuse)
1973 vn_reference_t wvnresult;
1974 ao_ref r;
1975 /* Make sure to use a valueized reference if we valueized anything.
1976 Otherwise preserve the full reference for advanced TBAA. */
1977 if (!valuezied_anything
1978 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
1979 vr1.operands))
1980 ao_ref_init (&r, op);
1981 vn_walk_kind = kind;
1982 wvnresult =
1983 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1984 vn_reference_lookup_2,
1985 vn_reference_lookup_3, &vr1);
1986 if (vr1.operands != operands)
1987 vr1.operands.release ();
1988 if (wvnresult)
1990 if (vnresult)
1991 *vnresult = wvnresult;
1992 return wvnresult->result;
1995 return NULL_TREE;
1998 return vn_reference_lookup_1 (&vr1, vnresult);
2002 /* Insert OP into the current hash table with a value number of
2003 RESULT, and return the resulting reference structure we created. */
2005 vn_reference_t
2006 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2008 void **slot;
2009 vn_reference_t vr1;
2011 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2012 if (TREE_CODE (result) == SSA_NAME)
2013 vr1->value_id = VN_INFO (result)->value_id;
2014 else
2015 vr1->value_id = get_or_alloc_constant_value_id (result);
2016 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2017 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
2018 vr1->type = TREE_TYPE (op);
2019 vr1->set = get_alias_set (op);
2020 vr1->hashcode = vn_reference_compute_hash (vr1);
2021 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2022 vr1->result_vdef = vdef;
2024 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
2025 INSERT);
2027 /* Because we lookup stores using vuses, and value number failures
2028 using the vdefs (see visit_reference_op_store for how and why),
2029 it's possible that on failure we may try to insert an already
2030 inserted store. This is not wrong, there is no ssa name for a
2031 store that we could use as a differentiator anyway. Thus, unlike
2032 the other lookup functions, you cannot gcc_assert (!*slot)
2033 here. */
2035 /* But free the old slot in case of a collision. */
2036 if (*slot)
2037 free_reference (*slot);
2039 *slot = vr1;
2040 return vr1;
2043 /* Insert a reference by it's pieces into the current hash table with
2044 a value number of RESULT. Return the resulting reference
2045 structure we created. */
2047 vn_reference_t
2048 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2049 vec<vn_reference_op_s> operands,
2050 tree result, unsigned int value_id)
2053 void **slot;
2054 vn_reference_t vr1;
2056 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2057 vr1->value_id = value_id;
2058 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2059 vr1->operands = valueize_refs (operands);
2060 vr1->type = type;
2061 vr1->set = set;
2062 vr1->hashcode = vn_reference_compute_hash (vr1);
2063 if (result && TREE_CODE (result) == SSA_NAME)
2064 result = SSA_VAL (result);
2065 vr1->result = result;
2067 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
2068 INSERT);
2070 /* At this point we should have all the things inserted that we have
2071 seen before, and we should never try inserting something that
2072 already exists. */
2073 gcc_assert (!*slot);
2074 if (*slot)
2075 free_reference (*slot);
2077 *slot = vr1;
2078 return vr1;
2081 /* Compute and return the hash value for nary operation VBO1. */
2083 hashval_t
2084 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2086 hashval_t hash;
2087 unsigned i;
2089 for (i = 0; i < vno1->length; ++i)
2090 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2091 vno1->op[i] = SSA_VAL (vno1->op[i]);
2093 if (vno1->length == 2
2094 && commutative_tree_code (vno1->opcode)
2095 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2097 tree temp = vno1->op[0];
2098 vno1->op[0] = vno1->op[1];
2099 vno1->op[1] = temp;
2102 hash = iterative_hash_hashval_t (vno1->opcode, 0);
2103 for (i = 0; i < vno1->length; ++i)
2104 hash = iterative_hash_expr (vno1->op[i], hash);
2106 return hash;
2109 /* Return the computed hashcode for nary operation P1. */
2111 static hashval_t
2112 vn_nary_op_hash (const void *p1)
2114 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2115 return vno1->hashcode;
2118 /* Compare nary operations P1 and P2 and return true if they are
2119 equivalent. */
2122 vn_nary_op_eq (const void *p1, const void *p2)
2124 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2125 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
2126 unsigned i;
2128 if (vno1->hashcode != vno2->hashcode)
2129 return false;
2131 if (vno1->length != vno2->length)
2132 return false;
2134 if (vno1->opcode != vno2->opcode
2135 || !types_compatible_p (vno1->type, vno2->type))
2136 return false;
2138 for (i = 0; i < vno1->length; ++i)
2139 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2140 return false;
2142 return true;
2145 /* Initialize VNO from the pieces provided. */
2147 static void
2148 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2149 enum tree_code code, tree type, tree *ops)
2151 vno->opcode = code;
2152 vno->length = length;
2153 vno->type = type;
2154 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2157 /* Initialize VNO from OP. */
2159 static void
2160 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2162 unsigned i;
2164 vno->opcode = TREE_CODE (op);
2165 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2166 vno->type = TREE_TYPE (op);
2167 for (i = 0; i < vno->length; ++i)
2168 vno->op[i] = TREE_OPERAND (op, i);
2171 /* Return the number of operands for a vn_nary ops structure from STMT. */
2173 static unsigned int
2174 vn_nary_length_from_stmt (gimple stmt)
2176 switch (gimple_assign_rhs_code (stmt))
2178 case REALPART_EXPR:
2179 case IMAGPART_EXPR:
2180 case VIEW_CONVERT_EXPR:
2181 return 1;
2183 case BIT_FIELD_REF:
2184 return 3;
2186 case CONSTRUCTOR:
2187 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2189 default:
2190 return gimple_num_ops (stmt) - 1;
2194 /* Initialize VNO from STMT. */
2196 static void
2197 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2199 unsigned i;
2201 vno->opcode = gimple_assign_rhs_code (stmt);
2202 vno->type = gimple_expr_type (stmt);
2203 switch (vno->opcode)
2205 case REALPART_EXPR:
2206 case IMAGPART_EXPR:
2207 case VIEW_CONVERT_EXPR:
2208 vno->length = 1;
2209 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2210 break;
2212 case BIT_FIELD_REF:
2213 vno->length = 3;
2214 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2215 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2216 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2217 break;
2219 case CONSTRUCTOR:
2220 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2221 for (i = 0; i < vno->length; ++i)
2222 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2223 break;
2225 default:
2226 gcc_checking_assert (!gimple_assign_single_p (stmt));
2227 vno->length = gimple_num_ops (stmt) - 1;
2228 for (i = 0; i < vno->length; ++i)
2229 vno->op[i] = gimple_op (stmt, i + 1);
2233 /* Compute the hashcode for VNO and look for it in the hash table;
2234 return the resulting value number if it exists in the hash table.
2235 Return NULL_TREE if it does not exist in the hash table or if the
2236 result field of the operation is NULL. VNRESULT will contain the
2237 vn_nary_op_t from the hashtable if it exists. */
2239 static tree
2240 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2242 void **slot;
2244 if (vnresult)
2245 *vnresult = NULL;
2247 vno->hashcode = vn_nary_op_compute_hash (vno);
2248 slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
2249 NO_INSERT);
2250 if (!slot && current_info == optimistic_info)
2251 slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
2252 NO_INSERT);
2253 if (!slot)
2254 return NULL_TREE;
2255 if (vnresult)
2256 *vnresult = (vn_nary_op_t)*slot;
2257 return ((vn_nary_op_t)*slot)->result;
2260 /* Lookup a n-ary operation by its pieces and return the resulting value
2261 number if it exists in the hash table. Return NULL_TREE if it does
2262 not exist in the hash table or if the result field of the operation
2263 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2264 if it exists. */
2266 tree
2267 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2268 tree type, tree *ops, vn_nary_op_t *vnresult)
2270 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2271 sizeof_vn_nary_op (length));
2272 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2273 return vn_nary_op_lookup_1 (vno1, vnresult);
2276 /* Lookup OP in the current hash table, and return the resulting value
2277 number if it exists in the hash table. Return NULL_TREE if it does
2278 not exist in the hash table or if the result field of the operation
2279 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2280 if it exists. */
2282 tree
2283 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2285 vn_nary_op_t vno1
2286 = XALLOCAVAR (struct vn_nary_op_s,
2287 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2288 init_vn_nary_op_from_op (vno1, op);
2289 return vn_nary_op_lookup_1 (vno1, vnresult);
2292 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2293 value number if it exists in the hash table. Return NULL_TREE if
2294 it does not exist in the hash table. VNRESULT will contain the
2295 vn_nary_op_t from the hashtable if it exists. */
2297 tree
2298 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2300 vn_nary_op_t vno1
2301 = XALLOCAVAR (struct vn_nary_op_s,
2302 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2303 init_vn_nary_op_from_stmt (vno1, stmt);
2304 return vn_nary_op_lookup_1 (vno1, vnresult);
2307 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2309 static vn_nary_op_t
2310 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2312 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2315 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2316 obstack. */
2318 static vn_nary_op_t
2319 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2321 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2322 &current_info->nary_obstack);
2324 vno1->value_id = value_id;
2325 vno1->length = length;
2326 vno1->result = result;
2328 return vno1;
2331 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2332 VNO->HASHCODE first. */
2334 static vn_nary_op_t
2335 vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash)
2337 void **slot;
2339 if (compute_hash)
2340 vno->hashcode = vn_nary_op_compute_hash (vno);
2342 slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT);
2343 gcc_assert (!*slot);
2345 *slot = vno;
2346 return vno;
2349 /* Insert a n-ary operation into the current hash table using it's
2350 pieces. Return the vn_nary_op_t structure we created and put in
2351 the hashtable. */
2353 vn_nary_op_t
2354 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2355 tree type, tree *ops,
2356 tree result, unsigned int value_id)
2358 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2359 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2360 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2363 /* Insert OP into the current hash table with a value number of
2364 RESULT. Return the vn_nary_op_t structure we created and put in
2365 the hashtable. */
2367 vn_nary_op_t
2368 vn_nary_op_insert (tree op, tree result)
2370 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2371 vn_nary_op_t vno1;
2373 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2374 init_vn_nary_op_from_op (vno1, op);
2375 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2378 /* Insert the rhs of STMT into the current hash table with a value number of
2379 RESULT. */
2381 vn_nary_op_t
2382 vn_nary_op_insert_stmt (gimple stmt, tree result)
2384 vn_nary_op_t vno1
2385 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2386 result, VN_INFO (result)->value_id);
2387 init_vn_nary_op_from_stmt (vno1, stmt);
2388 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2391 /* Compute a hashcode for PHI operation VP1 and return it. */
2393 static inline hashval_t
2394 vn_phi_compute_hash (vn_phi_t vp1)
2396 hashval_t result;
2397 int i;
2398 tree phi1op;
2399 tree type;
2401 result = vp1->block->index;
2403 /* If all PHI arguments are constants we need to distinguish
2404 the PHI node via its type. */
2405 type = vp1->type;
2406 result += vn_hash_type (type);
2408 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2410 if (phi1op == VN_TOP)
2411 continue;
2412 result = iterative_hash_expr (phi1op, result);
2415 return result;
2418 /* Return the computed hashcode for phi operation P1. */
2420 static hashval_t
2421 vn_phi_hash (const void *p1)
2423 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2424 return vp1->hashcode;
2427 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2429 static int
2430 vn_phi_eq (const void *p1, const void *p2)
2432 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2433 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
2435 if (vp1->hashcode != vp2->hashcode)
2436 return false;
2438 if (vp1->block == vp2->block)
2440 int i;
2441 tree phi1op;
2443 /* If the PHI nodes do not have compatible types
2444 they are not the same. */
2445 if (!types_compatible_p (vp1->type, vp2->type))
2446 return false;
2448 /* Any phi in the same block will have it's arguments in the
2449 same edge order, because of how we store phi nodes. */
2450 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2452 tree phi2op = vp2->phiargs[i];
2453 if (phi1op == VN_TOP || phi2op == VN_TOP)
2454 continue;
2455 if (!expressions_equal_p (phi1op, phi2op))
2456 return false;
2458 return true;
2460 return false;
2463 static vec<tree> shared_lookup_phiargs;
2465 /* Lookup PHI in the current hash table, and return the resulting
2466 value number if it exists in the hash table. Return NULL_TREE if
2467 it does not exist in the hash table. */
2469 static tree
2470 vn_phi_lookup (gimple phi)
2472 void **slot;
2473 struct vn_phi_s vp1;
2474 unsigned i;
2476 shared_lookup_phiargs.truncate (0);
2478 /* Canonicalize the SSA_NAME's to their value number. */
2479 for (i = 0; i < gimple_phi_num_args (phi); i++)
2481 tree def = PHI_ARG_DEF (phi, i);
2482 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2483 shared_lookup_phiargs.safe_push (def);
2485 vp1.type = TREE_TYPE (gimple_phi_result (phi));
2486 vp1.phiargs = shared_lookup_phiargs;
2487 vp1.block = gimple_bb (phi);
2488 vp1.hashcode = vn_phi_compute_hash (&vp1);
2489 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
2490 NO_INSERT);
2491 if (!slot && current_info == optimistic_info)
2492 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
2493 NO_INSERT);
2494 if (!slot)
2495 return NULL_TREE;
2496 return ((vn_phi_t)*slot)->result;
2499 /* Insert PHI into the current hash table with a value number of
2500 RESULT. */
2502 static vn_phi_t
2503 vn_phi_insert (gimple phi, tree result)
2505 void **slot;
2506 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2507 unsigned i;
2508 vec<tree> args = vNULL;
2510 /* Canonicalize the SSA_NAME's to their value number. */
2511 for (i = 0; i < gimple_phi_num_args (phi); i++)
2513 tree def = PHI_ARG_DEF (phi, i);
2514 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2515 args.safe_push (def);
2517 vp1->value_id = VN_INFO (result)->value_id;
2518 vp1->type = TREE_TYPE (gimple_phi_result (phi));
2519 vp1->phiargs = args;
2520 vp1->block = gimple_bb (phi);
2521 vp1->result = result;
2522 vp1->hashcode = vn_phi_compute_hash (vp1);
2524 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
2525 INSERT);
2527 /* Because we iterate over phi operations more than once, it's
2528 possible the slot might already exist here, hence no assert.*/
2529 *slot = vp1;
2530 return vp1;
2534 /* Print set of components in strongly connected component SCC to OUT. */
2536 static void
2537 print_scc (FILE *out, vec<tree> scc)
2539 tree var;
2540 unsigned int i;
2542 fprintf (out, "SCC consists of:");
2543 FOR_EACH_VEC_ELT (scc, i, var)
2545 fprintf (out, " ");
2546 print_generic_expr (out, var, 0);
2548 fprintf (out, "\n");
2551 /* Set the value number of FROM to TO, return true if it has changed
2552 as a result. */
2554 static inline bool
2555 set_ssa_val_to (tree from, tree to)
2557 tree currval = SSA_VAL (from);
2558 HOST_WIDE_INT toff, coff;
2560 if (from != to)
2562 if (currval == from)
2564 if (dump_file && (dump_flags & TDF_DETAILS))
2566 fprintf (dump_file, "Not changing value number of ");
2567 print_generic_expr (dump_file, from, 0);
2568 fprintf (dump_file, " from VARYING to ");
2569 print_generic_expr (dump_file, to, 0);
2570 fprintf (dump_file, "\n");
2572 return false;
2574 else if (TREE_CODE (to) == SSA_NAME
2575 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2576 to = from;
2579 /* The only thing we allow as value numbers are VN_TOP, ssa_names
2580 and invariants. So assert that here. */
2581 gcc_assert (to != NULL_TREE
2582 && (to == VN_TOP
2583 || TREE_CODE (to) == SSA_NAME
2584 || is_gimple_min_invariant (to)));
2586 if (dump_file && (dump_flags & TDF_DETAILS))
2588 fprintf (dump_file, "Setting value number of ");
2589 print_generic_expr (dump_file, from, 0);
2590 fprintf (dump_file, " to ");
2591 print_generic_expr (dump_file, to, 0);
2594 if (currval != to
2595 && !operand_equal_p (currval, to, 0)
2596 /* ??? For addresses involving volatile objects or types operand_equal_p
2597 does not reliably detect ADDR_EXPRs as equal. We know we are only
2598 getting invariant gimple addresses here, so can use
2599 get_addr_base_and_unit_offset to do this comparison. */
2600 && !(TREE_CODE (currval) == ADDR_EXPR
2601 && TREE_CODE (to) == ADDR_EXPR
2602 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
2603 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
2604 && coff == toff))
2606 VN_INFO (from)->valnum = to;
2607 if (dump_file && (dump_flags & TDF_DETAILS))
2608 fprintf (dump_file, " (changed)\n");
2609 return true;
2611 if (dump_file && (dump_flags & TDF_DETAILS))
2612 fprintf (dump_file, "\n");
2613 return false;
2616 /* Mark as processed all the definitions in the defining stmt of USE, or
2617 the USE itself. */
2619 static void
2620 mark_use_processed (tree use)
2622 ssa_op_iter iter;
2623 def_operand_p defp;
2624 gimple stmt = SSA_NAME_DEF_STMT (use);
2626 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
2628 VN_INFO (use)->use_processed = true;
2629 return;
2632 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2634 tree def = DEF_FROM_PTR (defp);
2636 VN_INFO (def)->use_processed = true;
2640 /* Set all definitions in STMT to value number to themselves.
2641 Return true if a value number changed. */
2643 static bool
2644 defs_to_varying (gimple stmt)
2646 bool changed = false;
2647 ssa_op_iter iter;
2648 def_operand_p defp;
2650 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2652 tree def = DEF_FROM_PTR (defp);
2653 changed |= set_ssa_val_to (def, def);
2655 return changed;
2658 static bool expr_has_constants (tree expr);
2659 static tree valueize_expr (tree expr);
2661 /* Visit a copy between LHS and RHS, return true if the value number
2662 changed. */
2664 static bool
2665 visit_copy (tree lhs, tree rhs)
2667 /* Follow chains of copies to their destination. */
2668 while (TREE_CODE (rhs) == SSA_NAME
2669 && SSA_VAL (rhs) != rhs)
2670 rhs = SSA_VAL (rhs);
2672 /* The copy may have a more interesting constant filled expression
2673 (we don't, since we know our RHS is just an SSA name). */
2674 if (TREE_CODE (rhs) == SSA_NAME)
2676 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2677 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2680 return set_ssa_val_to (lhs, rhs);
2683 /* Visit a nary operator RHS, value number it, and return true if the
2684 value number of LHS has changed as a result. */
2686 static bool
2687 visit_nary_op (tree lhs, gimple stmt)
2689 bool changed = false;
2690 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2692 if (result)
2693 changed = set_ssa_val_to (lhs, result);
2694 else
2696 changed = set_ssa_val_to (lhs, lhs);
2697 vn_nary_op_insert_stmt (stmt, lhs);
2700 return changed;
2703 /* Visit a call STMT storing into LHS. Return true if the value number
2704 of the LHS has changed as a result. */
2706 static bool
2707 visit_reference_op_call (tree lhs, gimple stmt)
2709 bool changed = false;
2710 struct vn_reference_s vr1;
2711 vn_reference_t vnresult = NULL;
2712 tree vuse = gimple_vuse (stmt);
2713 tree vdef = gimple_vdef (stmt);
2715 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
2716 if (lhs && TREE_CODE (lhs) != SSA_NAME)
2717 lhs = NULL_TREE;
2719 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2720 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2721 vr1.type = gimple_expr_type (stmt);
2722 vr1.set = 0;
2723 vr1.hashcode = vn_reference_compute_hash (&vr1);
2724 vn_reference_lookup_1 (&vr1, &vnresult);
2726 if (vnresult)
2728 if (vnresult->result_vdef && vdef)
2729 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
2731 if (!vnresult->result && lhs)
2732 vnresult->result = lhs;
2734 if (vnresult->result && lhs)
2736 changed |= set_ssa_val_to (lhs, vnresult->result);
2738 if (VN_INFO (vnresult->result)->has_constants)
2739 VN_INFO (lhs)->has_constants = true;
2742 else
2744 void **slot;
2745 vn_reference_t vr2;
2746 if (vdef)
2747 changed |= set_ssa_val_to (vdef, vdef);
2748 if (lhs)
2749 changed |= set_ssa_val_to (lhs, lhs);
2750 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2751 vr2->vuse = vr1.vuse;
2752 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2753 vr2->type = vr1.type;
2754 vr2->set = vr1.set;
2755 vr2->hashcode = vr1.hashcode;
2756 vr2->result = lhs;
2757 vr2->result_vdef = vdef;
2758 slot = htab_find_slot_with_hash (current_info->references,
2759 vr2, vr2->hashcode, INSERT);
2760 if (*slot)
2761 free_reference (*slot);
2762 *slot = vr2;
2765 return changed;
2768 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2769 and return true if the value number of the LHS has changed as a result. */
2771 static bool
2772 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2774 bool changed = false;
2775 tree last_vuse;
2776 tree result;
2778 last_vuse = gimple_vuse (stmt);
2779 last_vuse_ptr = &last_vuse;
2780 result = vn_reference_lookup (op, gimple_vuse (stmt),
2781 default_vn_walk_kind, NULL);
2782 last_vuse_ptr = NULL;
2784 /* If we have a VCE, try looking up its operand as it might be stored in
2785 a different type. */
2786 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2787 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2788 default_vn_walk_kind, NULL);
2790 /* We handle type-punning through unions by value-numbering based
2791 on offset and size of the access. Be prepared to handle a
2792 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
2793 if (result
2794 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2796 /* We will be setting the value number of lhs to the value number
2797 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2798 So first simplify and lookup this expression to see if it
2799 is already available. */
2800 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2801 if ((CONVERT_EXPR_P (val)
2802 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2803 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2805 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2806 if ((CONVERT_EXPR_P (tem)
2807 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2808 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2809 TREE_TYPE (val), tem)))
2810 val = tem;
2812 result = val;
2813 if (!is_gimple_min_invariant (val)
2814 && TREE_CODE (val) != SSA_NAME)
2815 result = vn_nary_op_lookup (val, NULL);
2816 /* If the expression is not yet available, value-number lhs to
2817 a new SSA_NAME we create. */
2818 if (!result)
2820 result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (),
2821 "vntemp");
2822 /* Initialize value-number information properly. */
2823 VN_INFO_GET (result)->valnum = result;
2824 VN_INFO (result)->value_id = get_next_value_id ();
2825 VN_INFO (result)->expr = val;
2826 VN_INFO (result)->has_constants = expr_has_constants (val);
2827 VN_INFO (result)->needs_insertion = true;
2828 /* As all "inserted" statements are singleton SCCs, insert
2829 to the valid table. This is strictly needed to
2830 avoid re-generating new value SSA_NAMEs for the same
2831 expression during SCC iteration over and over (the
2832 optimistic table gets cleared after each iteration).
2833 We do not need to insert into the optimistic table, as
2834 lookups there will fall back to the valid table. */
2835 if (current_info == optimistic_info)
2837 current_info = valid_info;
2838 vn_nary_op_insert (val, result);
2839 current_info = optimistic_info;
2841 else
2842 vn_nary_op_insert (val, result);
2843 if (dump_file && (dump_flags & TDF_DETAILS))
2845 fprintf (dump_file, "Inserting name ");
2846 print_generic_expr (dump_file, result, 0);
2847 fprintf (dump_file, " for expression ");
2848 print_generic_expr (dump_file, val, 0);
2849 fprintf (dump_file, "\n");
2854 if (result)
2856 changed = set_ssa_val_to (lhs, result);
2857 if (TREE_CODE (result) == SSA_NAME
2858 && VN_INFO (result)->has_constants)
2860 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2861 VN_INFO (lhs)->has_constants = true;
2864 else
2866 changed = set_ssa_val_to (lhs, lhs);
2867 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
2870 return changed;
2874 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2875 and return true if the value number of the LHS has changed as a result. */
2877 static bool
2878 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2880 bool changed = false;
2881 vn_reference_t vnresult = NULL;
2882 tree result, assign;
2883 bool resultsame = false;
2884 tree vuse = gimple_vuse (stmt);
2885 tree vdef = gimple_vdef (stmt);
2887 /* First we want to lookup using the *vuses* from the store and see
2888 if there the last store to this location with the same address
2889 had the same value.
2891 The vuses represent the memory state before the store. If the
2892 memory state, address, and value of the store is the same as the
2893 last store to this location, then this store will produce the
2894 same memory state as that store.
2896 In this case the vdef versions for this store are value numbered to those
2897 vuse versions, since they represent the same memory state after
2898 this store.
2900 Otherwise, the vdefs for the store are used when inserting into
2901 the table, since the store generates a new memory state. */
2903 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);
2905 if (result)
2907 if (TREE_CODE (result) == SSA_NAME)
2908 result = SSA_VAL (result);
2909 if (TREE_CODE (op) == SSA_NAME)
2910 op = SSA_VAL (op);
2911 resultsame = expressions_equal_p (result, op);
2914 if (!result || !resultsame)
2916 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
2917 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
2918 if (vnresult)
2920 VN_INFO (vdef)->use_processed = true;
2921 return set_ssa_val_to (vdef, vnresult->result_vdef);
2925 if (!result || !resultsame)
2927 if (dump_file && (dump_flags & TDF_DETAILS))
2929 fprintf (dump_file, "No store match\n");
2930 fprintf (dump_file, "Value numbering store ");
2931 print_generic_expr (dump_file, lhs, 0);
2932 fprintf (dump_file, " to ");
2933 print_generic_expr (dump_file, op, 0);
2934 fprintf (dump_file, "\n");
2936 /* Have to set value numbers before insert, since insert is
2937 going to valueize the references in-place. */
2938 if (vdef)
2940 changed |= set_ssa_val_to (vdef, vdef);
2943 /* Do not insert structure copies into the tables. */
2944 if (is_gimple_min_invariant (op)
2945 || is_gimple_reg (op))
2946 vn_reference_insert (lhs, op, vdef, NULL);
2948 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
2949 vn_reference_insert (assign, lhs, vuse, vdef);
2951 else
2953 /* We had a match, so value number the vdef to have the value
2954 number of the vuse it came from. */
2956 if (dump_file && (dump_flags & TDF_DETAILS))
2957 fprintf (dump_file, "Store matched earlier value,"
2958 "value numbering store vdefs to matching vuses.\n");
2960 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
2963 return changed;
2966 /* Visit and value number PHI, return true if the value number
2967 changed. */
2969 static bool
2970 visit_phi (gimple phi)
2972 bool changed = false;
2973 tree result;
2974 tree sameval = VN_TOP;
2975 bool allsame = true;
2976 unsigned i;
2978 /* TODO: We could check for this in init_sccvn, and replace this
2979 with a gcc_assert. */
2980 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2981 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2983 /* See if all non-TOP arguments have the same value. TOP is
2984 equivalent to everything, so we can ignore it. */
2985 for (i = 0; i < gimple_phi_num_args (phi); i++)
2987 tree def = PHI_ARG_DEF (phi, i);
2989 if (TREE_CODE (def) == SSA_NAME)
2990 def = SSA_VAL (def);
2991 if (def == VN_TOP)
2992 continue;
2993 if (sameval == VN_TOP)
2995 sameval = def;
2997 else
2999 if (!expressions_equal_p (def, sameval))
3001 allsame = false;
3002 break;
3007 /* If all value numbered to the same value, the phi node has that
3008 value. */
3009 if (allsame)
3011 if (is_gimple_min_invariant (sameval))
3013 VN_INFO (PHI_RESULT (phi))->has_constants = true;
3014 VN_INFO (PHI_RESULT (phi))->expr = sameval;
3016 else
3018 VN_INFO (PHI_RESULT (phi))->has_constants = false;
3019 VN_INFO (PHI_RESULT (phi))->expr = sameval;
3022 if (TREE_CODE (sameval) == SSA_NAME)
3023 return visit_copy (PHI_RESULT (phi), sameval);
3025 return set_ssa_val_to (PHI_RESULT (phi), sameval);
3028 /* Otherwise, see if it is equivalent to a phi node in this block. */
3029 result = vn_phi_lookup (phi);
3030 if (result)
3032 if (TREE_CODE (result) == SSA_NAME)
3033 changed = visit_copy (PHI_RESULT (phi), result);
3034 else
3035 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3037 else
3039 vn_phi_insert (phi, PHI_RESULT (phi));
3040 VN_INFO (PHI_RESULT (phi))->has_constants = false;
3041 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
3042 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3045 return changed;
3048 /* Return true if EXPR contains constants. */
3050 static bool
3051 expr_has_constants (tree expr)
3053 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3055 case tcc_unary:
3056 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
3058 case tcc_binary:
3059 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
3060 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
3061 /* Constants inside reference ops are rarely interesting, but
3062 it can take a lot of looking to find them. */
3063 case tcc_reference:
3064 case tcc_declaration:
3065 return false;
3066 default:
3067 return is_gimple_min_invariant (expr);
3069 return false;
3072 /* Return true if STMT contains constants. */
3074 static bool
3075 stmt_has_constants (gimple stmt)
3077 if (gimple_code (stmt) != GIMPLE_ASSIGN)
3078 return false;
3080 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
3082 case GIMPLE_UNARY_RHS:
3083 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
3085 case GIMPLE_BINARY_RHS:
3086 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
3087 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
3088 case GIMPLE_TERNARY_RHS:
3089 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
3090 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
3091 || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
3092 case GIMPLE_SINGLE_RHS:
3093 /* Constants inside reference ops are rarely interesting, but
3094 it can take a lot of looking to find them. */
3095 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
3096 default:
3097 gcc_unreachable ();
3099 return false;
3102 /* Replace SSA_NAMES in expr with their value numbers, and return the
3103 result.
3104 This is performed in place. */
3106 static tree
3107 valueize_expr (tree expr)
3109 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3111 case tcc_binary:
3112 TREE_OPERAND (expr, 1) = vn_valueize (TREE_OPERAND (expr, 1));
3113 /* Fallthru. */
3114 case tcc_unary:
3115 TREE_OPERAND (expr, 0) = vn_valueize (TREE_OPERAND (expr, 0));
3116 break;
3117 default:;
3119 return expr;
3122 /* Simplify the binary expression RHS, and return the result if
3123 simplified. */
3125 static tree
3126 simplify_binary_expression (gimple stmt)
3128 tree result = NULL_TREE;
3129 tree op0 = gimple_assign_rhs1 (stmt);
3130 tree op1 = gimple_assign_rhs2 (stmt);
3131 enum tree_code code = gimple_assign_rhs_code (stmt);
3133 /* This will not catch every single case we could combine, but will
3134 catch those with constants. The goal here is to simultaneously
3135 combine constants between expressions, but avoid infinite
3136 expansion of expressions during simplification. */
3137 if (TREE_CODE (op0) == SSA_NAME)
3139 if (VN_INFO (op0)->has_constants
3140 || TREE_CODE_CLASS (code) == tcc_comparison
3141 || code == COMPLEX_EXPR)
3142 op0 = valueize_expr (vn_get_expr_for (op0));
3143 else
3144 op0 = vn_valueize (op0);
3147 if (TREE_CODE (op1) == SSA_NAME)
3149 if (VN_INFO (op1)->has_constants
3150 || code == COMPLEX_EXPR)
3151 op1 = valueize_expr (vn_get_expr_for (op1));
3152 else
3153 op1 = vn_valueize (op1);
3156 /* Pointer plus constant can be represented as invariant address.
3157 Do so to allow further propatation, see also tree forwprop. */
3158 if (code == POINTER_PLUS_EXPR
3159 && host_integerp (op1, 1)
3160 && TREE_CODE (op0) == ADDR_EXPR
3161 && is_gimple_min_invariant (op0))
3162 return build_invariant_address (TREE_TYPE (op0),
3163 TREE_OPERAND (op0, 0),
3164 TREE_INT_CST_LOW (op1));
3166 /* Avoid folding if nothing changed. */
3167 if (op0 == gimple_assign_rhs1 (stmt)
3168 && op1 == gimple_assign_rhs2 (stmt))
3169 return NULL_TREE;
3171 fold_defer_overflow_warnings ();
3173 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3174 if (result)
3175 STRIP_USELESS_TYPE_CONVERSION (result);
3177 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3178 stmt, 0);
3180 /* Make sure result is not a complex expression consisting
3181 of operators of operators (IE (a + b) + (a + c))
3182 Otherwise, we will end up with unbounded expressions if
3183 fold does anything at all. */
3184 if (result && valid_gimple_rhs_p (result))
3185 return result;
3187 return NULL_TREE;
3190 /* Simplify the unary expression RHS, and return the result if
3191 simplified. */
3193 static tree
3194 simplify_unary_expression (gimple stmt)
3196 tree result = NULL_TREE;
3197 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3198 enum tree_code code = gimple_assign_rhs_code (stmt);
3200 /* We handle some tcc_reference codes here that are all
3201 GIMPLE_ASSIGN_SINGLE codes. */
3202 if (code == REALPART_EXPR
3203 || code == IMAGPART_EXPR
3204 || code == VIEW_CONVERT_EXPR
3205 || code == BIT_FIELD_REF)
3206 op0 = TREE_OPERAND (op0, 0);
3208 if (TREE_CODE (op0) != SSA_NAME)
3209 return NULL_TREE;
3211 orig_op0 = op0;
3212 if (VN_INFO (op0)->has_constants)
3213 op0 = valueize_expr (vn_get_expr_for (op0));
3214 else if (CONVERT_EXPR_CODE_P (code)
3215 || code == REALPART_EXPR
3216 || code == IMAGPART_EXPR
3217 || code == VIEW_CONVERT_EXPR
3218 || code == BIT_FIELD_REF)
3220 /* We want to do tree-combining on conversion-like expressions.
3221 Make sure we feed only SSA_NAMEs or constants to fold though. */
3222 tree tem = valueize_expr (vn_get_expr_for (op0));
3223 if (UNARY_CLASS_P (tem)
3224 || BINARY_CLASS_P (tem)
3225 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3226 || TREE_CODE (tem) == SSA_NAME
3227 || TREE_CODE (tem) == CONSTRUCTOR
3228 || is_gimple_min_invariant (tem))
3229 op0 = tem;
3232 /* Avoid folding if nothing changed, but remember the expression. */
3233 if (op0 == orig_op0)
3234 return NULL_TREE;
3236 if (code == BIT_FIELD_REF)
3238 tree rhs = gimple_assign_rhs1 (stmt);
3239 result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3240 op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3242 else
3243 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3244 if (result)
3246 STRIP_USELESS_TYPE_CONVERSION (result);
3247 if (valid_gimple_rhs_p (result))
3248 return result;
3251 return NULL_TREE;
3254 /* Try to simplify RHS using equivalences and constant folding. */
3256 static tree
3257 try_to_simplify (gimple stmt)
3259 enum tree_code code = gimple_assign_rhs_code (stmt);
3260 tree tem;
3262 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
3263 in this case, there is no point in doing extra work. */
3264 if (code == SSA_NAME)
3265 return NULL_TREE;
3267 /* First try constant folding based on our current lattice. */
3268 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize);
3269 if (tem
3270 && (TREE_CODE (tem) == SSA_NAME
3271 || is_gimple_min_invariant (tem)))
3272 return tem;
3274 /* If that didn't work try combining multiple statements. */
3275 switch (TREE_CODE_CLASS (code))
3277 case tcc_reference:
3278 /* Fallthrough for some unary codes that can operate on registers. */
3279 if (!(code == REALPART_EXPR
3280 || code == IMAGPART_EXPR
3281 || code == VIEW_CONVERT_EXPR
3282 || code == BIT_FIELD_REF))
3283 break;
3284 /* We could do a little more with unary ops, if they expand
3285 into binary ops, but it's debatable whether it is worth it. */
3286 case tcc_unary:
3287 return simplify_unary_expression (stmt);
3289 case tcc_comparison:
3290 case tcc_binary:
3291 return simplify_binary_expression (stmt);
3293 default:
3294 break;
3297 return NULL_TREE;
3300 /* Visit and value number USE, return true if the value number
3301 changed. */
3303 static bool
3304 visit_use (tree use)
3306 bool changed = false;
3307 gimple stmt = SSA_NAME_DEF_STMT (use);
3309 mark_use_processed (use);
3311 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3312 if (dump_file && (dump_flags & TDF_DETAILS)
3313 && !SSA_NAME_IS_DEFAULT_DEF (use))
3315 fprintf (dump_file, "Value numbering ");
3316 print_generic_expr (dump_file, use, 0);
3317 fprintf (dump_file, " stmt = ");
3318 print_gimple_stmt (dump_file, stmt, 0, 0);
3321 /* Handle uninitialized uses. */
3322 if (SSA_NAME_IS_DEFAULT_DEF (use))
3323 changed = set_ssa_val_to (use, use);
3324 else
3326 if (gimple_code (stmt) == GIMPLE_PHI)
3327 changed = visit_phi (stmt);
3328 else if (gimple_has_volatile_ops (stmt))
3329 changed = defs_to_varying (stmt);
3330 else if (is_gimple_assign (stmt))
3332 enum tree_code code = gimple_assign_rhs_code (stmt);
3333 tree lhs = gimple_assign_lhs (stmt);
3334 tree rhs1 = gimple_assign_rhs1 (stmt);
3335 tree simplified;
3337 /* Shortcut for copies. Simplifying copies is pointless,
3338 since we copy the expression and value they represent. */
3339 if (code == SSA_NAME
3340 && TREE_CODE (lhs) == SSA_NAME)
3342 changed = visit_copy (lhs, rhs1);
3343 goto done;
3345 simplified = try_to_simplify (stmt);
3346 if (simplified)
3348 if (dump_file && (dump_flags & TDF_DETAILS))
3350 fprintf (dump_file, "RHS ");
3351 print_gimple_expr (dump_file, stmt, 0, 0);
3352 fprintf (dump_file, " simplified to ");
3353 print_generic_expr (dump_file, simplified, 0);
3354 if (TREE_CODE (lhs) == SSA_NAME)
3355 fprintf (dump_file, " has constants %d\n",
3356 expr_has_constants (simplified));
3357 else
3358 fprintf (dump_file, "\n");
3361 /* Setting value numbers to constants will occasionally
3362 screw up phi congruence because constants are not
3363 uniquely associated with a single ssa name that can be
3364 looked up. */
3365 if (simplified
3366 && is_gimple_min_invariant (simplified)
3367 && TREE_CODE (lhs) == SSA_NAME)
3369 VN_INFO (lhs)->expr = simplified;
3370 VN_INFO (lhs)->has_constants = true;
3371 changed = set_ssa_val_to (lhs, simplified);
3372 goto done;
3374 else if (simplified
3375 && TREE_CODE (simplified) == SSA_NAME
3376 && TREE_CODE (lhs) == SSA_NAME)
3378 changed = visit_copy (lhs, simplified);
3379 goto done;
3381 else if (simplified)
3383 if (TREE_CODE (lhs) == SSA_NAME)
3385 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3386 /* We have to unshare the expression or else
3387 valuizing may change the IL stream. */
3388 VN_INFO (lhs)->expr = unshare_expr (simplified);
3391 else if (stmt_has_constants (stmt)
3392 && TREE_CODE (lhs) == SSA_NAME)
3393 VN_INFO (lhs)->has_constants = true;
3394 else if (TREE_CODE (lhs) == SSA_NAME)
3396 /* We reset expr and constantness here because we may
3397 have been value numbering optimistically, and
3398 iterating. They may become non-constant in this case,
3399 even if they were optimistically constant. */
3401 VN_INFO (lhs)->has_constants = false;
3402 VN_INFO (lhs)->expr = NULL_TREE;
3405 if ((TREE_CODE (lhs) == SSA_NAME
3406 /* We can substitute SSA_NAMEs that are live over
3407 abnormal edges with their constant value. */
3408 && !(gimple_assign_copy_p (stmt)
3409 && is_gimple_min_invariant (rhs1))
3410 && !(simplified
3411 && is_gimple_min_invariant (simplified))
3412 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3413 /* Stores or copies from SSA_NAMEs that are live over
3414 abnormal edges are a problem. */
3415 || (code == SSA_NAME
3416 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3417 changed = defs_to_varying (stmt);
3418 else if (REFERENCE_CLASS_P (lhs)
3419 || DECL_P (lhs))
3420 changed = visit_reference_op_store (lhs, rhs1, stmt);
3421 else if (TREE_CODE (lhs) == SSA_NAME)
3423 if ((gimple_assign_copy_p (stmt)
3424 && is_gimple_min_invariant (rhs1))
3425 || (simplified
3426 && is_gimple_min_invariant (simplified)))
3428 VN_INFO (lhs)->has_constants = true;
3429 if (simplified)
3430 changed = set_ssa_val_to (lhs, simplified);
3431 else
3432 changed = set_ssa_val_to (lhs, rhs1);
3434 else
3436 /* First try to lookup the simplified expression. */
3437 if (simplified)
3439 enum gimple_rhs_class rhs_class;
3442 rhs_class = get_gimple_rhs_class (TREE_CODE (simplified));
3443 if ((rhs_class == GIMPLE_UNARY_RHS
3444 || rhs_class == GIMPLE_BINARY_RHS
3445 || rhs_class == GIMPLE_TERNARY_RHS)
3446 && valid_gimple_rhs_p (simplified))
3448 tree result = vn_nary_op_lookup (simplified, NULL);
3449 if (result)
3451 changed = set_ssa_val_to (lhs, result);
3452 goto done;
3457 /* Otherwise visit the original statement. */
3458 switch (vn_get_stmt_kind (stmt))
3460 case VN_NARY:
3461 changed = visit_nary_op (lhs, stmt);
3462 break;
3463 case VN_REFERENCE:
3464 changed = visit_reference_op_load (lhs, rhs1, stmt);
3465 break;
3466 default:
3467 changed = defs_to_varying (stmt);
3468 break;
3472 else
3473 changed = defs_to_varying (stmt);
3475 else if (is_gimple_call (stmt))
3477 tree lhs = gimple_call_lhs (stmt);
3479 /* ??? We could try to simplify calls. */
3481 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3483 if (stmt_has_constants (stmt))
3484 VN_INFO (lhs)->has_constants = true;
3485 else
3487 /* We reset expr and constantness here because we may
3488 have been value numbering optimistically, and
3489 iterating. They may become non-constant in this case,
3490 even if they were optimistically constant. */
3491 VN_INFO (lhs)->has_constants = false;
3492 VN_INFO (lhs)->expr = NULL_TREE;
3495 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3497 changed = defs_to_varying (stmt);
3498 goto done;
3502 if (!gimple_call_internal_p (stmt)
3503 && (/* Calls to the same function with the same vuse
3504 and the same operands do not necessarily return the same
3505 value, unless they're pure or const. */
3506 gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
3507 /* If calls have a vdef, subsequent calls won't have
3508 the same incoming vuse. So, if 2 calls with vdef have the
3509 same vuse, we know they're not subsequent.
3510 We can value number 2 calls to the same function with the
3511 same vuse and the same operands which are not subsequent
3512 the same, because there is no code in the program that can
3513 compare the 2 values... */
3514 || (gimple_vdef (stmt)
3515 /* ... unless the call returns a pointer which does
3516 not alias with anything else. In which case the
3517 information that the values are distinct are encoded
3518 in the IL. */
3519 && !(gimple_call_return_flags (stmt) & ERF_NOALIAS))))
3520 changed = visit_reference_op_call (lhs, stmt);
3521 else
3522 changed = defs_to_varying (stmt);
3524 else
3525 changed = defs_to_varying (stmt);
3527 done:
3528 return changed;
3531 /* Compare two operands by reverse postorder index */
3533 static int
3534 compare_ops (const void *pa, const void *pb)
3536 const tree opa = *((const tree *)pa);
3537 const tree opb = *((const tree *)pb);
3538 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3539 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3540 basic_block bba;
3541 basic_block bbb;
3543 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3544 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3545 else if (gimple_nop_p (opstmta))
3546 return -1;
3547 else if (gimple_nop_p (opstmtb))
3548 return 1;
3550 bba = gimple_bb (opstmta);
3551 bbb = gimple_bb (opstmtb);
3553 if (!bba && !bbb)
3554 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3555 else if (!bba)
3556 return -1;
3557 else if (!bbb)
3558 return 1;
3560 if (bba == bbb)
3562 if (gimple_code (opstmta) == GIMPLE_PHI
3563 && gimple_code (opstmtb) == GIMPLE_PHI)
3564 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3565 else if (gimple_code (opstmta) == GIMPLE_PHI)
3566 return -1;
3567 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3568 return 1;
3569 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3570 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3571 else
3572 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3574 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3577 /* Sort an array containing members of a strongly connected component
3578 SCC so that the members are ordered by RPO number.
3579 This means that when the sort is complete, iterating through the
3580 array will give you the members in RPO order. */
3582 static void
3583 sort_scc (vec<tree> scc)
3585 scc.qsort (compare_ops);
3588 /* Insert the no longer used nary ONARY to the hash INFO. */
3590 static void
3591 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3593 size_t size = sizeof_vn_nary_op (onary->length);
3594 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3595 &info->nary_obstack);
3596 memcpy (nary, onary, size);
3597 vn_nary_op_insert_into (nary, info->nary, false);
3600 /* Insert the no longer used phi OPHI to the hash INFO. */
3602 static void
3603 copy_phi (vn_phi_t ophi, vn_tables_t info)
3605 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3606 void **slot;
3607 memcpy (phi, ophi, sizeof (*phi));
3608 ophi->phiargs.create (0);
3609 slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
3610 gcc_assert (!*slot);
3611 *slot = phi;
3614 /* Insert the no longer used reference OREF to the hash INFO. */
3616 static void
3617 copy_reference (vn_reference_t oref, vn_tables_t info)
3619 vn_reference_t ref;
3620 void **slot;
3621 ref = (vn_reference_t) pool_alloc (info->references_pool);
3622 memcpy (ref, oref, sizeof (*ref));
3623 oref->operands.create (0);
3624 slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
3625 INSERT);
3626 if (*slot)
3627 free_reference (*slot);
3628 *slot = ref;
3631 /* Process a strongly connected component in the SSA graph. */
3633 static void
3634 process_scc (vec<tree> scc)
3636 tree var;
3637 unsigned int i;
3638 unsigned int iterations = 0;
3639 bool changed = true;
3640 htab_iterator hi;
3641 vn_nary_op_t nary;
3642 vn_phi_t phi;
3643 vn_reference_t ref;
3645 /* If the SCC has a single member, just visit it. */
3646 if (scc.length () == 1)
3648 tree use = scc[0];
3649 if (VN_INFO (use)->use_processed)
3650 return;
3651 /* We need to make sure it doesn't form a cycle itself, which can
3652 happen for self-referential PHI nodes. In that case we would
3653 end up inserting an expression with VN_TOP operands into the
3654 valid table which makes us derive bogus equivalences later.
3655 The cheapest way to check this is to assume it for all PHI nodes. */
3656 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3657 /* Fallthru to iteration. */ ;
3658 else
3660 visit_use (use);
3661 return;
3665 /* Iterate over the SCC with the optimistic table until it stops
3666 changing. */
3667 current_info = optimistic_info;
3668 while (changed)
3670 changed = false;
3671 iterations++;
3672 if (dump_file && (dump_flags & TDF_DETAILS))
3673 fprintf (dump_file, "Starting iteration %d\n", iterations);
3674 /* As we are value-numbering optimistically we have to
3675 clear the expression tables and the simplified expressions
3676 in each iteration until we converge. */
3677 htab_empty (optimistic_info->nary);
3678 htab_empty (optimistic_info->phis);
3679 htab_empty (optimistic_info->references);
3680 obstack_free (&optimistic_info->nary_obstack, NULL);
3681 gcc_obstack_init (&optimistic_info->nary_obstack);
3682 empty_alloc_pool (optimistic_info->phis_pool);
3683 empty_alloc_pool (optimistic_info->references_pool);
3684 FOR_EACH_VEC_ELT (scc, i, var)
3685 VN_INFO (var)->expr = NULL_TREE;
3686 FOR_EACH_VEC_ELT (scc, i, var)
3687 changed |= visit_use (var);
3690 statistics_histogram_event (cfun, "SCC iterations", iterations);
3692 /* Finally, copy the contents of the no longer used optimistic
3693 table to the valid table. */
3694 FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
3695 copy_nary (nary, valid_info);
3696 FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
3697 copy_phi (phi, valid_info);
3698 FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
3699 copy_reference (ref, valid_info);
3701 current_info = valid_info;
3705 /* Pop the components of the found SCC for NAME off the SCC stack
3706 and process them. Returns true if all went well, false if
3707 we run into resource limits. */
3709 static bool
3710 extract_and_process_scc_for_name (tree name)
3712 vec<tree> scc = vNULL;
3713 tree x;
3715 /* Found an SCC, pop the components off the SCC stack and
3716 process them. */
3719 x = sccstack.pop ();
3721 VN_INFO (x)->on_sccstack = false;
3722 scc.safe_push (x);
3723 } while (x != name);
3725 /* Bail out of SCCVN in case a SCC turns out to be incredibly large. */
3726 if (scc.length ()
3727 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3729 if (dump_file)
3730 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3731 "SCC size %u exceeding %u\n", scc.length (),
3732 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3734 scc.release ();
3735 return false;
3738 if (scc.length () > 1)
3739 sort_scc (scc);
3741 if (dump_file && (dump_flags & TDF_DETAILS))
3742 print_scc (dump_file, scc);
3744 process_scc (scc);
3746 scc.release ();
3748 return true;
3751 /* Depth first search on NAME to discover and process SCC's in the SSA
3752 graph.
3753 Execution of this algorithm relies on the fact that the SCC's are
3754 popped off the stack in topological order.
3755 Returns true if successful, false if we stopped processing SCC's due
3756 to resource constraints. */
3758 static bool
3759 DFS (tree name)
3761 vec<ssa_op_iter> itervec = vNULL;
3762 vec<tree> namevec = vNULL;
3763 use_operand_p usep = NULL;
3764 gimple defstmt;
3765 tree use;
3766 ssa_op_iter iter;
3768 start_over:
3769 /* SCC info */
3770 VN_INFO (name)->dfsnum = next_dfs_num++;
3771 VN_INFO (name)->visited = true;
3772 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3774 sccstack.safe_push (name);
3775 VN_INFO (name)->on_sccstack = true;
3776 defstmt = SSA_NAME_DEF_STMT (name);
3778 /* Recursively DFS on our operands, looking for SCC's. */
3779 if (!gimple_nop_p (defstmt))
3781 /* Push a new iterator. */
3782 if (gimple_code (defstmt) == GIMPLE_PHI)
3783 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3784 else
3785 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3787 else
3788 clear_and_done_ssa_iter (&iter);
3790 while (1)
3792 /* If we are done processing uses of a name, go up the stack
3793 of iterators and process SCCs as we found them. */
3794 if (op_iter_done (&iter))
3796 /* See if we found an SCC. */
3797 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3798 if (!extract_and_process_scc_for_name (name))
3800 namevec.release ();
3801 itervec.release ();
3802 return false;
3805 /* Check if we are done. */
3806 if (namevec.is_empty ())
3808 namevec.release ();
3809 itervec.release ();
3810 return true;
3813 /* Restore the last use walker and continue walking there. */
3814 use = name;
3815 name = namevec.pop ();
3816 memcpy (&iter, &itervec.last (),
3817 sizeof (ssa_op_iter));
3818 itervec.pop ();
3819 goto continue_walking;
3822 use = USE_FROM_PTR (usep);
3824 /* Since we handle phi nodes, we will sometimes get
3825 invariants in the use expression. */
3826 if (TREE_CODE (use) == SSA_NAME)
3828 if (! (VN_INFO (use)->visited))
3830 /* Recurse by pushing the current use walking state on
3831 the stack and starting over. */
3832 itervec.safe_push (iter);
3833 namevec.safe_push (name);
3834 name = use;
3835 goto start_over;
3837 continue_walking:
3838 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3839 VN_INFO (use)->low);
3841 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3842 && VN_INFO (use)->on_sccstack)
3844 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3845 VN_INFO (name)->low);
3849 usep = op_iter_next_use (&iter);
3853 /* Allocate a value number table. */
3855 static void
3856 allocate_vn_table (vn_tables_t table)
3858 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3859 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3860 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3861 free_reference);
3863 gcc_obstack_init (&table->nary_obstack);
3864 table->phis_pool = create_alloc_pool ("VN phis",
3865 sizeof (struct vn_phi_s),
3866 30);
3867 table->references_pool = create_alloc_pool ("VN references",
3868 sizeof (struct vn_reference_s),
3869 30);
3872 /* Free a value number table. */
3874 static void
3875 free_vn_table (vn_tables_t table)
3877 htab_delete (table->phis);
3878 htab_delete (table->nary);
3879 htab_delete (table->references);
3880 obstack_free (&table->nary_obstack, NULL);
3881 free_alloc_pool (table->phis_pool);
3882 free_alloc_pool (table->references_pool);
3885 static void
3886 init_scc_vn (void)
3888 size_t i;
3889 int j;
3890 int *rpo_numbers_temp;
3892 calculate_dominance_info (CDI_DOMINATORS);
3893 sccstack.create (0);
3894 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3895 free);
3897 constant_value_ids = BITMAP_ALLOC (NULL);
3899 next_dfs_num = 1;
3900 next_value_id = 1;
3902 vn_ssa_aux_table.create (num_ssa_names + 1);
3903 /* VEC_alloc doesn't actually grow it to the right size, it just
3904 preallocates the space to do so. */
3905 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
3906 gcc_obstack_init (&vn_ssa_aux_obstack);
3908 shared_lookup_phiargs.create (0);
3909 shared_lookup_references.create (0);
3910 rpo_numbers = XNEWVEC (int, last_basic_block);
3911 rpo_numbers_temp = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
3912 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3914 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
3915 the i'th block in RPO order is bb. We want to map bb's to RPO
3916 numbers, so we need to rearrange this array. */
3917 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3918 rpo_numbers[rpo_numbers_temp[j]] = j;
3920 XDELETE (rpo_numbers_temp);
3922 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3924 /* Create the VN_INFO structures, and initialize value numbers to
3925 TOP. */
3926 for (i = 0; i < num_ssa_names; i++)
3928 tree name = ssa_name (i);
3929 if (name)
3931 VN_INFO_GET (name)->valnum = VN_TOP;
3932 VN_INFO (name)->expr = NULL_TREE;
3933 VN_INFO (name)->value_id = 0;
3937 renumber_gimple_stmt_uids ();
3939 /* Create the valid and optimistic value numbering tables. */
3940 valid_info = XCNEW (struct vn_tables_s);
3941 allocate_vn_table (valid_info);
3942 optimistic_info = XCNEW (struct vn_tables_s);
3943 allocate_vn_table (optimistic_info);
3946 void
3947 free_scc_vn (void)
3949 size_t i;
3951 htab_delete (constant_to_value_id);
3952 BITMAP_FREE (constant_value_ids);
3953 shared_lookup_phiargs.release ();
3954 shared_lookup_references.release ();
3955 XDELETEVEC (rpo_numbers);
3957 for (i = 0; i < num_ssa_names; i++)
3959 tree name = ssa_name (i);
3960 if (name
3961 && VN_INFO (name)->needs_insertion)
3962 release_ssa_name (name);
3964 obstack_free (&vn_ssa_aux_obstack, NULL);
3965 vn_ssa_aux_table.release ();
3967 sccstack.release ();
3968 free_vn_table (valid_info);
3969 XDELETE (valid_info);
3970 free_vn_table (optimistic_info);
3971 XDELETE (optimistic_info);
3974 /* Set *ID according to RESULT. */
3976 static void
3977 set_value_id_for_result (tree result, unsigned int *id)
3979 if (result && TREE_CODE (result) == SSA_NAME)
3980 *id = VN_INFO (result)->value_id;
3981 else if (result && is_gimple_min_invariant (result))
3982 *id = get_or_alloc_constant_value_id (result);
3983 else
3984 *id = get_next_value_id ();
3987 /* Set the value ids in the valid hash tables. */
3989 static void
3990 set_hashtable_value_ids (void)
3992 htab_iterator hi;
3993 vn_nary_op_t vno;
3994 vn_reference_t vr;
3995 vn_phi_t vp;
3997 /* Now set the value ids of the things we had put in the hash
3998 table. */
4000 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
4001 vno, vn_nary_op_t, hi)
4002 set_value_id_for_result (vno->result, &vno->value_id);
4004 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
4005 vp, vn_phi_t, hi)
4006 set_value_id_for_result (vp->result, &vp->value_id);
4008 FOR_EACH_HTAB_ELEMENT (valid_info->references,
4009 vr, vn_reference_t, hi)
4010 set_value_id_for_result (vr->result, &vr->value_id);
4013 /* Do SCCVN. Returns true if it finished, false if we bailed out
4014 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
4015 how we use the alias oracle walking during the VN process. */
4017 bool
4018 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4020 size_t i;
4021 tree param;
4023 default_vn_walk_kind = default_vn_walk_kind_;
4025 init_scc_vn ();
4026 current_info = valid_info;
4028 for (param = DECL_ARGUMENTS (current_function_decl);
4029 param;
4030 param = DECL_CHAIN (param))
4032 tree def = ssa_default_def (cfun, param);
4033 if (def)
4034 VN_INFO (def)->valnum = def;
4037 for (i = 1; i < num_ssa_names; ++i)
4039 tree name = ssa_name (i);
4040 if (name
4041 && VN_INFO (name)->visited == false
4042 && !has_zero_uses (name))
4043 if (!DFS (name))
4045 free_scc_vn ();
4046 return false;
4050 /* Initialize the value ids. */
4052 for (i = 1; i < num_ssa_names; ++i)
4054 tree name = ssa_name (i);
4055 vn_ssa_aux_t info;
4056 if (!name)
4057 continue;
4058 info = VN_INFO (name);
4059 if (info->valnum == name
4060 || info->valnum == VN_TOP)
4061 info->value_id = get_next_value_id ();
4062 else if (is_gimple_min_invariant (info->valnum))
4063 info->value_id = get_or_alloc_constant_value_id (info->valnum);
4066 /* Propagate. */
4067 for (i = 1; i < num_ssa_names; ++i)
4069 tree name = ssa_name (i);
4070 vn_ssa_aux_t info;
4071 if (!name)
4072 continue;
4073 info = VN_INFO (name);
4074 if (TREE_CODE (info->valnum) == SSA_NAME
4075 && info->valnum != name
4076 && info->value_id != VN_INFO (info->valnum)->value_id)
4077 info->value_id = VN_INFO (info->valnum)->value_id;
4080 set_hashtable_value_ids ();
4082 if (dump_file && (dump_flags & TDF_DETAILS))
4084 fprintf (dump_file, "Value numbers:\n");
4085 for (i = 0; i < num_ssa_names; i++)
4087 tree name = ssa_name (i);
4088 if (name
4089 && VN_INFO (name)->visited
4090 && SSA_VAL (name) != name)
4092 print_generic_expr (dump_file, name, 0);
4093 fprintf (dump_file, " = ");
4094 print_generic_expr (dump_file, SSA_VAL (name), 0);
4095 fprintf (dump_file, "\n");
4100 return true;
4103 /* Return the maximum value id we have ever seen. */
4105 unsigned int
4106 get_max_value_id (void)
4108 return next_value_id;
4111 /* Return the next unique value id. */
4113 unsigned int
4114 get_next_value_id (void)
4116 return next_value_id++;
4120 /* Compare two expressions E1 and E2 and return true if they are equal. */
4122 bool
4123 expressions_equal_p (tree e1, tree e2)
4125 /* The obvious case. */
4126 if (e1 == e2)
4127 return true;
4129 /* If only one of them is null, they cannot be equal. */
4130 if (!e1 || !e2)
4131 return false;
4133 /* Now perform the actual comparison. */
4134 if (TREE_CODE (e1) == TREE_CODE (e2)
4135 && operand_equal_p (e1, e2, OEP_PURE_SAME))
4136 return true;
4138 return false;
4142 /* Return true if the nary operation NARY may trap. This is a copy
4143 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
4145 bool
4146 vn_nary_may_trap (vn_nary_op_t nary)
4148 tree type;
4149 tree rhs2 = NULL_TREE;
4150 bool honor_nans = false;
4151 bool honor_snans = false;
4152 bool fp_operation = false;
4153 bool honor_trapv = false;
4154 bool handled, ret;
4155 unsigned i;
4157 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4158 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4159 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4161 type = nary->type;
4162 fp_operation = FLOAT_TYPE_P (type);
4163 if (fp_operation)
4165 honor_nans = flag_trapping_math && !flag_finite_math_only;
4166 honor_snans = flag_signaling_nans != 0;
4168 else if (INTEGRAL_TYPE_P (type)
4169 && TYPE_OVERFLOW_TRAPS (type))
4170 honor_trapv = true;
4172 if (nary->length >= 2)
4173 rhs2 = nary->op[1];
4174 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4175 honor_trapv,
4176 honor_nans, honor_snans, rhs2,
4177 &handled);
4178 if (handled
4179 && ret)
4180 return true;
4182 for (i = 0; i < nary->length; ++i)
4183 if (tree_could_trap_p (nary->op[i]))
4184 return true;
4186 return false;