gcc/tree-ssa-sccvn.c

/* SCC value numbering for trees
   Copyright (C) 2006-2014 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stor-layout.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "hash-table.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "dumpfile.h"
#include "alloc-pool.h"
#include "flags.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "tree-cfg.h"
#include "domwalk.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.
*/
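
/* As a small worked illustration of the optimistic iteration (a
   sketch, not code exercised here): for the loop

       i_1 = PHI <0 (preheader), i_2 (latch)>
       i_2 = i_1 + 0;

   the SCC {i_1, i_2} is first value numbered assuming the unvisited
   backedge argument is VN_TOP, so i_1 gets value number 0; then
   i_2 = 0 + 0 folds to 0, the PHI becomes PHI <0, 0> and keeps value
   number 0, and the iteration reaches a fixpoint with both names
   numbering to the constant 0.  If the latch increment were instead
   i_2 = i_1 + 1, the optimistic assumption would be contradicted on
   the next iteration and both names would end up varying.  */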

/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : typed_noop_remove <vn_nary_op_s>
{
  typedef vn_nary_op_s value_type;
  typedef vn_nary_op_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const value_type *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const value_type *vno1, const compare_type *vno2)
{
  return vn_nary_op_eq (vno1, vno2);
}

typedef hash_table <vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;


/* vn_phi hashtable helpers.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher
{
  typedef vn_phi_s value_type;
  typedef vn_phi_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const value_type *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const value_type *vp1, const compare_type *vp2)
{
  return vn_phi_eq (vp1, vp2);
}

/* Free a phi operation structure VP.  */

inline void
vn_phi_hasher::remove (value_type *phi)
{
  phi->phiargs.release ();
}

typedef hash_table <vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;


/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Free a reference operation structure VP.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}


/* vn_reference hashtable helpers.  */

struct vn_reference_hasher
{
  typedef vn_reference_s value_type;
  typedef vn_reference_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const value_type *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const value_type *v, const compare_type *c)
{
  return vn_reference_eq (v, c);
}

inline void
vn_reference_hasher::remove (value_type *v)
{
  free_reference (v);
}

typedef hash_table <vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;


/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type nary;
  vn_phi_table_type phis;
  vn_reference_table_type references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;


/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : typed_free_remove <vn_constant_s>
{
  typedef vn_constant_s value_type;
  typedef vn_constant_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const value_type *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const value_type *vc1, const compare_type *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table <vn_constant_hasher> constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;
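
/* For example (a sketch of the protocol, defined by the SCC walk
   elsewhere in this file): while an SCC is being iterated, lookups and
   inserts go through current_info == optimistic_info, whose tables are
   cleared between iterations; once the SCC has stabilized it is
   processed once more with current_info == valid_info so that only
   proven facts are kept.  */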

/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static vec<tree> sccstack;


/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
    vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
  return newinfo;
}
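
/* Typical usage (a sketch; the real initialization happens elsewhere
   in this file):

     VN_INFO_GET (name)->valnum = VN_TOP;
     ...
     tree val = VN_INFO (name)->valnum;

   VN_INFO_GET allocates the entry exactly once per name, and VN_INFO
   asserts that this has already happened.  */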

/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;
  enum tree_code code;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is not an assignment use it directly.  */
  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* Note that we can valueize here because we clear the cached
     simplified expressions after each optimistic iteration.  */
  code = gimple_assign_rhs_code (def_stmt);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      if ((code == REALPART_EXPR
	   || code == IMAGPART_EXPR
	   || code == VIEW_CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
				      0)) == SSA_NAME)
	expr = fold_build1 (code,
			    gimple_expr_type (def_stmt),
			    vn_valueize (TREE_OPERAND
					   (gimple_assign_rhs1 (def_stmt), 0)));
      break;

    case tcc_unary:
      expr = fold_build1 (code,
			  gimple_expr_type (def_stmt),
			  vn_valueize (gimple_assign_rhs1 (def_stmt)));
      break;

    case tcc_binary:
      expr = fold_build2 (code,
			  gimple_expr_type (def_stmt),
			  vn_valueize (gimple_assign_rhs1 (def_stmt)),
			  vn_valueize (gimple_assign_rhs2 (def_stmt)));
      break;

    case tcc_exceptional:
      if (code == CONSTRUCTOR
	  && TREE_CODE
	       (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
	expr = gimple_assign_rhs1 (def_stmt);
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
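
/* As an example (a sketch): if x_1 value-numbers to y_2 and y_2 is
   defined by "y_2 = z_3 + 1", vn_get_expr_for (x_1) rebuilds the
   expression "z + 1" with z_3 valueized, and caches it in the aux
   info so subsequent queries are cheap.  */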

/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
		  return VN_NARY;

		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      case tcc_constant:
		return VN_CONSTANT;

	      default:
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}
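
/* For instance (a sketch of the classification above): "x_1 = y_2 + 1"
   is VN_NARY, "x_1 = a.b" is VN_REFERENCE, "x_1 = PHI <...>" is
   VN_PHI, and "x_1 = &a" is VN_CONSTANT when the address is invariant
   and VN_REFERENCE otherwise.  */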

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id.find_slot_with_hash (&vc, vc.hashcode, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id.find_slot_with_hash (&vc, vc.hashcode, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (vro->off != -1)
	{
	  if (off == -1)
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (off != -1
	      && off != 0)
	    result = iterative_hash_hashval_t (off, result);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  result = iterative_hash_hashval_t (TREE_CODE (op), result);
		  result = iterative_hash_expr (op, result);
		}
	    }
	  else
	    result = vn_reference_op_compute_hash (vro, result);
	}
    }
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
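
/* As an example of the offset accumulation above (a sketch): the
   operand strings for MEM[&a + 4] and for a.f (with f at byte offset
   4 and the decl canonicalized to MEM[&a], see
   copy_reference_ops_from_ref below) accumulate the same OFF and hash
   the same decl A, so both spellings land in the same bucket and
   vn_reference_eq decides whether they really match.  */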

/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  if (vro1->off == -1)
	    break;
	  off1 += vro1->off;
	}
      for (; vr2->operands.iterate (j, &vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  if (vro2->off == -1)
	    break;
	  off2 += vro2->off;
	}
      if (off1 != off2)
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
	 || vr2->operands.length () != j);

  return true;
}

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      result->reserve (3);

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      result->quick_push (temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case MODIFY_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case WITH_SIZE_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.off = 0;
	  break;
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  if (tree_fits_shwi_p (TREE_OPERAND (ref, 1)))
	    temp.off = tree_to_shwi (TREE_OPERAND (ref, 1));
	  break;
	case BIT_FIELD_REF:
	  /* Record bits and position.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& TREE_CODE (this_offset) == INTEGER_CST)
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    offset_int off
		      = (wi::to_offset (this_offset)
			 + wi::lrshift (wi::to_offset (bit_offset),
					LOG2_BITS_PER_UNIT));
		    if (wi::fits_shwi_p (off)
			/* Prohibit value-numbering zero offset components
			   of addresses the same before the pass folding
			   __builtin_object_size had a chance to run
			   (checking cfun->after_inlining does the
			   trick here).  */
			&& (TREE_CODE (orig) != ADDR_EXPR
			    || off != 0
			    || cfun->after_inlining))
		      temp.off = off.to_shwi ();
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* Record index as operand.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  /* Always record lower bounds and element size.  */
	  temp.op1 = array_ref_low_bound (ref);
	  temp.op2 = array_ref_element_size (ref);
	  if (TREE_CODE (temp.op0) == INTEGER_CST
	      && TREE_CODE (temp.op1) == INTEGER_CST
	      && TREE_CODE (temp.op2) == INTEGER_CST)
	    {
	      offset_int off = ((wi::to_offset (temp.op0)
				 - wi::to_offset (temp.op1))
				* wi::to_offset (temp.op2));
	      if (wi::fits_shwi_p (off))
		temp.off = off.to_shwi ();
	    }
	  break;
	case VAR_DECL:
	  if (DECL_HARD_REGISTER (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	  /* Canonicalize decls to MEM[&decl] which is what we end up with
	     when valueizing MEM[ptr] with ptr = &decl.  */
	  temp.opcode = MEM_REF;
	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
	  temp.off = 0;
	  result->safe_push (temp);
	  temp.opcode = ADDR_EXPR;
	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
	  temp.type = TREE_TYPE (temp.op0);
	  temp.off = -1;
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthrough.  */
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (IE they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration.  */
	case REALPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
	  || TREE_CODE (ref) == MODIFY_EXPR
	  || TREE_CODE (ref) == WITH_SIZE_EXPR
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
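
/* For illustration (a sketch of the decomposition): for a.f, with a a
   VAR_DECL whose field f sits at byte offset 4, the pushed operands
   are roughly

     { COMPONENT_REF f (off 4), MEM_REF 0 (off 0), ADDR_EXPR &a }

   outermost component first, with the decl canonicalized to MEM[&a]
   as described above.  */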

/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
	size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!tree_fits_uhwi_p (size_tree))
	size = -1;
      else
	size = tree_to_uhwi (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      vn_reference_op_t pop = &ops[i-1];
	      base = TREE_OPERAND (op->op0, 0);
	      if (pop->off == -1)
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  base_alias_set = get_deref_alias_set (op->op0);
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += tree_to_shwi (op->op1);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */

	    if (op->op1
		|| !tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
	      max_size = -1;
	    else
	      {
		offset += (tree_to_uhwi (DECL_FIELD_OFFSET (field))
			   * BITS_PER_UNIT);
		offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!tree_fits_shwi_p (op->op0)
	      || !tree_fits_shwi_p (op->op1)
	      || !tree_fits_shwi_p (op->op2))
	    max_size = -1;
	  else
	    {
	      HOST_WIDE_INT hindex = tree_to_shwi (op->op0);
	      hindex -= tree_to_shwi (op->op1);
	      hindex *= tree_to_shwi (op->op2);
	      hindex *= BITS_PER_UNIT;
	      offset += hindex;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case CONST_DECL:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  return true;
}
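
/* This is roughly the inverse of copy_reference_ops_from_ref as far
   as the alias oracle is concerned (a sketch): the operand vector for
   a.f shown earlier is turned back into an ao_ref with base A, bit
   offset 4 * BITS_PER_UNIT and the size of f, which
   refs_may_alias_p_1 can disambiguate against other accesses.  */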

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
			      vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  temp.off = -1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static vec<vn_reference_op_s>
create_reference_ops_from_call (gimple call)
{
  vec<vn_reference_op_s> result = vNULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  HOST_WIDE_INT addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
					     &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      offset_int off = offset_int::from (mem_op->op0, SIGNED);
      off += addr_offset;
      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
    }
}
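
/* Concretely (a sketch): with s.a.b at byte offset 16 in s, the pair
   { MEM_REF +8, ADDR_EXPR &s.a.b } is rewritten to
   { MEM_REF +24, ADDR_EXPR &s }, so the two spellings of the same
   memory slot value-number alike.  */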

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple def_stmt;
  enum tree_code code;
  offset_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = offset_int::from (mem_op->op0, SIGNED);

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						 &addr_offset);
      if (!addr_base
	  || TREE_CODE (addr_base) != MEM_REF)
	return;

      off += addr_offset;
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
	  || TREE_CODE (ptroff) != INTEGER_CST)
	return;

      off += wi::to_offset (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (tree_fits_shwi_p (mem_op->op0))
    mem_op->off = tree_to_shwi (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}

/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
	arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					 arg1 ? 2 : 1,
					 arg0->op0,
					 arg1 ? arg1->op0 : NULL);
	  if (folded
	      && TREE_CODE (folded) == NOP_EXPR)
	    folded = TREE_OPERAND (folded, 0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
	   && TREE_CODE (op->op0) == INTEGER_CST
	   && integer_zerop (op->op1)
	   && operands.length () == 2)
    {
      vn_reference_op_t arg0;
      arg0 = &operands[1];
      if (arg0->opcode == STRING_CST
	  && (TYPE_MODE (op->type)
	      == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
	  && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
	  && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
	  && tree_int_cst_sgn (op->op0) >= 0
	  && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
	return build_int_cst_type (op->type,
				   (TREE_STRING_POINTER (arg0->op0)
				    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}
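
/* For example (a sketch): a read described by the operands for
   "xyz"[1] with a single-byte integer type passes the checks above
   and is simplified to the character constant 'y'.  */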

/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static vec<vn_reference_op_s>
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && orig[i - 1].opcode == MEM_REF)
	vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && orig[i - 1].opcode == MEM_REF)
	vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && vro->off == -1
	       && TREE_CODE (vro->op0) == INTEGER_CST
	       && TREE_CODE (vro->op1) == INTEGER_CST
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  offset_int off = ((wi::to_offset (vro->op0)
			     - wi::to_offset (vro->op1))
			    * wi::to_offset (vro->op2));
	  if (wi::fits_shwi_p (off))
	    vro->off = off.to_shwi ();
	}
    }

  return orig;
}

static vec<vn_reference_op_s>
valueize_refs (vec<vn_reference_op_s> orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}

static vec<vn_reference_op_s> shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
					      valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  vn_reference_s **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = current_info->references.find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references.find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
		       unsigned int cnt, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  vn_reference_s **slot;
  hashval_t hash;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = current_info->references.find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references.find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}

/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
					  alias_set_type set,
					  tree type,
					  vec<vn_reference_op_s,
					      va_heap> operands,
					  tree value)
{
  struct vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
				     operands.copy (), value, value_id);
}
1542 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1543 from the statement defining VUSE and if not successful tries to
1544 translate *REFP and VR_ through an aggregate copy at the definition
1545 of VUSE. */
1547 static void *
1548 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1549 bool disambiguate_only)
1551 vn_reference_t vr = (vn_reference_t)vr_;
1552 gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
1553 tree base;
1554 HOST_WIDE_INT offset, maxsize;
1555 static vec<vn_reference_op_s>
1556 lhs_ops = vNULL;
1557 ao_ref lhs_ref;
1558 bool lhs_ref_ok = false;
1560 /* First try to disambiguate after value-replacing in the definitions LHS. */
1561 if (is_gimple_assign (def_stmt))
1563 vec<vn_reference_op_s> tem;
1564 tree lhs = gimple_assign_lhs (def_stmt);
1565 bool valueized_anything = false;
1566 /* Avoid re-allocation overhead. */
1567 lhs_ops.truncate (0);
1568 copy_reference_ops_from_ref (lhs, &lhs_ops);
1569 tem = lhs_ops;
1570 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1571 gcc_assert (lhs_ops == tem);
1572 if (valueized_anything)
1574 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1575 get_alias_set (lhs),
1576 TREE_TYPE (lhs), lhs_ops);
1577 if (lhs_ref_ok
1578 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1579 return NULL;
1581 else
1583 ao_ref_init (&lhs_ref, lhs);
1584 lhs_ref_ok = true;
1587 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1588 && gimple_call_num_args (def_stmt) <= 4)
1590 /* For builtin calls valueize its arguments and call the
1591 alias oracle again. Valueization may improve points-to
1592 info of pointers and constify size and position arguments.
1593 Originally this was motivated by PR61034 which has
1594 conditional calls to free falsely clobbering ref because
1595 of imprecise points-to info of the argument. */
1596 tree oldargs[4];
1597 bool valueized_anything;
1598 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1600 oldargs[i] = gimple_call_arg (def_stmt, i);
1601 if (TREE_CODE (oldargs[i]) == SSA_NAME
1602 && VN_INFO (oldargs[i])->valnum != oldargs[i])
1604 gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
1605 valueized_anything = true;
1608 if (valueized_anything)
1610 bool res = call_may_clobber_ref_p_1 (def_stmt, ref);
1611 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1612 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1613 if (!res)
1614 return NULL;
1618 if (disambiguate_only)
1619 return (void *)-1;
1621 base = ao_ref_base (ref);
1622 offset = ref->offset;
1623 maxsize = ref->max_size;
1625 /* If we cannot constrain the size of the reference we cannot
1626 test if anything kills it. */
1627 if (maxsize == -1)
1628 return (void *)-1;
1630 /* We can't deduce anything useful from clobbers. */
1631 if (gimple_clobber_p (def_stmt))
1632 return (void *)-1;
1634 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1635 from that definition.
1636 1) Memset. */
1637 if (is_gimple_reg_type (vr->type)
1638 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1639 && integer_zerop (gimple_call_arg (def_stmt, 1))
1640 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1641 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1643 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1644 tree base2;
1645 HOST_WIDE_INT offset2, size2, maxsize2;
1646 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
1647 size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
1648 if ((unsigned HOST_WIDE_INT)size2 / 8
1649 == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
1650 && maxsize2 != -1
1651 && operand_equal_p (base, base2, 0)
1652 && offset2 <= offset
1653 && offset2 + size2 >= offset + maxsize)
1655 tree val = build_zero_cst (vr->type);
1656 return vn_reference_lookup_or_insert_for_pieces
1657 (vuse, vr->set, vr->type, vr->operands, val);
1661 /* 2) Assignment from an empty CONSTRUCTOR. */
1662 else if (is_gimple_reg_type (vr->type)
1663 && gimple_assign_single_p (def_stmt)
1664 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1665 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1667 tree base2;
1668 HOST_WIDE_INT offset2, size2, maxsize2;
1669 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1670 &offset2, &size2, &maxsize2);
1671 if (maxsize2 != -1
1672 && operand_equal_p (base, base2, 0)
1673 && offset2 <= offset
1674 && offset2 + size2 >= offset + maxsize)
1676 tree val = build_zero_cst (vr->type);
1677 return vn_reference_lookup_or_insert_for_pieces
1678 (vuse, vr->set, vr->type, vr->operands, val);
1682 /* 3) Assignment from a constant. We can use folds native encode/interpret
1683 routines to extract the assigned bits. */
1684 else if (vn_walk_kind == VN_WALKREWRITE
1685 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1686 && ref->size == maxsize
1687 && maxsize % BITS_PER_UNIT == 0
1688 && offset % BITS_PER_UNIT == 0
1689 && is_gimple_reg_type (vr->type)
1690 && gimple_assign_single_p (def_stmt)
1691 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
1693 tree base2;
1694 HOST_WIDE_INT offset2, size2, maxsize2;
1695 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1696 &offset2, &size2, &maxsize2);
1697 if (maxsize2 != -1
1698 && maxsize2 == size2
1699 && size2 % BITS_PER_UNIT == 0
1700 && offset2 % BITS_PER_UNIT == 0
1701 && operand_equal_p (base, base2, 0)
1702 && offset2 <= offset
1703 && offset2 + size2 >= offset + maxsize)
1705 /* We support up to 512-bit values (for V8DFmode). */
1706 unsigned char buffer[64];
1707 int len;
1709 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1710 buffer, sizeof (buffer));
1711 if (len > 0)
1713 tree val = native_interpret_expr (vr->type,
1714 buffer
1715 + ((offset - offset2)
1716 / BITS_PER_UNIT),
1717 ref->size / BITS_PER_UNIT);
1718 if (val)
1719 return vn_reference_lookup_or_insert_for_pieces
1720 (vuse, vr->set, vr->type, vr->operands, val);
1725 /* 4) Assignment from an SSA name which definition we may be able
1726 to access pieces from. */
1727 else if (ref->size == maxsize
1728 && is_gimple_reg_type (vr->type)
1729 && gimple_assign_single_p (def_stmt)
1730 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1732 tree rhs1 = gimple_assign_rhs1 (def_stmt);
1733 gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
1734 if (is_gimple_assign (def_stmt2)
1735 && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1736 || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1737 && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1739 tree base2;
1740 HOST_WIDE_INT offset2, size2, maxsize2, off;
1741 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1742 &offset2, &size2, &maxsize2);
1743 off = offset - offset2;
1744 if (maxsize2 != -1
1745 && maxsize2 == size2
1746 && operand_equal_p (base, base2, 0)
1747 && offset2 <= offset
1748 && offset2 + size2 >= offset + maxsize)
1750 tree val = NULL_TREE;
1751 HOST_WIDE_INT elsz
1752 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
1753 if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
1755 if (off == 0)
1756 val = gimple_assign_rhs1 (def_stmt2);
1757 else if (off == elsz)
1758 val = gimple_assign_rhs2 (def_stmt2);
1760 else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1761 && off % elsz == 0)
1763 tree ctor = gimple_assign_rhs1 (def_stmt2);
1764 unsigned i = off / elsz;
1765 if (i < CONSTRUCTOR_NELTS (ctor))
1767 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
1768 if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
1770 if (TREE_CODE (TREE_TYPE (elt->value))
1771 != VECTOR_TYPE)
1772 val = elt->value;
1776 if (val)
1777 return vn_reference_lookup_or_insert_for_pieces
1778 (vuse, vr->set, vr->type, vr->operands, val);
1783 /* 5) For aggregate copies translate the reference through them if
1784 the copy kills ref. */
1785 else if (vn_walk_kind == VN_WALKREWRITE
1786 && gimple_assign_single_p (def_stmt)
1787 && (DECL_P (gimple_assign_rhs1 (def_stmt))
1788 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
1789 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1791 tree base2;
1792 HOST_WIDE_INT offset2, size2, maxsize2;
1793 int i, j;
1794 auto_vec<vn_reference_op_s> rhs;
1795 vn_reference_op_t vro;
1796 ao_ref r;
1798 if (!lhs_ref_ok)
1799 return (void *)-1;
1801 /* See if the assignment kills REF. */
1802 base2 = ao_ref_base (&lhs_ref);
1803 offset2 = lhs_ref.offset;
1804 size2 = lhs_ref.size;
1805 maxsize2 = lhs_ref.max_size;
1806 if (maxsize2 == -1
1807 || (base != base2 && !operand_equal_p (base, base2, 0))
1808 || offset2 > offset
1809 || offset2 + size2 < offset + maxsize)
1810 return (void *)-1;
1812 /* Find the common base of ref and the lhs. lhs_ops already
1813 contains valueized operands for the lhs. */
1814 i = vr->operands.length () - 1;
1815 j = lhs_ops.length () - 1;
1816 while (j >= 0 && i >= 0
1817 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
1819 i--;
1820 j--;
1823 /* ??? The innermost op should always be a MEM_REF and we already
1824 checked that the assignment to the lhs kills vr. Thus for
1825 aggregate copies using char[] types the vn_reference_op_eq
1826 may fail when comparing types for compatibility. But we really
1827 don't care here - further lookups with the rewritten operands
1828 will simply fail if we messed up types too badly. */
1829 if (j == 0 && i >= 0
1830 && lhs_ops[0].opcode == MEM_REF
1831 && lhs_ops[0].off != -1
1832 && (lhs_ops[0].off == vr->operands[i].off))
1833 i--, j--;
1835 /* i now points to the first additional op.
1836 ??? LHS may not be completely contained in VR, one or more
1837 VIEW_CONVERT_EXPRs could be in its way. We could at least
1838 try handling outermost VIEW_CONVERT_EXPRs. */
1839 if (j != -1)
1840 return (void *)-1;
1842 /* Now re-write REF to be based on the rhs of the assignment. */
1843 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1844 /* We need to pre-pend vr->operands[0..i] to rhs. */
1845 if (i + 1 + rhs.length () > vr->operands.length ())
1847 vec<vn_reference_op_s> old = vr->operands;
1848 vr->operands.safe_grow (i + 1 + rhs.length ());
1849 if (old == shared_lookup_references
1850 && vr->operands != old)
1851 shared_lookup_references = vNULL;
1853 else
1854 vr->operands.truncate (i + 1 + rhs.length ());
1855 FOR_EACH_VEC_ELT (rhs, j, vro)
1856 vr->operands[i + 1 + j] = *vro;
1857 vr->operands = valueize_refs (vr->operands);
1858 vr->hashcode = vn_reference_compute_hash (vr);
1860 /* Adjust *ref from the new operands. */
1861 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1862 return (void *)-1;
1863 /* This can happen with bitfields. */
1864 if (ref->size != r.size)
1865 return (void *)-1;
1866 *ref = r;
1868 /* Do not update last seen VUSE after translating. */
1869 last_vuse_ptr = NULL;
1871 /* Keep looking for the adjusted *REF / VR pair. */
1872 return NULL;
1875 /* 6) For memcpy copies translate the reference through them if
1876 the copy kills ref. */
1877 else if (vn_walk_kind == VN_WALKREWRITE
1878 && is_gimple_reg_type (vr->type)
1879 /* ??? Handle BCOPY as well. */
1880 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
1881 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
1882 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
1883 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
1884 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
1885 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
1886 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
1887 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
1889 tree lhs, rhs;
1890 ao_ref r;
1891 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
1892 vn_reference_op_s op;
1893 HOST_WIDE_INT at;
1896 /* Only handle non-variable, addressable refs. */
1897 if (ref->size != maxsize
1898 || offset % BITS_PER_UNIT != 0
1899 || ref->size % BITS_PER_UNIT != 0)
1900 return (void *)-1;
1902 /* Extract a pointer base and an offset for the destination. */
1903 lhs = gimple_call_arg (def_stmt, 0);
1904 lhs_offset = 0;
1905 if (TREE_CODE (lhs) == SSA_NAME)
1906 lhs = SSA_VAL (lhs);
1907 if (TREE_CODE (lhs) == ADDR_EXPR)
1909 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
1910 &lhs_offset);
1911 if (!tem)
1912 return (void *)-1;
1913 if (TREE_CODE (tem) == MEM_REF
1914 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
1916 lhs = TREE_OPERAND (tem, 0);
1917 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
1919 else if (DECL_P (tem))
1920 lhs = build_fold_addr_expr (tem);
1921 else
1922 return (void *)-1;
1924 if (TREE_CODE (lhs) != SSA_NAME
1925 && TREE_CODE (lhs) != ADDR_EXPR)
1926 return (void *)-1;
1928 /* Extract a pointer base and an offset for the source. */
1929 rhs = gimple_call_arg (def_stmt, 1);
1930 rhs_offset = 0;
1931 if (TREE_CODE (rhs) == SSA_NAME)
1932 rhs = SSA_VAL (rhs);
1933 if (TREE_CODE (rhs) == ADDR_EXPR)
1935 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
1936 &rhs_offset);
1937 if (!tem)
1938 return (void *)-1;
1939 if (TREE_CODE (tem) == MEM_REF
1940 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
1942 rhs = TREE_OPERAND (tem, 0);
1943 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
1945 else if (DECL_P (tem))
1946 rhs = build_fold_addr_expr (tem);
1947 else
1948 return (void *)-1;
1950 if (TREE_CODE (rhs) != SSA_NAME
1951 && TREE_CODE (rhs) != ADDR_EXPR)
1952 return (void *)-1;
1954 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
1956 /* The bases of the destination and the references have to agree. */
1957 if ((TREE_CODE (base) != MEM_REF
1958 && !DECL_P (base))
1959 || (TREE_CODE (base) == MEM_REF
1960 && (TREE_OPERAND (base, 0) != lhs
1961 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
1962 || (DECL_P (base)
1963 && (TREE_CODE (lhs) != ADDR_EXPR
1964 || TREE_OPERAND (lhs, 0) != base)))
1965 return (void *)-1;
1967 /* And the access has to be contained within the memcpy destination. */
1968 at = offset / BITS_PER_UNIT;
1969 if (TREE_CODE (base) == MEM_REF)
1970 at += tree_to_uhwi (TREE_OPERAND (base, 1));
1971 if (lhs_offset > at
1972 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
1973 return (void *)-1;
1975 /* Make room for 2 operands in the new reference. */
1976 if (vr->operands.length () < 2)
1978 vec<vn_reference_op_s> old = vr->operands;
1979 vr->operands.safe_grow_cleared (2);
1980 if (old == shared_lookup_references
1981 && vr->operands != old)
1982 shared_lookup_references.create (0);
1984 else
1985 vr->operands.truncate (2);
1987 /* The looked-through reference is a simple MEM_REF. */
1988 memset (&op, 0, sizeof (op));
1989 op.type = vr->type;
1990 op.opcode = MEM_REF;
1991 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
1992 op.off = at - lhs_offset + rhs_offset;
1993 vr->operands[0] = op;
1994 op.type = TREE_TYPE (rhs);
1995 op.opcode = TREE_CODE (rhs);
1996 op.op0 = rhs;
1997 op.off = -1;
1998 vr->operands[1] = op;
1999 vr->hashcode = vn_reference_compute_hash (vr);
2001 /* Adjust *ref from the new operands. */
2002 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2003 return (void *)-1;
2004 /* This can happen with bitfields. */
2005 if (ref->size != r.size)
2006 return (void *)-1;
2007 *ref = r;
2009 /* Do not update last seen VUSE after translating. */
2010 last_vuse_ptr = NULL;
2012 /* Keep looking for the adjusted *REF / VR pair. */
2013 return NULL;
2016 /* Bail out and stop walking. */
2017 return (void *)-1;
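/* A sketch of case 6 above: given

     memcpy (&a, &b, sizeof (a));
     ... = a.f;

   the reference a.f is rewritten into a MEM_REF based on &b at the
   corresponding offset and NULL is returned, so the walk continues
   and may find an earlier store to b.f.  */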
2020 /* Lookup a reference operation by its parts in the current hash table.
2021 Returns the resulting value number if it exists in the hash table,
2022 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2023 vn_reference_t stored in the hashtable if something is found. */
2025 tree
2026 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2027 vec<vn_reference_op_s> operands,
2028 vn_reference_t *vnresult, vn_lookup_kind kind)
2030 struct vn_reference_s vr1;
2031 vn_reference_t tmp;
2032 tree cst;
2034 if (!vnresult)
2035 vnresult = &tmp;
2036 *vnresult = NULL;
2038 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2039 shared_lookup_references.truncate (0);
2040 shared_lookup_references.safe_grow (operands.length ());
2041 memcpy (shared_lookup_references.address (),
2042 operands.address (),
2043 sizeof (vn_reference_op_s)
2044 * operands.length ());
2045 vr1.operands = operands = shared_lookup_references
2046 = valueize_refs (shared_lookup_references);
2047 vr1.type = type;
2048 vr1.set = set;
2049 vr1.hashcode = vn_reference_compute_hash (&vr1);
2050 if ((cst = fully_constant_vn_reference_p (&vr1)))
2051 return cst;
2053 vn_reference_lookup_1 (&vr1, vnresult);
2054 if (!*vnresult
2055 && kind != VN_NOWALK
2056 && vr1.vuse)
2058 ao_ref r;
2059 vn_walk_kind = kind;
2060 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2061 *vnresult =
2062 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2063 vn_reference_lookup_2,
2064 vn_reference_lookup_3, &vr1);
2065 if (vr1.operands != operands)
2066 vr1.operands.release ();
2069 if (*vnresult)
2070 return (*vnresult)->result;
2072 return NULL_TREE;
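/* Note: unlike vn_reference_lookup below, this entry point takes
   ready-made operands; it is meant for callers that construct
   references piecewise, e.g. PRE's phi-translation.  */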
2075 /* Lookup OP in the current hash table, and return the resulting value
2076 number if it exists in the hash table. Return NULL_TREE if it does
2077 not exist in the hash table or if the result field of the structure
2078 was NULL. VNRESULT will be filled in with the vn_reference_t
2079 stored in the hashtable if one exists. */
2081 tree
2082 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2083 vn_reference_t *vnresult)
2085 vec<vn_reference_op_s> operands;
2086 struct vn_reference_s vr1;
2087 tree cst;
2088 bool valueized_anything;
2090 if (vnresult)
2091 *vnresult = NULL;
2093 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2094 vr1.operands = operands
2095 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2096 vr1.type = TREE_TYPE (op);
2097 vr1.set = get_alias_set (op);
2098 vr1.hashcode = vn_reference_compute_hash (&vr1);
2099 if ((cst = fully_constant_vn_reference_p (&vr1)))
2100 return cst;
2102 if (kind != VN_NOWALK
2103 && vr1.vuse)
2105 vn_reference_t wvnresult;
2106 ao_ref r;
2107 /* Make sure to use a valueized reference if we valueized anything.
2108 Otherwise preserve the full reference for advanced TBAA. */
2109 if (!valueized_anything
2110 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2111 vr1.operands))
2112 ao_ref_init (&r, op);
2113 vn_walk_kind = kind;
2114 wvnresult =
2115 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2116 vn_reference_lookup_2,
2117 vn_reference_lookup_3, &vr1);
2118 if (vr1.operands != operands)
2119 vr1.operands.release ();
2120 if (wvnresult)
2122 if (vnresult)
2123 *vnresult = wvnresult;
2124 return wvnresult->result;
2127 return NULL_TREE;
2130 return vn_reference_lookup_1 (&vr1, vnresult);
2134 /* Insert OP into the current hash table with a value number of
2135 RESULT, and return the resulting reference structure we created. */
2137 vn_reference_t
2138 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2140 vn_reference_s **slot;
2141 vn_reference_t vr1;
2142 bool tem;
2144 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2145 if (TREE_CODE (result) == SSA_NAME)
2146 vr1->value_id = VN_INFO (result)->value_id;
2147 else
2148 vr1->value_id = get_or_alloc_constant_value_id (result);
2149 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2150 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2151 vr1->type = TREE_TYPE (op);
2152 vr1->set = get_alias_set (op);
2153 vr1->hashcode = vn_reference_compute_hash (vr1);
2154 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2155 vr1->result_vdef = vdef;
2157 slot = current_info->references.find_slot_with_hash (vr1, vr1->hashcode,
2158 INSERT);
2160 /* Because we look up stores using vuses, and value number failures
2161 using the vdefs (see visit_reference_op_store for how and why),
2162 it's possible that on failure we may try to insert an already
2163 inserted store. This is not wrong; there is no SSA name for a
2164 store that we could use as a differentiator anyway. Thus, unlike
2165 the other lookup functions, you cannot gcc_assert (!*slot)
2166 here. */
2168 /* But free the old slot in case of a collision. */
2169 if (*slot)
2170 free_reference (*slot);
2172 *slot = vr1;
2173 return vr1;
2176 /* Insert a reference by its pieces into the current hash table with
2177 a value number of RESULT. Return the resulting reference
2178 structure we created. */
2180 vn_reference_t
2181 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2182 vec<vn_reference_op_s> operands,
2183 tree result, unsigned int value_id)
2186 vn_reference_s **slot;
2187 vn_reference_t vr1;
2189 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2190 vr1->value_id = value_id;
2191 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2192 vr1->operands = valueize_refs (operands);
2193 vr1->type = type;
2194 vr1->set = set;
2195 vr1->hashcode = vn_reference_compute_hash (vr1);
2196 if (result && TREE_CODE (result) == SSA_NAME)
2197 result = SSA_VAL (result);
2198 vr1->result = result;
2200 slot = current_info->references.find_slot_with_hash (vr1, vr1->hashcode,
2201 INSERT);
2203 /* At this point we should have all the things inserted that we have
2204 seen before, and we should never try inserting something that
2205 already exists. */
2206 gcc_assert (!*slot);
2207 if (*slot)
2208 free_reference (*slot);
2210 *slot = vr1;
2211 return vr1;
2214 /* Compute and return the hash value for nary operation VNO1. */
2216 hashval_t
2217 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2219 hashval_t hash;
2220 unsigned i;
2222 for (i = 0; i < vno1->length; ++i)
2223 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2224 vno1->op[i] = SSA_VAL (vno1->op[i]);
2226 if (vno1->length == 2
2227 && commutative_tree_code (vno1->opcode)
2228 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2230 tree temp = vno1->op[0];
2231 vno1->op[0] = vno1->op[1];
2232 vno1->op[1] = temp;
2235 hash = iterative_hash_hashval_t (vno1->opcode, 0);
2236 for (i = 0; i < vno1->length; ++i)
2237 hash = iterative_hash_expr (vno1->op[i], hash);
2239 return hash;
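/* Operands of commutative operations are canonicalized above by
   swapping, so e.g. a_1 + b_2 and b_2 + a_1 receive the same hash
   and compare equal in vn_nary_op_eq.  */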
2242 /* Compare nary operations VNO1 and VNO2 and return true if they are
2243 equivalent. */
2245 bool
2246 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2248 unsigned i;
2250 if (vno1->hashcode != vno2->hashcode)
2251 return false;
2253 if (vno1->length != vno2->length)
2254 return false;
2256 if (vno1->opcode != vno2->opcode
2257 || !types_compatible_p (vno1->type, vno2->type))
2258 return false;
2260 for (i = 0; i < vno1->length; ++i)
2261 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2262 return false;
2264 return true;
2267 /* Initialize VNO from the pieces provided. */
2269 static void
2270 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2271 enum tree_code code, tree type, tree *ops)
2273 vno->opcode = code;
2274 vno->length = length;
2275 vno->type = type;
2276 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2279 /* Initialize VNO from OP. */
2281 static void
2282 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2284 unsigned i;
2286 vno->opcode = TREE_CODE (op);
2287 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2288 vno->type = TREE_TYPE (op);
2289 for (i = 0; i < vno->length; ++i)
2290 vno->op[i] = TREE_OPERAND (op, i);
2293 /* Return the number of operands for a vn_nary ops structure from STMT. */
2295 static unsigned int
2296 vn_nary_length_from_stmt (gimple stmt)
2298 switch (gimple_assign_rhs_code (stmt))
2300 case REALPART_EXPR:
2301 case IMAGPART_EXPR:
2302 case VIEW_CONVERT_EXPR:
2303 return 1;
2305 case BIT_FIELD_REF:
2306 return 3;
2308 case CONSTRUCTOR:
2309 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2311 default:
2312 return gimple_num_ops (stmt) - 1;
2316 /* Initialize VNO from STMT. */
2318 static void
2319 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2321 unsigned i;
2323 vno->opcode = gimple_assign_rhs_code (stmt);
2324 vno->type = gimple_expr_type (stmt);
2325 switch (vno->opcode)
2327 case REALPART_EXPR:
2328 case IMAGPART_EXPR:
2329 case VIEW_CONVERT_EXPR:
2330 vno->length = 1;
2331 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2332 break;
2334 case BIT_FIELD_REF:
2335 vno->length = 3;
2336 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2337 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2338 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2339 break;
2341 case CONSTRUCTOR:
2342 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2343 for (i = 0; i < vno->length; ++i)
2344 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2345 break;
2347 default:
2348 gcc_checking_assert (!gimple_assign_single_p (stmt));
2349 vno->length = gimple_num_ops (stmt) - 1;
2350 for (i = 0; i < vno->length; ++i)
2351 vno->op[i] = gimple_op (stmt, i + 1);
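/* The cases above are GIMPLE_SINGLE_RHS assignments whose operands
   of interest live inside the single rhs tree; all other codes take
   their operands directly from the statement.  */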
2355 /* Compute the hashcode for VNO and look for it in the hash table;
2356 return the resulting value number if it exists in the hash table.
2357 Return NULL_TREE if it does not exist in the hash table or if the
2358 result field of the operation is NULL. VNRESULT will contain the
2359 vn_nary_op_t from the hashtable if it exists. */
2361 static tree
2362 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2364 vn_nary_op_s **slot;
2366 if (vnresult)
2367 *vnresult = NULL;
2369 vno->hashcode = vn_nary_op_compute_hash (vno);
2370 slot = current_info->nary.find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
2371 if (!slot && current_info == optimistic_info)
2372 slot = valid_info->nary.find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
2373 if (!slot)
2374 return NULL_TREE;
2375 if (vnresult)
2376 *vnresult = *slot;
2377 return (*slot)->result;
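/* Note the fallback above: while iterating an SCC with the
   optimistic table, lookups that miss there also consult the valid
   table (see process_scc below).  */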
2380 /* Lookup an n-ary operation by its pieces and return the resulting value
2381 number if it exists in the hash table. Return NULL_TREE if it does
2382 not exist in the hash table or if the result field of the operation
2383 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2384 if it exists. */
2386 tree
2387 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2388 tree type, tree *ops, vn_nary_op_t *vnresult)
2390 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2391 sizeof_vn_nary_op (length));
2392 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2393 return vn_nary_op_lookup_1 (vno1, vnresult);
2396 /* Lookup OP in the current hash table, and return the resulting value
2397 number if it exists in the hash table. Return NULL_TREE if it does
2398 not exist in the hash table or if the result field of the operation
2399 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2400 if it exists. */
2402 tree
2403 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2405 vn_nary_op_t vno1
2406 = XALLOCAVAR (struct vn_nary_op_s,
2407 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2408 init_vn_nary_op_from_op (vno1, op);
2409 return vn_nary_op_lookup_1 (vno1, vnresult);
2412 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2413 value number if it exists in the hash table. Return NULL_TREE if
2414 it does not exist in the hash table. VNRESULT will contain the
2415 vn_nary_op_t from the hashtable if it exists. */
2417 tree
2418 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2420 vn_nary_op_t vno1
2421 = XALLOCAVAR (struct vn_nary_op_s,
2422 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2423 init_vn_nary_op_from_stmt (vno1, stmt);
2424 return vn_nary_op_lookup_1 (vno1, vnresult);
2427 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2429 static vn_nary_op_t
2430 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2432 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2435 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2436 obstack. */
2438 static vn_nary_op_t
2439 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2441 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2442 &current_info->nary_obstack);
2444 vno1->value_id = value_id;
2445 vno1->length = length;
2446 vno1->result = result;
2448 return vno1;
2451 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2452 VNO->HASHCODE first. */
2454 static vn_nary_op_t
2455 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type table,
2456 bool compute_hash)
2458 vn_nary_op_s **slot;
2460 if (compute_hash)
2461 vno->hashcode = vn_nary_op_compute_hash (vno);
2463 slot = table.find_slot_with_hash (vno, vno->hashcode, INSERT);
2464 gcc_assert (!*slot);
2466 *slot = vno;
2467 return vno;
2470 /* Insert an n-ary operation into the current hash table using its
2471 pieces. Return the vn_nary_op_t structure we created and put in
2472 the hashtable. */
2474 vn_nary_op_t
2475 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2476 tree type, tree *ops,
2477 tree result, unsigned int value_id)
2479 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2480 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2481 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2484 /* Insert OP into the current hash table with a value number of
2485 RESULT. Return the vn_nary_op_t structure we created and put in
2486 the hashtable. */
2488 vn_nary_op_t
2489 vn_nary_op_insert (tree op, tree result)
2491 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2492 vn_nary_op_t vno1;
2494 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2495 init_vn_nary_op_from_op (vno1, op);
2496 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2499 /* Insert the rhs of STMT into the current hash table with a value number of
2500 RESULT. */
2502 vn_nary_op_t
2503 vn_nary_op_insert_stmt (gimple stmt, tree result)
2505 vn_nary_op_t vno1
2506 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2507 result, VN_INFO (result)->value_id);
2508 init_vn_nary_op_from_stmt (vno1, stmt);
2509 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2512 /* Compute a hashcode for PHI operation VP1 and return it. */
2514 static inline hashval_t
2515 vn_phi_compute_hash (vn_phi_t vp1)
2517 hashval_t result;
2518 int i;
2519 tree phi1op;
2520 tree type;
2522 result = vp1->block->index;
2524 /* If all PHI arguments are constants we need to distinguish
2525 the PHI node via its type. */
2526 type = vp1->type;
2527 result += vn_hash_type (type);
2529 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2531 if (phi1op == VN_TOP)
2532 continue;
2533 result = iterative_hash_expr (phi1op, result);
2536 return result;
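/* VN_TOP arguments are skipped while hashing so that the hash stays
   consistent with vn_phi_eq below, which ignores VN_TOP arguments
   when comparing.  */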
2539 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2541 static int
2542 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2544 if (vp1->hashcode != vp2->hashcode)
2545 return false;
2547 if (vp1->block == vp2->block)
2549 int i;
2550 tree phi1op;
2552 /* If the PHI nodes do not have compatible types
2553 they are not the same. */
2554 if (!types_compatible_p (vp1->type, vp2->type))
2555 return false;
2557 /* Any phi in the same block will have its arguments in the
2558 same edge order, because of how we store phi nodes. */
2559 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2561 tree phi2op = vp2->phiargs[i];
2562 if (phi1op == VN_TOP || phi2op == VN_TOP)
2563 continue;
2564 if (!expressions_equal_p (phi1op, phi2op))
2565 return false;
2567 return true;
2569 return false;
2572 static vec<tree> shared_lookup_phiargs;
2574 /* Lookup PHI in the current hash table, and return the resulting
2575 value number if it exists in the hash table. Return NULL_TREE if
2576 it does not exist in the hash table. */
2578 static tree
2579 vn_phi_lookup (gimple phi)
2581 vn_phi_s **slot;
2582 struct vn_phi_s vp1;
2583 unsigned i;
2585 shared_lookup_phiargs.truncate (0);
2587 /* Canonicalize the SSA_NAME's to their value number. */
2588 for (i = 0; i < gimple_phi_num_args (phi); i++)
2590 tree def = PHI_ARG_DEF (phi, i);
2591 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2592 shared_lookup_phiargs.safe_push (def);
2594 vp1.type = TREE_TYPE (gimple_phi_result (phi));
2595 vp1.phiargs = shared_lookup_phiargs;
2596 vp1.block = gimple_bb (phi);
2597 vp1.hashcode = vn_phi_compute_hash (&vp1);
2598 slot = current_info->phis.find_slot_with_hash (&vp1, vp1.hashcode, NO_INSERT);
2599 if (!slot && current_info == optimistic_info)
2600 slot = valid_info->phis.find_slot_with_hash (&vp1, vp1.hashcode, NO_INSERT);
2601 if (!slot)
2602 return NULL_TREE;
2603 return (*slot)->result;
2606 /* Insert PHI into the current hash table with a value number of
2607 RESULT. */
2609 static vn_phi_t
2610 vn_phi_insert (gimple phi, tree result)
2612 vn_phi_s **slot;
2613 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2614 unsigned i;
2615 vec<tree> args = vNULL;
2617 /* Canonicalize the SSA_NAME's to their value number. */
2618 for (i = 0; i < gimple_phi_num_args (phi); i++)
2620 tree def = PHI_ARG_DEF (phi, i);
2621 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2622 args.safe_push (def);
2624 vp1->value_id = VN_INFO (result)->value_id;
2625 vp1->type = TREE_TYPE (gimple_phi_result (phi));
2626 vp1->phiargs = args;
2627 vp1->block = gimple_bb (phi);
2628 vp1->result = result;
2629 vp1->hashcode = vn_phi_compute_hash (vp1);
2631 slot = current_info->phis.find_slot_with_hash (vp1, vp1->hashcode, INSERT);
2633 /* Because we iterate over phi operations more than once, it's
2634 possible the slot might already exist here, hence no assert.  */
2635 *slot = vp1;
2636 return vp1;
2640 /* Print set of components in strongly connected component SCC to OUT. */
2642 static void
2643 print_scc (FILE *out, vec<tree> scc)
2645 tree var;
2646 unsigned int i;
2648 fprintf (out, "SCC consists of:");
2649 FOR_EACH_VEC_ELT (scc, i, var)
2651 fprintf (out, " ");
2652 print_generic_expr (out, var, 0);
2654 fprintf (out, "\n");
2657 /* Set the value number of FROM to TO, return true if it has changed
2658 as a result. */
2660 static inline bool
2661 set_ssa_val_to (tree from, tree to)
2663 tree currval = SSA_VAL (from);
2664 HOST_WIDE_INT toff, coff;
2666 /* The only thing we allow as value numbers are ssa_names
2667 and invariants. So assert that here. We don't allow VN_TOP
2668 as visiting a stmt should produce a value-number other than
2669 that.
2670 ??? Still VN_TOP can happen for unreachable code, so force
2671 it to varying in that case. Not all code is prepared to
2672 get VN_TOP on valueization. */
2673 if (to == VN_TOP)
2675 if (dump_file && (dump_flags & TDF_DETAILS))
2676 fprintf (dump_file, "Forcing value number to varying on "
2677 "receiving VN_TOP\n");
2678 to = from;
2681 gcc_assert (to != NULL_TREE
2682 && (TREE_CODE (to) == SSA_NAME
2683 || is_gimple_min_invariant (to)));
2685 if (from != to)
2687 if (currval == from)
2689 if (dump_file && (dump_flags & TDF_DETAILS))
2691 fprintf (dump_file, "Not changing value number of ");
2692 print_generic_expr (dump_file, from, 0);
2693 fprintf (dump_file, " from VARYING to ");
2694 print_generic_expr (dump_file, to, 0);
2695 fprintf (dump_file, "\n");
2697 return false;
2699 else if (TREE_CODE (to) == SSA_NAME
2700 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2701 to = from;
2704 if (dump_file && (dump_flags & TDF_DETAILS))
2706 fprintf (dump_file, "Setting value number of ");
2707 print_generic_expr (dump_file, from, 0);
2708 fprintf (dump_file, " to ");
2709 print_generic_expr (dump_file, to, 0);
2712 if (currval != to
2713 && !operand_equal_p (currval, to, 0)
2714 /* ??? For addresses involving volatile objects or types operand_equal_p
2715 does not reliably detect ADDR_EXPRs as equal. We know we are only
2716 getting invariant gimple addresses here, so can use
2717 get_addr_base_and_unit_offset to do this comparison. */
2718 && !(TREE_CODE (currval) == ADDR_EXPR
2719 && TREE_CODE (to) == ADDR_EXPR
2720 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
2721 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
2722 && coff == toff))
2724 VN_INFO (from)->valnum = to;
2725 if (dump_file && (dump_flags & TDF_DETAILS))
2726 fprintf (dump_file, " (changed)\n");
2727 return true;
2729 if (dump_file && (dump_flags & TDF_DETAILS))
2730 fprintf (dump_file, "\n");
2731 return false;
2734 /* Mark as processed all the definitions in the defining stmt of USE, or
2735 the USE itself. */
2737 static void
2738 mark_use_processed (tree use)
2740 ssa_op_iter iter;
2741 def_operand_p defp;
2742 gimple stmt = SSA_NAME_DEF_STMT (use);
2744 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
2746 VN_INFO (use)->use_processed = true;
2747 return;
2750 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2752 tree def = DEF_FROM_PTR (defp);
2754 VN_INFO (def)->use_processed = true;
2758 /* Value number all definitions in STMT to themselves.
2759 Return true if a value number changed. */
2761 static bool
2762 defs_to_varying (gimple stmt)
2764 bool changed = false;
2765 ssa_op_iter iter;
2766 def_operand_p defp;
2768 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2770 tree def = DEF_FROM_PTR (defp);
2771 changed |= set_ssa_val_to (def, def);
2773 return changed;
2776 static bool expr_has_constants (tree expr);
2778 /* Visit a copy between LHS and RHS, return true if the value number
2779 changed. */
2781 static bool
2782 visit_copy (tree lhs, tree rhs)
2784 /* The copy may have a more interesting constant-filled expression
2785 (we don't, since we know our RHS is just an SSA name). */
2786 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2787 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2789 /* And finally valueize. */
2790 rhs = SSA_VAL (rhs);
2792 return set_ssa_val_to (lhs, rhs);
2795 /* Visit a nary operator RHS, value number it, and return true if the
2796 value number of LHS has changed as a result. */
2798 static bool
2799 visit_nary_op (tree lhs, gimple stmt)
2801 bool changed = false;
2802 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2804 if (result)
2805 changed = set_ssa_val_to (lhs, result);
2806 else
2808 changed = set_ssa_val_to (lhs, lhs);
2809 vn_nary_op_insert_stmt (stmt, lhs);
2812 return changed;
2815 /* Visit a call STMT storing into LHS. Return true if the value number
2816 of the LHS has changed as a result. */
2818 static bool
2819 visit_reference_op_call (tree lhs, gimple stmt)
2821 bool changed = false;
2822 struct vn_reference_s vr1;
2823 vn_reference_t vnresult = NULL;
2824 tree vuse = gimple_vuse (stmt);
2825 tree vdef = gimple_vdef (stmt);
2827 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
2828 if (lhs && TREE_CODE (lhs) != SSA_NAME)
2829 lhs = NULL_TREE;
2831 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2832 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2833 vr1.type = gimple_expr_type (stmt);
2834 vr1.set = 0;
2835 vr1.hashcode = vn_reference_compute_hash (&vr1);
2836 vn_reference_lookup_1 (&vr1, &vnresult);
2838 if (vnresult)
2840 if (vnresult->result_vdef && vdef)
2841 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
2843 if (!vnresult->result && lhs)
2844 vnresult->result = lhs;
2846 if (vnresult->result && lhs)
2848 changed |= set_ssa_val_to (lhs, vnresult->result);
2850 if (VN_INFO (vnresult->result)->has_constants)
2851 VN_INFO (lhs)->has_constants = true;
2854 else
2856 vn_reference_s **slot;
2857 vn_reference_t vr2;
2858 if (vdef)
2859 changed |= set_ssa_val_to (vdef, vdef);
2860 if (lhs)
2861 changed |= set_ssa_val_to (lhs, lhs);
2862 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2863 vr2->vuse = vr1.vuse;
2864 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2865 vr2->type = vr1.type;
2866 vr2->set = vr1.set;
2867 vr2->hashcode = vr1.hashcode;
2868 vr2->result = lhs;
2869 vr2->result_vdef = vdef;
2870 slot = current_info->references.find_slot_with_hash (vr2, vr2->hashcode,
2871 INSERT);
2872 if (*slot)
2873 free_reference (*slot);
2874 *slot = vr2;
2877 return changed;
2880 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2881 and return true if the value number of the LHS has changed as a result. */
2883 static bool
2884 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2886 bool changed = false;
2887 tree last_vuse;
2888 tree result;
2890 last_vuse = gimple_vuse (stmt);
2891 last_vuse_ptr = &last_vuse;
2892 result = vn_reference_lookup (op, gimple_vuse (stmt),
2893 default_vn_walk_kind, NULL);
2894 last_vuse_ptr = NULL;
2896 /* If we have a VCE, try looking up its operand as it might be stored in
2897 a different type. */
2898 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2899 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2900 default_vn_walk_kind, NULL);
2902 /* We handle type-punning through unions by value-numbering based
2903 on offset and size of the access. Be prepared to handle a
2904 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
2905 if (result
2906 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2908 /* We will be setting the value number of lhs to the value number
2909 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2910 So first simplify and lookup this expression to see if it
2911 is already available. */
2912 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2913 if ((CONVERT_EXPR_P (val)
2914 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2915 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2917 tree tem = vn_get_expr_for (TREE_OPERAND (val, 0));
2918 if ((CONVERT_EXPR_P (tem)
2919 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2920 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2921 TREE_TYPE (val), tem)))
2922 val = tem;
2924 result = val;
2925 if (!is_gimple_min_invariant (val)
2926 && TREE_CODE (val) != SSA_NAME)
2927 result = vn_nary_op_lookup (val, NULL);
2928 /* If the expression is not yet available, value-number lhs to
2929 a new SSA_NAME we create. */
2930 if (!result)
2932 result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (),
2933 "vntemp");
2934 /* Initialize value-number information properly. */
2935 VN_INFO_GET (result)->valnum = result;
2936 VN_INFO (result)->value_id = get_next_value_id ();
2937 VN_INFO (result)->expr = val;
2938 VN_INFO (result)->has_constants = expr_has_constants (val);
2939 VN_INFO (result)->needs_insertion = true;
2940 /* As all "inserted" statements are singleton SCCs, insert
2941 to the valid table. This is strictly needed to
2942 avoid re-generating new value SSA_NAMEs for the same
2943 expression during SCC iteration over and over (the
2944 optimistic table gets cleared after each iteration).
2945 We do not need to insert into the optimistic table, as
2946 lookups there will fall back to the valid table. */
2947 if (current_info == optimistic_info)
2949 current_info = valid_info;
2950 vn_nary_op_insert (val, result);
2951 current_info = optimistic_info;
2953 else
2954 vn_nary_op_insert (val, result);
2955 if (dump_file && (dump_flags & TDF_DETAILS))
2957 fprintf (dump_file, "Inserting name ");
2958 print_generic_expr (dump_file, result, 0);
2959 fprintf (dump_file, " for expression ");
2960 print_generic_expr (dump_file, val, 0);
2961 fprintf (dump_file, "\n");
2966 if (result)
2968 changed = set_ssa_val_to (lhs, result);
2969 if (TREE_CODE (result) == SSA_NAME
2970 && VN_INFO (result)->has_constants)
2972 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2973 VN_INFO (lhs)->has_constants = true;
2976 else
2978 changed = set_ssa_val_to (lhs, lhs);
2979 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
2982 return changed;
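/* A sketch of the type-punning case handled above: for

     union { int i; float f; } u;
     u.i = i_1;
     f_2 = u.f;

   the load of u.f finds the stored value i_1 at the same offset and
   size, and f_2 is value numbered based on
   VIEW_CONVERT_EXPR <float> (i_1), possibly via a new SSA name
   created to stand for that expression.  */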
2986 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2987 and return true if the value number of the LHS has changed as a result. */
2989 static bool
2990 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2992 bool changed = false;
2993 vn_reference_t vnresult = NULL;
2994 tree result, assign;
2995 bool resultsame = false;
2996 tree vuse = gimple_vuse (stmt);
2997 tree vdef = gimple_vdef (stmt);
2999 /* First we want to look up using the *vuses* from the store and see
3000 if the last store to this location with the same address
3001 had the same value.
3003 The vuses represent the memory state before the store. If the
3004 memory state, address, and value of the store are the same as the
3005 last store to this location, then this store will produce the
3006 same memory state as that store.
3008 In this case the vdef versions for this store are value numbered to those
3009 vuse versions, since they represent the same memory state after
3010 this store.
3012 Otherwise, the vdefs for the store are used when inserting into
3013 the table, since the store generates a new memory state. */
3015 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);
3017 if (result)
3019 if (TREE_CODE (result) == SSA_NAME)
3020 result = SSA_VAL (result);
3021 if (TREE_CODE (op) == SSA_NAME)
3022 op = SSA_VAL (op);
3023 resultsame = expressions_equal_p (result, op);
3026 if (!result || !resultsame)
3028 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3029 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
3030 if (vnresult)
3032 VN_INFO (vdef)->use_processed = true;
3033 return set_ssa_val_to (vdef, vnresult->result_vdef);
3037 if (!result || !resultsame)
3039 if (dump_file && (dump_flags & TDF_DETAILS))
3041 fprintf (dump_file, "No store match\n");
3042 fprintf (dump_file, "Value numbering store ");
3043 print_generic_expr (dump_file, lhs, 0);
3044 fprintf (dump_file, " to ");
3045 print_generic_expr (dump_file, op, 0);
3046 fprintf (dump_file, "\n");
3048 /* Have to set value numbers before insert, since insert is
3049 going to valueize the references in-place. */
3050 if (vdef)
3052 changed |= set_ssa_val_to (vdef, vdef);
3055 /* Do not insert structure copies into the tables. */
3056 if (is_gimple_min_invariant (op)
3057 || is_gimple_reg (op))
3058 vn_reference_insert (lhs, op, vdef, NULL);
3060 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3061 vn_reference_insert (assign, lhs, vuse, vdef);
3063 else
3065 /* We had a match, so value number the vdef to have the value
3066 number of the vuse it came from. */
3068 if (dump_file && (dump_flags & TDF_DETAILS))
3069 fprintf (dump_file, "Store matched earlier value,"
3070 "value numbering store vdefs to matching vuses.\n");
3072 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3075 return changed;
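/* For example, if the walk shows that the previous store to *p_1
   stored the very same value x_2, the store is redundant and its
   vdef is value numbered to the incoming vuse, making the memory
   states before and after the store equivalent.  */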
3078 /* Visit and value number PHI, return true if the value number
3079 changed. */
3081 static bool
3082 visit_phi (gimple phi)
3084 bool changed = false;
3085 tree result;
3086 tree sameval = VN_TOP;
3087 bool allsame = true;
3089 /* TODO: We could check for this in init_sccvn, and replace this
3090 with a gcc_assert. */
3091 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3092 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3094 /* See if all non-TOP arguments have the same value. TOP is
3095 equivalent to everything, so we can ignore it. */
3096 edge_iterator ei;
3097 edge e;
3098 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3099 if (e->flags & EDGE_EXECUTABLE)
3101 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3103 if (TREE_CODE (def) == SSA_NAME)
3104 def = SSA_VAL (def);
3105 if (def == VN_TOP)
3106 continue;
3107 if (sameval == VN_TOP)
3109 sameval = def;
3111 else
3113 if (!expressions_equal_p (def, sameval))
3115 allsame = false;
3116 break;
3121 /* If all value numbered to the same value, the phi node has that
3122 value. */
3123 if (allsame)
3125 if (is_gimple_min_invariant (sameval))
3127 VN_INFO (PHI_RESULT (phi))->has_constants = true;
3128 VN_INFO (PHI_RESULT (phi))->expr = sameval;
3130 else
3132 VN_INFO (PHI_RESULT (phi))->has_constants = false;
3133 VN_INFO (PHI_RESULT (phi))->expr = sameval;
3136 if (TREE_CODE (sameval) == SSA_NAME)
3137 return visit_copy (PHI_RESULT (phi), sameval);
3139 return set_ssa_val_to (PHI_RESULT (phi), sameval);
3142 /* Otherwise, see if it is equivalent to a phi node in this block. */
3143 result = vn_phi_lookup (phi);
3144 if (result)
3146 if (TREE_CODE (result) == SSA_NAME)
3147 changed = visit_copy (PHI_RESULT (phi), result);
3148 else
3149 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3151 else
3153 vn_phi_insert (phi, PHI_RESULT (phi));
3154 VN_INFO (PHI_RESULT (phi))->has_constants = false;
3155 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
3156 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3159 return changed;
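/* For example, PHI <x_1(b1), x_1(b2), VN_TOP(b3)> value numbers to
   x_1 because VN_TOP is equivalent to everything; when the arguments
   disagree we fall back to the PHI lookup so that congruent PHIs in
   the same block still receive the same value number.  */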
3162 /* Return true if EXPR contains constants. */
3164 static bool
3165 expr_has_constants (tree expr)
3167 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3169 case tcc_unary:
3170 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
3172 case tcc_binary:
3173 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
3174 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
3175 /* Constants inside reference ops are rarely interesting, but
3176 it can take a lot of looking to find them. */
3177 case tcc_reference:
3178 case tcc_declaration:
3179 return false;
3180 default:
3181 return is_gimple_min_invariant (expr);
3183 return false;
3186 /* Return true if STMT contains constants. */
3188 static bool
3189 stmt_has_constants (gimple stmt)
3191 tree tem;
3193 if (gimple_code (stmt) != GIMPLE_ASSIGN)
3194 return false;
3196 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
3198 case GIMPLE_TERNARY_RHS:
3199 tem = gimple_assign_rhs3 (stmt);
3200 if (TREE_CODE (tem) == SSA_NAME)
3201 tem = SSA_VAL (tem);
3202 if (is_gimple_min_invariant (tem))
3203 return true;
3204 /* Fallthru. */
3206 case GIMPLE_BINARY_RHS:
3207 tem = gimple_assign_rhs2 (stmt);
3208 if (TREE_CODE (tem) == SSA_NAME)
3209 tem = SSA_VAL (tem);
3210 if (is_gimple_min_invariant (tem))
3211 return true;
3212 /* Fallthru. */
3214 case GIMPLE_SINGLE_RHS:
3215 /* Constants inside reference ops are rarely interesting, but
3216 it can take a lot of looking to find them. */
3217 case GIMPLE_UNARY_RHS:
3218 tem = gimple_assign_rhs1 (stmt);
3219 if (TREE_CODE (tem) == SSA_NAME)
3220 tem = SSA_VAL (tem);
3221 return is_gimple_min_invariant (tem);
3223 default:
3224 gcc_unreachable ();
3226 return false;
3229 /* Simplify the binary expression RHS, and return the result if
3230 simplified. */
3232 static tree
3233 simplify_binary_expression (gimple stmt)
3235 tree result = NULL_TREE;
3236 tree op0 = gimple_assign_rhs1 (stmt);
3237 tree op1 = gimple_assign_rhs2 (stmt);
3238 enum tree_code code = gimple_assign_rhs_code (stmt);
3240 /* This will not catch every single case we could combine, but will
3241 catch those with constants. The goal here is to simultaneously
3242 combine constants between expressions, but avoid infinite
3243 expansion of expressions during simplification. */
3244 if (TREE_CODE (op0) == SSA_NAME)
3246 if (VN_INFO (op0)->has_constants
3247 || TREE_CODE_CLASS (code) == tcc_comparison
3248 || code == COMPLEX_EXPR)
3249 op0 = vn_get_expr_for (op0);
3250 else
3251 op0 = vn_valueize (op0);
3254 if (TREE_CODE (op1) == SSA_NAME)
3256 if (VN_INFO (op1)->has_constants
3257 || code == COMPLEX_EXPR)
3258 op1 = vn_get_expr_for (op1);
3259 else
3260 op1 = vn_valueize (op1);
3263 /* Pointer plus constant can be represented as invariant address.
3264 Do so to allow further propagation; see also tree forwprop. */
3265 if (code == POINTER_PLUS_EXPR
3266 && tree_fits_uhwi_p (op1)
3267 && TREE_CODE (op0) == ADDR_EXPR
3268 && is_gimple_min_invariant (op0))
3269 return build_invariant_address (TREE_TYPE (op0),
3270 TREE_OPERAND (op0, 0),
3271 tree_to_uhwi (op1));
3273 /* Avoid folding if nothing changed. */
3274 if (op0 == gimple_assign_rhs1 (stmt)
3275 && op1 == gimple_assign_rhs2 (stmt))
3276 return NULL_TREE;
3278 fold_defer_overflow_warnings ();
3280 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3281 if (result)
3282 STRIP_USELESS_TYPE_CONVERSION (result);
3284 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3285 stmt, 0);
3287 /* Make sure result is not a complex expression consisting
3288 of operators of operators (IE (a + b) + (a + c))
3289 Otherwise, we will end up with unbounded expressions if
3290 fold does anything at all. */
3291 if (result && valid_gimple_rhs_p (result))
3292 return result;
3294 return NULL_TREE;
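/* For instance, with a_1 = b_2 + 1 recorded, a statement c_3 = a_1 + 2
   sees op0 expanded to b_2 + 1 via vn_get_expr_for, and fold_binary
   can then combine the constants into b_2 + 3.  */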
3297 /* Simplify the unary expression RHS, and return the result if
3298 simplified. */
3300 static tree
3301 simplify_unary_expression (gimple stmt)
3303 tree result = NULL_TREE;
3304 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3305 enum tree_code code = gimple_assign_rhs_code (stmt);
3307 /* We handle some tcc_reference codes here that are all
3308 GIMPLE_ASSIGN_SINGLE codes. */
3309 if (code == REALPART_EXPR
3310 || code == IMAGPART_EXPR
3311 || code == VIEW_CONVERT_EXPR
3312 || code == BIT_FIELD_REF)
3313 op0 = TREE_OPERAND (op0, 0);
3315 if (TREE_CODE (op0) != SSA_NAME)
3316 return NULL_TREE;
3318 orig_op0 = op0;
3319 if (VN_INFO (op0)->has_constants)
3320 op0 = vn_get_expr_for (op0);
3321 else if (CONVERT_EXPR_CODE_P (code)
3322 || code == REALPART_EXPR
3323 || code == IMAGPART_EXPR
3324 || code == VIEW_CONVERT_EXPR
3325 || code == BIT_FIELD_REF)
3327 /* We want to do tree-combining on conversion-like expressions.
3328 Make sure we feed only SSA_NAMEs or constants to fold though. */
3329 tree tem = vn_get_expr_for (op0);
3330 if (UNARY_CLASS_P (tem)
3331 || BINARY_CLASS_P (tem)
3332 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3333 || TREE_CODE (tem) == SSA_NAME
3334 || TREE_CODE (tem) == CONSTRUCTOR
3335 || is_gimple_min_invariant (tem))
3336 op0 = tem;
3339 /* Avoid folding if nothing changed, but remember the expression. */
3340 if (op0 == orig_op0)
3341 return NULL_TREE;
3343 if (code == BIT_FIELD_REF)
3345 tree rhs = gimple_assign_rhs1 (stmt);
3346 result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3347 op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3349 else
3350 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3351 if (result)
3353 STRIP_USELESS_TYPE_CONVERSION (result);
3354 if (valid_gimple_rhs_p (result))
3355 return result;
3358 return NULL_TREE;
3361 /* Try to simplify RHS using equivalences and constant folding. */
3363 static tree
3364 try_to_simplify (gimple stmt)
3366 enum tree_code code = gimple_assign_rhs_code (stmt);
3367 tree tem;
3369 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
3370 in this case; there is no point in doing extra work. */
3371 if (code == SSA_NAME)
3372 return NULL_TREE;
3374 /* First try constant folding based on our current lattice. */
3375 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize);
3376 if (tem
3377 && (TREE_CODE (tem) == SSA_NAME
3378 || is_gimple_min_invariant (tem)))
3379 return tem;
3381 /* If that didn't work try combining multiple statements. */
3382 switch (TREE_CODE_CLASS (code))
3384 case tcc_reference:
3385 /* Fallthrough for some unary codes that can operate on registers. */
3386 if (!(code == REALPART_EXPR
3387 || code == IMAGPART_EXPR
3388 || code == VIEW_CONVERT_EXPR
3389 || code == BIT_FIELD_REF))
3390 break;
3391 /* We could do a little more with unary ops, if they expand
3392 into binary ops, but it's debatable whether it is worth it. */
3393 case tcc_unary:
3394 return simplify_unary_expression (stmt);
3396 case tcc_comparison:
3397 case tcc_binary:
3398 return simplify_binary_expression (stmt);
3400 default:
3401 break;
3404 return NULL_TREE;
3407 /* Visit and value number USE, return true if the value number
3408 changed. */
3410 static bool
3411 visit_use (tree use)
3413 bool changed = false;
3414 gimple stmt = SSA_NAME_DEF_STMT (use);
3416 mark_use_processed (use);
3418 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3419 if (dump_file && (dump_flags & TDF_DETAILS)
3420 && !SSA_NAME_IS_DEFAULT_DEF (use))
3422 fprintf (dump_file, "Value numbering ");
3423 print_generic_expr (dump_file, use, 0);
3424 fprintf (dump_file, " stmt = ");
3425 print_gimple_stmt (dump_file, stmt, 0, 0);
3428 /* Handle uninitialized uses. */
3429 if (SSA_NAME_IS_DEFAULT_DEF (use))
3430 changed = set_ssa_val_to (use, use);
3431 else
3433 if (gimple_code (stmt) == GIMPLE_PHI)
3434 changed = visit_phi (stmt);
3435 else if (gimple_has_volatile_ops (stmt))
3436 changed = defs_to_varying (stmt);
3437 else if (is_gimple_assign (stmt))
3439 enum tree_code code = gimple_assign_rhs_code (stmt);
3440 tree lhs = gimple_assign_lhs (stmt);
3441 tree rhs1 = gimple_assign_rhs1 (stmt);
3442 tree simplified;
3444 /* Shortcut for copies. Simplifying copies is pointless,
3445 since we copy the expression and value they represent. */
3446 if (code == SSA_NAME
3447 && TREE_CODE (lhs) == SSA_NAME)
3449 changed = visit_copy (lhs, rhs1);
3450 goto done;
3452 simplified = try_to_simplify (stmt);
3453 if (simplified)
3455 if (dump_file && (dump_flags & TDF_DETAILS))
3457 fprintf (dump_file, "RHS ");
3458 print_gimple_expr (dump_file, stmt, 0, 0);
3459 fprintf (dump_file, " simplified to ");
3460 print_generic_expr (dump_file, simplified, 0);
3461 if (TREE_CODE (lhs) == SSA_NAME)
3462 fprintf (dump_file, " has constants %d\n",
3463 expr_has_constants (simplified));
3464 else
3465 fprintf (dump_file, "\n");
3468 /* Setting value numbers to constants will occasionally
3469 screw up phi congruence because constants are not
3470 uniquely associated with a single ssa name that can be
3471 looked up. */
3472 if (simplified
3473 && is_gimple_min_invariant (simplified)
3474 && TREE_CODE (lhs) == SSA_NAME)
3476 VN_INFO (lhs)->expr = simplified;
3477 VN_INFO (lhs)->has_constants = true;
3478 changed = set_ssa_val_to (lhs, simplified);
3479 goto done;
3481 else if (simplified
3482 && TREE_CODE (simplified) == SSA_NAME
3483 && TREE_CODE (lhs) == SSA_NAME)
3485 changed = visit_copy (lhs, simplified);
3486 goto done;
3488 else if (simplified)
3490 if (TREE_CODE (lhs) == SSA_NAME)
3492 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3493 /* We have to unshare the expression or else
3494 valueizing may change the IL stream. */
3495 VN_INFO (lhs)->expr = unshare_expr (simplified);
3498 else if (stmt_has_constants (stmt)
3499 && TREE_CODE (lhs) == SSA_NAME)
3500 VN_INFO (lhs)->has_constants = true;
3501 else if (TREE_CODE (lhs) == SSA_NAME)
3503 /* We reset expr and constantness here because we may
3504 have been value numbering optimistically, and
3505 iterating. They may become non-constant in this case,
3506 even if they were optimistically constant. */
3508 VN_INFO (lhs)->has_constants = false;
3509 VN_INFO (lhs)->expr = NULL_TREE;
3512 if ((TREE_CODE (lhs) == SSA_NAME
3513 /* We can substitute SSA_NAMEs that are live over
3514 abnormal edges with their constant value. */
3515 && !(gimple_assign_copy_p (stmt)
3516 && is_gimple_min_invariant (rhs1))
3517 && !(simplified
3518 && is_gimple_min_invariant (simplified))
3519 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3520 /* Stores or copies from SSA_NAMEs that are live over
3521 abnormal edges are a problem. */
3522 || (code == SSA_NAME
3523 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3524 changed = defs_to_varying (stmt);
3525 else if (REFERENCE_CLASS_P (lhs)
3526 || DECL_P (lhs))
3527 changed = visit_reference_op_store (lhs, rhs1, stmt);
3528 else if (TREE_CODE (lhs) == SSA_NAME)
3530 if ((gimple_assign_copy_p (stmt)
3531 && is_gimple_min_invariant (rhs1))
3532 || (simplified
3533 && is_gimple_min_invariant (simplified)))
3535 VN_INFO (lhs)->has_constants = true;
3536 if (simplified)
3537 changed = set_ssa_val_to (lhs, simplified);
3538 else
3539 changed = set_ssa_val_to (lhs, rhs1);
3541 else
3543 /* First try to lookup the simplified expression. */
3544 if (simplified)
3546 enum gimple_rhs_class rhs_class;
3549 rhs_class = get_gimple_rhs_class (TREE_CODE (simplified));
3550 if ((rhs_class == GIMPLE_UNARY_RHS
3551 || rhs_class == GIMPLE_BINARY_RHS
3552 || rhs_class == GIMPLE_TERNARY_RHS)
3553 && valid_gimple_rhs_p (simplified))
3555 tree result = vn_nary_op_lookup (simplified, NULL);
3556 if (result)
3558 changed = set_ssa_val_to (lhs, result);
3559 goto done;
3564 /* Otherwise visit the original statement. */
3565 switch (vn_get_stmt_kind (stmt))
3567 case VN_NARY:
3568 changed = visit_nary_op (lhs, stmt);
3569 break;
3570 case VN_REFERENCE:
3571 changed = visit_reference_op_load (lhs, rhs1, stmt);
3572 break;
3573 default:
3574 changed = defs_to_varying (stmt);
3575 break;
3579 else
3580 changed = defs_to_varying (stmt);
3582 else if (is_gimple_call (stmt))
3584 tree lhs = gimple_call_lhs (stmt);
3585 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3587 /* Try constant folding based on our current lattice. */
3588 tree simplified = gimple_fold_stmt_to_constant_1 (stmt,
3589 vn_valueize);
3590 if (simplified)
3592 if (dump_file && (dump_flags & TDF_DETAILS))
3594 fprintf (dump_file, "call ");
3595 print_gimple_expr (dump_file, stmt, 0, 0);
3596 fprintf (dump_file, " simplified to ");
3597 print_generic_expr (dump_file, simplified, 0);
3598 if (TREE_CODE (lhs) == SSA_NAME)
3599 fprintf (dump_file, " has constants %d\n",
3600 expr_has_constants (simplified));
3601 else
3602 fprintf (dump_file, "\n");
3605 /* Setting value numbers to constants will occasionally
3606 screw up phi congruence because constants are not
3607 uniquely associated with a single ssa name that can be
3608 looked up. */
3609 if (simplified
3610 && is_gimple_min_invariant (simplified))
3612 VN_INFO (lhs)->expr = simplified;
3613 VN_INFO (lhs)->has_constants = true;
3614 changed = set_ssa_val_to (lhs, simplified);
3615 if (gimple_vdef (stmt))
3616 changed |= set_ssa_val_to (gimple_vdef (stmt),
3617 gimple_vuse (stmt));
3618 goto done;
3620 else if (simplified
3621 && TREE_CODE (simplified) == SSA_NAME)
3623 changed = visit_copy (lhs, simplified);
3624 if (gimple_vdef (stmt))
3625 changed |= set_ssa_val_to (gimple_vdef (stmt),
3626 gimple_vuse (stmt));
3627 goto done;
3629 else
3631 if (stmt_has_constants (stmt))
3632 VN_INFO (lhs)->has_constants = true;
3633 else
3635 /* We reset expr and constantness here because we may
3636 have been value numbering optimistically, and
3637 iterating. They may become non-constant in this case,
3638 even if they were optimistically constant. */
3639 VN_INFO (lhs)->has_constants = false;
3640 VN_INFO (lhs)->expr = NULL_TREE;
3643 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3645 changed = defs_to_varying (stmt);
3646 goto done;
3651 if (!gimple_call_internal_p (stmt)
3652 && (/* Calls to the same function with the same vuse
3653 and the same operands do not necessarily return the same
3654 value, unless they're pure or const. */
3655 gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
3656 /* If calls have a vdef, subsequent calls won't have
3657 the same incoming vuse. So, if 2 calls with vdef have the
3658 same vuse, we know they're not subsequent.
3659 Two calls to the same function with the same vuse and the
3660 same operands that are not subsequent can be value numbered
3661 the same, because there is no code in the program that can
3662 compare the 2 values... */
3663 || (gimple_vdef (stmt)
3664 /* ... unless the call returns a pointer which does
3665 not alias with anything else, in which case the
3666 information that the values are distinct is encoded
3667 in the IL. */
3668 && !(gimple_call_return_flags (stmt) & ERF_NOALIAS))))
3669 changed = visit_reference_op_call (lhs, stmt);
3670 else
3671 changed = defs_to_varying (stmt);
3673 else
3674 changed = defs_to_varying (stmt);
3676 done:
3677 return changed;
3680 /* Compare two operands by reverse postorder index.  */
3682 static int
3683 compare_ops (const void *pa, const void *pb)
3685 const tree opa = *((const tree *)pa);
3686 const tree opb = *((const tree *)pb);
3687 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3688 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3689 basic_block bba;
3690 basic_block bbb;
3692 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3693 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3694 else if (gimple_nop_p (opstmta))
3695 return -1;
3696 else if (gimple_nop_p (opstmtb))
3697 return 1;
3699 bba = gimple_bb (opstmta);
3700 bbb = gimple_bb (opstmtb);
3702 if (!bba && !bbb)
3703 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3704 else if (!bba)
3705 return -1;
3706 else if (!bbb)
3707 return 1;
3709 if (bba == bbb)
3711 if (gimple_code (opstmta) == GIMPLE_PHI
3712 && gimple_code (opstmtb) == GIMPLE_PHI)
3713 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3714 else if (gimple_code (opstmta) == GIMPLE_PHI)
3715 return -1;
3716 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3717 return 1;
3718 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3719 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3720 else
3721 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3723 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3726 /* Sort an array containing members of a strongly connected component
3727 SCC so that the members are ordered by RPO number.
3728 This means that when the sort is complete, iterating through the
3729 array will give you the members in RPO order. */
3731 static void
3732 sort_scc (vec<tree> scc)
3734 scc.qsort (compare_ops);
3737 /* Insert the no-longer-used nary ONARY into the hash table INFO. */
3739 static void
3740 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3742 size_t size = sizeof_vn_nary_op (onary->length);
3743 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3744 &info->nary_obstack);
3745 memcpy (nary, onary, size);
3746 vn_nary_op_insert_into (nary, info->nary, false);
3749 /* Insert the no-longer-used phi OPHI into the hash table INFO. */
3751 static void
3752 copy_phi (vn_phi_t ophi, vn_tables_t info)
3754 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3755 vn_phi_s **slot;
3756 memcpy (phi, ophi, sizeof (*phi));
3757 ophi->phiargs.create (0);
3758 slot = info->phis.find_slot_with_hash (phi, phi->hashcode, INSERT);
3759 gcc_assert (!*slot);
3760 *slot = phi;
3763 /* Insert the no-longer-used reference OREF into the hash table INFO. */
3765 static void
3766 copy_reference (vn_reference_t oref, vn_tables_t info)
3768 vn_reference_t ref;
3769 vn_reference_s **slot;
3770 ref = (vn_reference_t) pool_alloc (info->references_pool);
3771 memcpy (ref, oref, sizeof (*ref));
3772 oref->operands.create (0);
3773 slot = info->references.find_slot_with_hash (ref, ref->hashcode, INSERT);
3774 if (*slot)
3775 free_reference (*slot);
3776 *slot = ref;
3779 /* Process a strongly connected component in the SSA graph. */
3781 static void
3782 process_scc (vec<tree> scc)
3784 tree var;
3785 unsigned int i;
3786 unsigned int iterations = 0;
3787 bool changed = true;
3788 vn_nary_op_iterator_type hin;
3789 vn_phi_iterator_type hip;
3790 vn_reference_iterator_type hir;
3791 vn_nary_op_t nary;
3792 vn_phi_t phi;
3793 vn_reference_t ref;
3795 /* If the SCC has a single member, just visit it. */
3796 if (scc.length () == 1)
3798 tree use = scc[0];
3799 if (VN_INFO (use)->use_processed)
3800 return;
3801 /* We need to make sure it doesn't form a cycle itself, which can
3802 happen for self-referential PHI nodes. In that case we would
3803 end up inserting an expression with VN_TOP operands into the
3804 valid table which makes us derive bogus equivalences later.
3805 The cheapest way to check this is to assume it for all PHI nodes. */
3806 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3807 /* Fallthru to iteration. */ ;
3808 else
3810 visit_use (use);
3811 return;
3815 /* Iterate over the SCC with the optimistic table until it stops
3816 changing. */
3817 current_info = optimistic_info;
3818 while (changed)
3820 changed = false;
3821 iterations++;
3822 if (dump_file && (dump_flags & TDF_DETAILS))
3823 fprintf (dump_file, "Starting iteration %d\n", iterations);
3824 /* As we are value-numbering optimistically we have to
3825 clear the expression tables and the simplified expressions
3826 in each iteration until we converge. */
3827 optimistic_info->nary.empty ();
3828 optimistic_info->phis.empty ();
3829 optimistic_info->references.empty ();
3830 obstack_free (&optimistic_info->nary_obstack, NULL);
3831 gcc_obstack_init (&optimistic_info->nary_obstack);
3832 empty_alloc_pool (optimistic_info->phis_pool);
3833 empty_alloc_pool (optimistic_info->references_pool);
3834 FOR_EACH_VEC_ELT (scc, i, var)
3835 VN_INFO (var)->expr = NULL_TREE;
3836 FOR_EACH_VEC_ELT (scc, i, var)
3837 changed |= visit_use (var);
3840 statistics_histogram_event (cfun, "SCC iterations", iterations);
3842 /* Finally, copy the contents of the no longer used optimistic
3843 table to the valid table. */
3844 FOR_EACH_HASH_TABLE_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hin)
3845 copy_nary (nary, valid_info);
3846 FOR_EACH_HASH_TABLE_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hip)
3847 copy_phi (phi, valid_info);
3848 FOR_EACH_HASH_TABLE_ELEMENT (optimistic_info->references,
3849 ref, vn_reference_t, hir)
3850 copy_reference (ref, valid_info);
3852 current_info = valid_info;
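/* Copying the optimistic entries over is safe because the loop above
   only exits once a full iteration changed nothing, i.e. the
   optimistic table has reached a fixpoint consistent with the final
   value numbers.  */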
3856 /* Pop the components of the found SCC for NAME off the SCC stack
3857 and process them. Returns true if all went well, false if
3858 we run into resource limits. */
3860 static bool
3861 extract_and_process_scc_for_name (tree name)
3863 auto_vec<tree> scc;
3864 tree x;
3866 /* Found an SCC, pop the components off the SCC stack and
3867 process them. */
3870 x = sccstack.pop ();
3872 VN_INFO (x)->on_sccstack = false;
3873 scc.safe_push (x);
3874 } while (x != name);
3876 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
3877 if (scc.length ()
3878 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3880 if (dump_file)
3881 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3882 "SCC size %u exceeding %u\n", scc.length (),
3883 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3885 return false;
3888 if (scc.length () > 1)
3889 sort_scc (scc);
3891 if (dump_file && (dump_flags & TDF_DETAILS))
3892 print_scc (dump_file, scc);
3894 process_scc (scc);
3896 return true;
/* Depth first search on NAME to discover and process SCCs in the SSA
   graph.
   Execution of this algorithm relies on the fact that the SCCs are
   popped off the stack in topological order.
   Returns true if successful, false if we stopped processing SCCs due
   to resource constraints.  */

static bool
DFS (tree name)
{
  vec<ssa_op_iter> itervec = vNULL;
  vec<tree> namevec = vNULL;
  use_operand_p usep = NULL;
  gimple defstmt;
  tree use;
  ssa_op_iter iter;

start_over:
  /* SCC info */
  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  sccstack.safe_push (name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCCs.  */
  if (!gimple_nop_p (defstmt))
    {
      /* Push a new iterator.  */
      if (gimple_code (defstmt) == GIMPLE_PHI)
	usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
      else
	usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    }
  else
    clear_and_done_ssa_iter (&iter);

  while (1)
    {
      /* If we are done processing uses of a name, go up the stack
	 of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
	{
	  /* See if we found an SCC.  */
	  if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
	    if (!extract_and_process_scc_for_name (name))
	      {
		namevec.release ();
		itervec.release ();
		return false;
	      }

	  /* Check if we are done.  */
	  if (namevec.is_empty ())
	    {
	      namevec.release ();
	      itervec.release ();
	      return true;
	    }

	  /* Restore the last use walker and continue walking there.  */
	  use = name;
	  name = namevec.pop ();
	  memcpy (&iter, &itervec.last (),
		  sizeof (ssa_op_iter));
	  itervec.pop ();
	  goto continue_walking;
	}

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
	 invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
	{
	  if (! (VN_INFO (use)->visited))
	    {
	      /* Recurse by pushing the current use walking state on
		 the stack and starting over.  */
	      itervec.safe_push (iter);
	      namevec.safe_push (name);
	      name = use;
	      goto start_over;
	    }

continue_walking:
	  VN_INFO (name)->low = MIN (VN_INFO (name)->low,
				     VN_INFO (use)->low);

	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
	      && VN_INFO (use)->on_sccstack)
	    {
	      VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
					 VN_INFO (name)->low);
	    }
	}

      usep = op_iter_next_use (&iter);
    }
}
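
/* Note: DFS implements Tarjan's SCC algorithm in iterative form; the
   namevec/itervec pair serves as an explicit recursion stack so that
   long SSA use-def chains cannot overflow the real call stack.  */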
/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table)
{
  table->phis.create (23);
  table->nary.create (23);
  table->references.create (23);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = create_alloc_pool ("VN phis",
					sizeof (struct vn_phi_s),
					30);
  table->references_pool = create_alloc_pool ("VN references",
					      sizeof (struct vn_reference_s),
					      30);
}
/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  table->phis.dispose ();
  table->nary.dispose ();
  table->references.dispose ();
  obstack_free (&table->nary_obstack, NULL);
  free_alloc_pool (table->phis_pool);
  free_alloc_pool (table->references_pool);
}
/* Initialize the data structures used by SCCVN for the current
   function.  */

static void
init_scc_vn (void)
{
  size_t i;
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  sccstack.create (0);
  constant_to_value_id.create (23);

  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table.create (num_ssa_names + 1);
  /* create doesn't actually grow the vector to the right size, it
     just preallocates the space to do so.  */
  vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs.create (0);
  shared_lookup_references.create (0);
  rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
  rpo_numbers_temp =
    XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* rpo_numbers_temp holds the RPO ordering: rpo_numbers_temp[i] == bb
     means that the i'th block in RPO order is bb.  We want the inverse
     mapping from block index to RPO number, so rearrange this array
     into rpo_numbers.  */
  for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;
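  /* For example, if the function's blocks have indices 2, 3 and 5 in
     RPO order, rpo_numbers_temp is {2, 3, 5} and the loop above sets
     rpo_numbers[2] = 0, rpo_numbers[3] = 1 and rpo_numbers[5] = 2.  */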
  XDELETE (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
	{
	  VN_INFO_GET (name)->valnum = VN_TOP;
	  VN_INFO (name)->expr = NULL_TREE;
	  VN_INFO (name)->value_id = 0;
	}
    }

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}
/* Free the data structures allocated by init_scc_vn, releasing SSA
   names that were created by value numbering itself.  */

void
free_scc_vn (void)
{
  size_t i;

  constant_to_value_id.dispose ();
  BITMAP_FREE (constant_value_ids);
  shared_lookup_phiargs.release ();
  shared_lookup_references.release ();
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->needs_insertion)
	release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  vn_ssa_aux_table.release ();

  sccstack.release ();
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}
/* Set *ID according to RESULT.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result && TREE_CODE (result) == SSA_NAME)
    *id = VN_INFO (result)->value_id;
  else if (result && is_gimple_min_invariant (result))
    *id = get_or_alloc_constant_value_id (result);
  else
    *id = get_next_value_id ();
}
/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HASH_TABLE_ELEMENT (valid_info->nary, vno, vn_nary_op_t, hin)
    set_value_id_for_result (vno->result, &vno->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (valid_info->phis, vp, vn_phi_t, hip)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (valid_info->references, vr, vn_reference_t, hir)
    set_value_id_for_result (vr->result, &vr->value_id);
}
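
/* Dominator walker that decides which outgoing edges of a block are
   executable, clearing EDGE_EXECUTABLE on the others.  FAIL is set
   when value numbering a controlling statement hits the SCC size
   limit.  */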
class cond_dom_walker : public dom_walker
{
public:
  cond_dom_walker () : dom_walker (CDI_DOMINATORS), fail (false) {}

  virtual void before_dom_children (basic_block);

  bool fail;
};
void
cond_dom_walker::before_dom_children (basic_block bb)
{
  edge e;
  edge_iterator ei;

  if (fail)
    return;

  /* If any of the predecessor edges are still marked as possibly
     executable consider this block reachable.  */
  bool reachable = bb == ENTRY_BLOCK_PTR_FOR_FN (cfun);
  FOR_EACH_EDGE (e, ei, bb->preds)
    reachable |= (e->flags & EDGE_EXECUTABLE);

  /* If the block is not reachable all outgoing edges are not
     executable.  */
  if (!reachable)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Marking all outgoing edges of unreachable "
		 "BB %d as not executable\n", bb->index);

      FOR_EACH_EDGE (e, ei, bb->succs)
	e->flags &= ~EDGE_EXECUTABLE;
      return;
    }

  gimple stmt = last_stmt (bb);
  if (!stmt)
    return;

  /* Value-number the last stmt's SSA uses.  */
  ssa_op_iter i;
  tree op;
  FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
    if (VN_INFO (op)->visited == false
	&& !DFS (op))
      {
	fail = true;
	return;
      }

  /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
     if value-numbering can prove they are not reachable.  Handling
     computed gotos is also possible.  */
  tree val;
  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	/* Work hard in computing the condition and take into account
	   the valueization of the defining stmt.  */
	if (TREE_CODE (lhs) == SSA_NAME)
	  lhs = vn_get_expr_for (lhs);
	if (TREE_CODE (rhs) == SSA_NAME)
	  rhs = vn_get_expr_for (rhs);
	val = fold_binary (gimple_cond_code (stmt),
			   boolean_type_node, lhs, rhs);
	break;
      }
    case GIMPLE_SWITCH:
      val = gimple_switch_index (stmt);
      break;
    case GIMPLE_GOTO:
      val = gimple_goto_dest (stmt);
      break;
    default:
      val = NULL_TREE;
      break;
    }
  if (!val)
    return;

  edge taken = find_taken_edge (bb, vn_valueize (val));
  if (!taken)
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
	     "not executable\n", bb->index, bb->index, taken->dest->index);

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e != taken)
      e->flags &= ~EDGE_EXECUTABLE;
}
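
/* Note: clearing EDGE_EXECUTABLE here pays off later because PHI
   value numbering only considers arguments on executable edges, so a
   PHI whose remaining executable arguments agree can collapse to a
   single value.  */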
/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
   how we use the alias oracle walking during the VN process.  */

bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  basic_block bb;
  size_t i;
  tree param;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();
  current_info = valid_info;

  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    {
      tree def = ssa_default_def (cfun, param);
      if (def)
	VN_INFO (def)->valnum = def;
    }

  /* Mark all edges as possibly executable.  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      edge_iterator ei;
      edge e;
      FOR_EACH_EDGE (e, ei, bb->succs)
	e->flags |= EDGE_EXECUTABLE;
    }

  /* Walk all blocks in dominator order, value-numbering the last
     stmt's SSA uses and deciding whether outgoing edges are not
     executable.  */
  cond_dom_walker walker;
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  if (walker.fail)
    {
      free_scc_vn ();
      return false;
    }

  /* Value-number remaining SSA names.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->visited == false
	  && !has_zero_uses (name))
	if (!DFS (name))
	  {
	    free_scc_vn ();
	    return false;
	  }
    }

  /* Initialize the value ids.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (info->valnum == name
	  || info->valnum == VN_TOP)
	info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
	  && info->valnum != name
	  && info->value_id != VN_INFO (info->valnum)->value_id)
	info->value_id = VN_INFO (info->valnum)->value_id;
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);
	  if (name
	      && VN_INFO (name)->visited
	      && SSA_VAL (name) != name)
	    {
	      print_generic_expr (dump_file, name, 0);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, SSA_VAL (name), 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  return true;
}
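
/* Callers pair this with free_scc_vn; for example (the particular
   walk kind here is illustrative):

     if (!run_scc_vn (VN_WALK))
       return 0;
     ...
     free_scc_vn ();

   The vn_lookup_kind argument controls how aggressively reference
   lookups may walk the alias-oracle use-def chains.  */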
/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}
/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
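
/* Note: OEP_PURE_SAME additionally lets two calls to the same pure
   function with equal arguments compare equal, which is the notion
   of equality value numbering wants here.  */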
/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (type)
	       && TYPE_OVERFLOW_TRAPS (type))
	honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
				       honor_trapv,
				       honor_nans, honor_snans, rhs2,
				       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}
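
/* Note: PRE consults this predicate before inserting an expression on
   a path where it was not originally executed, so that a potentially
   trapping operation is never introduced there.  */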