AVX-512. Branch to hold overall changes introduced by 20140717 EAS.
[official-gcc.git] / gcc / tree-ssa-sccvn.c
blob139ac3bbc72728b8c0aa75d9e9cdd080ba598f19
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2014 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "stor-layout.h"
27 #include "basic-block.h"
28 #include "gimple-pretty-print.h"
29 #include "tree-inline.h"
30 #include "hash-table.h"
31 #include "tree-ssa-alias.h"
32 #include "internal-fn.h"
33 #include "gimple-fold.h"
34 #include "tree-eh.h"
35 #include "gimple-expr.h"
36 #include "is-a.h"
37 #include "gimple.h"
38 #include "gimplify.h"
39 #include "gimple-ssa.h"
40 #include "tree-phinodes.h"
41 #include "ssa-iterators.h"
42 #include "stringpool.h"
43 #include "tree-ssanames.h"
44 #include "expr.h"
45 #include "tree-dfa.h"
46 #include "tree-ssa.h"
47 #include "dumpfile.h"
48 #include "alloc-pool.h"
49 #include "flags.h"
50 #include "cfgloop.h"
51 #include "params.h"
52 #include "tree-ssa-propagate.h"
53 #include "tree-ssa-sccvn.h"
54 #include "tree-cfg.h"
55 #include "domwalk.h"
57 /* This algorithm is based on the SCC algorithm presented by Keith
58 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
59 (http://citeseer.ist.psu.edu/41805.html). In
60 straight line code, it is equivalent to a regular hash based value
61 numbering that is performed in reverse postorder.
63 For code with cycles, there are two alternatives, both of which
64 require keeping the hashtables separate from the actual list of
65 value numbers for SSA names.
67 1. Iterate value numbering in an RPO walk of the blocks, removing
68 all the entries from the hashtable after each iteration (but
69 keeping the SSA name->value number mapping between iterations).
70 Iterate until it does not change.
72 2. Perform value numbering as part of an SCC walk on the SSA graph,
73 iterating only the cycles in the SSA graph until they do not change
74 (using a separate, optimistic hashtable for value numbering the SCC
75 operands).
77 The second is not just faster in practice (because most SSA graph
78 cycles do not involve all the variables in the graph), it also has
79 some nice properties.
81 One of these nice properties is that when we pop an SCC off the
82 stack, we are guaranteed to have processed all the operands coming from
83 *outside of that SCC*, so we do not need to do anything special to
84 ensure they have value numbers.
86 Another nice property is that the SCC walk is done as part of a DFS
87 of the SSA graph, which makes it easy to perform combining and
88 simplifying operations at the same time.
90 The code below is deliberately written in a way that makes it easy
91 to separate the SCC walk from the other work it does.
93 In order to propagate constants through the code, we track which
94 expressions contain constants, and use those while folding. In
95 theory, we could also track expressions whose value numbers are
96 replaced, in case we end up folding based on expression
97 identities.
99 In order to value number memory, we assign value numbers to vuses.
100 This enables us to note that, for example, stores to the same
101 address of the same value from the same starting memory states are
102 equivalent.
103 TODO:
105 1. We can iterate only the changing portions of the SCC's, but
106 I have not seen an SCC big enough for this to be a win.
107 2. If you differentiate between phi nodes for loops and phi nodes
108 for if-then-else, you can properly consider phi nodes in different
109 blocks for equivalence.
110 3. We could value number vuses in more cases, particularly, whole
111 structure copies.
115 /* vn_nary_op hashtable helpers. */
117 struct vn_nary_op_hasher : typed_noop_remove <vn_nary_op_s>
119 typedef vn_nary_op_s value_type;
120 typedef vn_nary_op_s compare_type;
121 static inline hashval_t hash (const value_type *);
122 static inline bool equal (const value_type *, const compare_type *);
125 /* Return the computed hashcode for nary operation P1. */
127 inline hashval_t
128 vn_nary_op_hasher::hash (const value_type *vno1)
130 return vno1->hashcode;
133 /* Compare nary operations P1 and P2 and return true if they are
134 equivalent. */
136 inline bool
137 vn_nary_op_hasher::equal (const value_type *vno1, const compare_type *vno2)
139 return vn_nary_op_eq (vno1, vno2);
142 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
143 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
146 /* vn_phi hashtable helpers. */
148 static int
149 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
151 struct vn_phi_hasher
153 typedef vn_phi_s value_type;
154 typedef vn_phi_s compare_type;
155 static inline hashval_t hash (const value_type *);
156 static inline bool equal (const value_type *, const compare_type *);
157 static inline void remove (value_type *);
160 /* Return the computed hashcode for phi operation P1. */
162 inline hashval_t
163 vn_phi_hasher::hash (const value_type *vp1)
165 return vp1->hashcode;
168 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
170 inline bool
171 vn_phi_hasher::equal (const value_type *vp1, const compare_type *vp2)
173 return vn_phi_eq (vp1, vp2);
176 /* Free a phi operation structure VP. */
178 inline void
179 vn_phi_hasher::remove (value_type *phi)
181 phi->phiargs.release ();
184 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
185 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
188 /* Compare two reference operands P1 and P2 for equality. Return true if
189 they are equal, and false otherwise. */
191 static int
192 vn_reference_op_eq (const void *p1, const void *p2)
194 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
195 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
197 return (vro1->opcode == vro2->opcode
198 /* We do not care for differences in type qualification. */
199 && (vro1->type == vro2->type
200 || (vro1->type && vro2->type
201 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
202 TYPE_MAIN_VARIANT (vro2->type))))
203 && expressions_equal_p (vro1->op0, vro2->op0)
204 && expressions_equal_p (vro1->op1, vro2->op1)
205 && expressions_equal_p (vro1->op2, vro2->op2));
208 /* Free a reference operation structure VP. */
210 static inline void
211 free_reference (vn_reference_s *vr)
213 vr->operands.release ();
217 /* vn_reference hashtable helpers. */
219 struct vn_reference_hasher
221 typedef vn_reference_s value_type;
222 typedef vn_reference_s compare_type;
223 static inline hashval_t hash (const value_type *);
224 static inline bool equal (const value_type *, const compare_type *);
225 static inline void remove (value_type *);
228 /* Return the hashcode for a given reference operation P1. */
230 inline hashval_t
231 vn_reference_hasher::hash (const value_type *vr1)
233 return vr1->hashcode;
236 inline bool
237 vn_reference_hasher::equal (const value_type *v, const compare_type *c)
239 return vn_reference_eq (v, c);
242 inline void
243 vn_reference_hasher::remove (value_type *v)
245 free_reference (v);
248 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
249 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
252 /* The set of hashtables and alloc_pool's for their items. */
254 typedef struct vn_tables_s
256 vn_nary_op_table_type *nary;
257 vn_phi_table_type *phis;
258 vn_reference_table_type *references;
259 struct obstack nary_obstack;
260 alloc_pool phis_pool;
261 alloc_pool references_pool;
262 } *vn_tables_t;
265 /* vn_constant hashtable helpers. */
267 struct vn_constant_hasher : typed_free_remove <vn_constant_s>
269 typedef vn_constant_s value_type;
270 typedef vn_constant_s compare_type;
271 static inline hashval_t hash (const value_type *);
272 static inline bool equal (const value_type *, const compare_type *);
275 /* Hash table hash function for vn_constant_t. */
277 inline hashval_t
278 vn_constant_hasher::hash (const value_type *vc1)
280 return vc1->hashcode;
283 /* Hash table equality function for vn_constant_t. */
285 inline bool
286 vn_constant_hasher::equal (const value_type *vc1, const compare_type *vc2)
288 if (vc1->hashcode != vc2->hashcode)
289 return false;
291 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
294 static hash_table<vn_constant_hasher> *constant_to_value_id;
295 static bitmap constant_value_ids;
298 /* Valid hashtables storing information we have proven to be
299 correct. */
301 static vn_tables_t valid_info;
303 /* Optimistic hashtables storing information we are making assumptions about
304 during iterations. */
306 static vn_tables_t optimistic_info;
308 /* Pointer to the set of hashtables that is currently being used.
309 Should always point to either the optimistic_info, or the
310 valid_info. */
312 static vn_tables_t current_info;
315 /* Reverse post order index for each basic block. */
317 static int *rpo_numbers;
319 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
321 /* Return the SSA value of the VUSE x, supporting released VDEFs
322 during elimination which will value-number the VDEF to the
323 associated VUSE (but not substitute in the whole lattice). */
325 static inline tree
326 vuse_ssa_val (tree x)
328 if (!x)
329 return NULL_TREE;
333 x = SSA_VAL (x);
335 while (SSA_NAME_IN_FREE_LIST (x));
337 return x;
340 /* This represents the top of the VN lattice, which is the universal
341 value. */
343 tree VN_TOP;
345 /* Unique counter for our value ids. */
347 static unsigned int next_value_id;
349 /* Next DFS number and the stack for strongly connected component
350 detection. */
352 static unsigned int next_dfs_num;
353 static vec<tree> sccstack;
357 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
358 are allocated on an obstack for locality reasons, and to free them
359 without looping over the vec. */
361 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
362 static struct obstack vn_ssa_aux_obstack;
364 /* Return the value numbering information for a given SSA name. */
366 vn_ssa_aux_t
367 VN_INFO (tree name)
369 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
370 gcc_checking_assert (res);
371 return res;
374 /* Set the value numbering info for a given SSA name to a given
375 value. */
377 static inline void
378 VN_INFO_SET (tree name, vn_ssa_aux_t value)
380 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
383 /* Initialize the value numbering info for a given SSA name.
384 This should be called just once for every SSA name. */
386 vn_ssa_aux_t
387 VN_INFO_GET (tree name)
389 vn_ssa_aux_t newinfo;
391 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
392 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
393 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
394 vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
395 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
396 return newinfo;
400 /* Get the representative expression for the SSA_NAME NAME. Returns
401 the representative SSA_NAME if there is no expression associated with it. */
403 tree
404 vn_get_expr_for (tree name)
406 vn_ssa_aux_t vn = VN_INFO (name);
407 gimple def_stmt;
408 tree expr = NULL_TREE;
409 enum tree_code code;
411 if (vn->valnum == VN_TOP)
412 return name;
414 /* If the value-number is a constant it is the representative
415 expression. */
416 if (TREE_CODE (vn->valnum) != SSA_NAME)
417 return vn->valnum;
419 /* Get to the information of the value of this SSA_NAME. */
420 vn = VN_INFO (vn->valnum);
422 /* If the value-number is a constant it is the representative
423 expression. */
424 if (TREE_CODE (vn->valnum) != SSA_NAME)
425 return vn->valnum;
427 /* Else if we have an expression, return it. */
428 if (vn->expr != NULL_TREE)
429 return vn->expr;
431 /* Otherwise use the defining statement to build the expression. */
432 def_stmt = SSA_NAME_DEF_STMT (vn->valnum);
434 /* If the value number is not an assignment use it directly. */
435 if (!is_gimple_assign (def_stmt))
436 return vn->valnum;
438 /* Note that we can valueize here because we clear the cached
439 simplified expressions after each optimistic iteration. */
440 code = gimple_assign_rhs_code (def_stmt);
441 switch (TREE_CODE_CLASS (code))
443 case tcc_reference:
444 if ((code == REALPART_EXPR
445 || code == IMAGPART_EXPR
446 || code == VIEW_CONVERT_EXPR)
447 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
448 0)) == SSA_NAME)
449 expr = fold_build1 (code,
450 gimple_expr_type (def_stmt),
451 vn_valueize (TREE_OPERAND
452 (gimple_assign_rhs1 (def_stmt), 0)));
453 break;
455 case tcc_unary:
456 expr = fold_build1 (code,
457 gimple_expr_type (def_stmt),
458 vn_valueize (gimple_assign_rhs1 (def_stmt)));
459 break;
461 case tcc_binary:
462 expr = fold_build2 (code,
463 gimple_expr_type (def_stmt),
464 vn_valueize (gimple_assign_rhs1 (def_stmt)),
465 vn_valueize (gimple_assign_rhs2 (def_stmt)));
466 break;
468 case tcc_exceptional:
469 if (code == CONSTRUCTOR
470 && TREE_CODE
471 (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
472 expr = gimple_assign_rhs1 (def_stmt);
473 break;
475 default:;
477 if (expr == NULL_TREE)
478 return vn->valnum;
480 /* Cache the expression. */
481 vn->expr = expr;
483 return expr;
486 /* Return the vn_kind the expression computed by the stmt should be
487 associated with. */
489 enum vn_kind
490 vn_get_stmt_kind (gimple stmt)
492 switch (gimple_code (stmt))
494 case GIMPLE_CALL:
495 return VN_REFERENCE;
496 case GIMPLE_PHI:
497 return VN_PHI;
498 case GIMPLE_ASSIGN:
500 enum tree_code code = gimple_assign_rhs_code (stmt);
501 tree rhs1 = gimple_assign_rhs1 (stmt);
502 switch (get_gimple_rhs_class (code))
504 case GIMPLE_UNARY_RHS:
505 case GIMPLE_BINARY_RHS:
506 case GIMPLE_TERNARY_RHS:
507 return VN_NARY;
508 case GIMPLE_SINGLE_RHS:
509 switch (TREE_CODE_CLASS (code))
511 case tcc_reference:
512 /* VOP-less references can go through unary case. */
513 if ((code == REALPART_EXPR
514 || code == IMAGPART_EXPR
515 || code == VIEW_CONVERT_EXPR
516 || code == BIT_FIELD_REF)
517 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
518 return VN_NARY;
520 /* Fallthrough. */
521 case tcc_declaration:
522 return VN_REFERENCE;
524 case tcc_constant:
525 return VN_CONSTANT;
527 default:
528 if (code == ADDR_EXPR)
529 return (is_gimple_min_invariant (rhs1)
530 ? VN_CONSTANT : VN_REFERENCE);
531 else if (code == CONSTRUCTOR)
532 return VN_NARY;
533 return VN_NONE;
535 default:
536 return VN_NONE;
539 default:
540 return VN_NONE;
544 /* Lookup a value id for CONSTANT and return it. If it does not
545 exist returns 0. */
547 unsigned int
548 get_constant_value_id (tree constant)
550 vn_constant_s **slot;
551 struct vn_constant_s vc;
553 vc.hashcode = vn_hash_constant_with_type (constant);
554 vc.constant = constant;
555 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
556 if (slot)
557 return (*slot)->value_id;
558 return 0;
561 /* Lookup a value id for CONSTANT, and if it does not exist, create a
562 new one and return it. If it does exist, return it. */
564 unsigned int
565 get_or_alloc_constant_value_id (tree constant)
567 vn_constant_s **slot;
568 struct vn_constant_s vc;
569 vn_constant_t vcp;
571 vc.hashcode = vn_hash_constant_with_type (constant);
572 vc.constant = constant;
573 slot = constant_to_value_id->find_slot (&vc, INSERT);
574 if (*slot)
575 return (*slot)->value_id;
577 vcp = XNEW (struct vn_constant_s);
578 vcp->hashcode = vc.hashcode;
579 vcp->constant = constant;
580 vcp->value_id = get_next_value_id ();
581 *slot = vcp;
582 bitmap_set_bit (constant_value_ids, vcp->value_id);
583 return vcp->value_id;
586 /* Return true if V is a value id for a constant. */
588 bool
589 value_id_constant_p (unsigned int v)
591 return bitmap_bit_p (constant_value_ids, v);
594 /* Compute the hash for a reference operand VRO1. */
596 static hashval_t
597 vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
599 result = iterative_hash_hashval_t (vro1->opcode, result);
600 if (vro1->op0)
601 result = iterative_hash_expr (vro1->op0, result);
602 if (vro1->op1)
603 result = iterative_hash_expr (vro1->op1, result);
604 if (vro1->op2)
605 result = iterative_hash_expr (vro1->op2, result);
606 return result;
609 /* Compute a hash for the reference operation VR1 and return it. */
611 hashval_t
612 vn_reference_compute_hash (const vn_reference_t vr1)
614 hashval_t result = 0;
615 int i;
616 vn_reference_op_t vro;
617 HOST_WIDE_INT off = -1;
618 bool deref = false;
620 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
622 if (vro->opcode == MEM_REF)
623 deref = true;
624 else if (vro->opcode != ADDR_EXPR)
625 deref = false;
626 if (vro->off != -1)
628 if (off == -1)
629 off = 0;
630 off += vro->off;
632 else
634 if (off != -1
635 && off != 0)
636 result = iterative_hash_hashval_t (off, result);
637 off = -1;
638 if (deref
639 && vro->opcode == ADDR_EXPR)
641 if (vro->op0)
643 tree op = TREE_OPERAND (vro->op0, 0);
644 result = iterative_hash_hashval_t (TREE_CODE (op), result);
645 result = iterative_hash_expr (op, result);
648 else
649 result = vn_reference_op_compute_hash (vro, result);
652 if (vr1->vuse)
653 result += SSA_NAME_VERSION (vr1->vuse);
655 return result;
658 /* Return true if reference operations VR1 and VR2 are equivalent. This
659 means they have the same set of operands and vuses. */
661 bool
662 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
664 unsigned i, j;
666 /* Early out if this is not a hash collision. */
667 if (vr1->hashcode != vr2->hashcode)
668 return false;
670 /* The VOP needs to be the same. */
671 if (vr1->vuse != vr2->vuse)
672 return false;
674 /* If the operands are the same we are done. */
675 if (vr1->operands == vr2->operands)
676 return true;
678 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
679 return false;
681 if (INTEGRAL_TYPE_P (vr1->type)
682 && INTEGRAL_TYPE_P (vr2->type))
684 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
685 return false;
687 else if (INTEGRAL_TYPE_P (vr1->type)
688 && (TYPE_PRECISION (vr1->type)
689 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
690 return false;
691 else if (INTEGRAL_TYPE_P (vr2->type)
692 && (TYPE_PRECISION (vr2->type)
693 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
694 return false;
696 i = 0;
697 j = 0;
700 HOST_WIDE_INT off1 = 0, off2 = 0;
701 vn_reference_op_t vro1, vro2;
702 vn_reference_op_s tem1, tem2;
703 bool deref1 = false, deref2 = false;
704 for (; vr1->operands.iterate (i, &vro1); i++)
706 if (vro1->opcode == MEM_REF)
707 deref1 = true;
708 if (vro1->off == -1)
709 break;
710 off1 += vro1->off;
712 for (; vr2->operands.iterate (j, &vro2); j++)
714 if (vro2->opcode == MEM_REF)
715 deref2 = true;
716 if (vro2->off == -1)
717 break;
718 off2 += vro2->off;
720 if (off1 != off2)
721 return false;
722 if (deref1 && vro1->opcode == ADDR_EXPR)
724 memset (&tem1, 0, sizeof (tem1));
725 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
726 tem1.type = TREE_TYPE (tem1.op0);
727 tem1.opcode = TREE_CODE (tem1.op0);
728 vro1 = &tem1;
729 deref1 = false;
731 if (deref2 && vro2->opcode == ADDR_EXPR)
733 memset (&tem2, 0, sizeof (tem2));
734 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
735 tem2.type = TREE_TYPE (tem2.op0);
736 tem2.opcode = TREE_CODE (tem2.op0);
737 vro2 = &tem2;
738 deref2 = false;
740 if (deref1 != deref2)
741 return false;
742 if (!vn_reference_op_eq (vro1, vro2))
743 return false;
744 ++j;
745 ++i;
747 while (vr1->operands.length () != i
748 || vr2->operands.length () != j);
750 return true;
753 /* Copy the operations present in load/store REF into RESULT, a vector of
754 vn_reference_op_s's. */
756 void
757 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
759 if (TREE_CODE (ref) == TARGET_MEM_REF)
761 vn_reference_op_s temp;
763 result->reserve (3);
765 memset (&temp, 0, sizeof (temp));
766 temp.type = TREE_TYPE (ref);
767 temp.opcode = TREE_CODE (ref);
768 temp.op0 = TMR_INDEX (ref);
769 temp.op1 = TMR_STEP (ref);
770 temp.op2 = TMR_OFFSET (ref);
771 temp.off = -1;
772 result->quick_push (temp);
774 memset (&temp, 0, sizeof (temp));
775 temp.type = NULL_TREE;
776 temp.opcode = ERROR_MARK;
777 temp.op0 = TMR_INDEX2 (ref);
778 temp.off = -1;
779 result->quick_push (temp);
781 memset (&temp, 0, sizeof (temp));
782 temp.type = NULL_TREE;
783 temp.opcode = TREE_CODE (TMR_BASE (ref));
784 temp.op0 = TMR_BASE (ref);
785 temp.off = -1;
786 result->quick_push (temp);
787 return;
790 /* For non-calls, store the information that makes up the address. */
791 tree orig = ref;
792 while (ref)
794 vn_reference_op_s temp;
796 memset (&temp, 0, sizeof (temp));
797 temp.type = TREE_TYPE (ref);
798 temp.opcode = TREE_CODE (ref);
799 temp.off = -1;
801 switch (temp.opcode)
803 case MODIFY_EXPR:
804 temp.op0 = TREE_OPERAND (ref, 1);
805 break;
806 case WITH_SIZE_EXPR:
807 temp.op0 = TREE_OPERAND (ref, 1);
808 temp.off = 0;
809 break;
810 case MEM_REF:
811 /* The base address gets its own vn_reference_op_s structure. */
812 temp.op0 = TREE_OPERAND (ref, 1);
813 if (tree_fits_shwi_p (TREE_OPERAND (ref, 1)))
814 temp.off = tree_to_shwi (TREE_OPERAND (ref, 1));
815 break;
816 case BIT_FIELD_REF:
817 /* Record bits and position. */
818 temp.op0 = TREE_OPERAND (ref, 1);
819 temp.op1 = TREE_OPERAND (ref, 2);
820 break;
821 case COMPONENT_REF:
822 /* The field decl is enough to unambiguously specify the field,
823 a matching type is not necessary and a mismatching type
824 is always a spurious difference. */
825 temp.type = NULL_TREE;
826 temp.op0 = TREE_OPERAND (ref, 1);
827 temp.op1 = TREE_OPERAND (ref, 2);
829 tree this_offset = component_ref_field_offset (ref);
830 if (this_offset
831 && TREE_CODE (this_offset) == INTEGER_CST)
833 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
834 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
836 offset_int off
837 = (wi::to_offset (this_offset)
838 + wi::lrshift (wi::to_offset (bit_offset),
839 LOG2_BITS_PER_UNIT));
840 if (wi::fits_shwi_p (off)
841 /* Probibit value-numbering zero offset components
842 of addresses the same before the pass folding
843 __builtin_object_size had a chance to run
844 (checking cfun->after_inlining does the
845 trick here). */
846 && (TREE_CODE (orig) != ADDR_EXPR
847 || off != 0
848 || cfun->after_inlining))
849 temp.off = off.to_shwi ();
853 break;
854 case ARRAY_RANGE_REF:
855 case ARRAY_REF:
856 /* Record index as operand. */
857 temp.op0 = TREE_OPERAND (ref, 1);
858 /* Always record lower bounds and element size. */
859 temp.op1 = array_ref_low_bound (ref);
860 temp.op2 = array_ref_element_size (ref);
861 if (TREE_CODE (temp.op0) == INTEGER_CST
862 && TREE_CODE (temp.op1) == INTEGER_CST
863 && TREE_CODE (temp.op2) == INTEGER_CST)
865 offset_int off = ((wi::to_offset (temp.op0)
866 - wi::to_offset (temp.op1))
867 * wi::to_offset (temp.op2));
868 if (wi::fits_shwi_p (off))
869 temp.off = off.to_shwi();
871 break;
872 case VAR_DECL:
873 if (DECL_HARD_REGISTER (ref))
875 temp.op0 = ref;
876 break;
878 /* Fallthru. */
879 case PARM_DECL:
880 case CONST_DECL:
881 case RESULT_DECL:
882 /* Canonicalize decls to MEM[&decl] which is what we end up with
883 when valueizing MEM[ptr] with ptr = &decl. */
884 temp.opcode = MEM_REF;
885 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
886 temp.off = 0;
887 result->safe_push (temp);
888 temp.opcode = ADDR_EXPR;
889 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
890 temp.type = TREE_TYPE (temp.op0);
891 temp.off = -1;
892 break;
893 case STRING_CST:
894 case INTEGER_CST:
895 case COMPLEX_CST:
896 case VECTOR_CST:
897 case REAL_CST:
898 case FIXED_CST:
899 case CONSTRUCTOR:
900 case SSA_NAME:
901 temp.op0 = ref;
902 break;
903 case ADDR_EXPR:
904 if (is_gimple_min_invariant (ref))
906 temp.op0 = ref;
907 break;
909 /* Fallthrough. */
910 /* These are only interesting for their operands, their
911 existence, and their type. They will never be the last
912 ref in the chain of references (IE they require an
913 operand), so we don't have to put anything
914 for op* as it will be handled by the iteration */
915 case REALPART_EXPR:
916 case VIEW_CONVERT_EXPR:
917 temp.off = 0;
918 break;
919 case IMAGPART_EXPR:
920 /* This is only interesting for its constant offset. */
921 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
922 break;
923 default:
924 gcc_unreachable ();
926 result->safe_push (temp);
928 if (REFERENCE_CLASS_P (ref)
929 || TREE_CODE (ref) == MODIFY_EXPR
930 || TREE_CODE (ref) == WITH_SIZE_EXPR
931 || (TREE_CODE (ref) == ADDR_EXPR
932 && !is_gimple_min_invariant (ref)))
933 ref = TREE_OPERAND (ref, 0);
934 else
935 ref = NULL_TREE;
939 /* Build a alias-oracle reference abstraction in *REF from the vn_reference
940 operands in *OPS, the reference alias set SET and the reference type TYPE.
941 Return true if something useful was produced. */
943 bool
944 ao_ref_init_from_vn_reference (ao_ref *ref,
945 alias_set_type set, tree type,
946 vec<vn_reference_op_s> ops)
948 vn_reference_op_t op;
949 unsigned i;
950 tree base = NULL_TREE;
951 tree *op0_p = &base;
952 HOST_WIDE_INT offset = 0;
953 HOST_WIDE_INT max_size;
954 HOST_WIDE_INT size = -1;
955 tree size_tree = NULL_TREE;
956 alias_set_type base_alias_set = -1;
958 /* First get the final access size from just the outermost expression. */
959 op = &ops[0];
960 if (op->opcode == COMPONENT_REF)
961 size_tree = DECL_SIZE (op->op0);
962 else if (op->opcode == BIT_FIELD_REF)
963 size_tree = op->op0;
964 else
966 enum machine_mode mode = TYPE_MODE (type);
967 if (mode == BLKmode)
968 size_tree = TYPE_SIZE (type);
969 else
970 size = GET_MODE_BITSIZE (mode);
972 if (size_tree != NULL_TREE)
974 if (!tree_fits_uhwi_p (size_tree))
975 size = -1;
976 else
977 size = tree_to_uhwi (size_tree);
980 /* Initially, maxsize is the same as the accessed element size.
981 In the following it will only grow (or become -1). */
982 max_size = size;
984 /* Compute cumulative bit-offset for nested component-refs and array-refs,
985 and find the ultimate containing object. */
986 FOR_EACH_VEC_ELT (ops, i, op)
988 switch (op->opcode)
990 /* These may be in the reference ops, but we cannot do anything
991 sensible with them here. */
992 case ADDR_EXPR:
993 /* Apart from ADDR_EXPR arguments to MEM_REF. */
994 if (base != NULL_TREE
995 && TREE_CODE (base) == MEM_REF
996 && op->op0
997 && DECL_P (TREE_OPERAND (op->op0, 0)))
999 vn_reference_op_t pop = &ops[i-1];
1000 base = TREE_OPERAND (op->op0, 0);
1001 if (pop->off == -1)
1003 max_size = -1;
1004 offset = 0;
1006 else
1007 offset += pop->off * BITS_PER_UNIT;
1008 op0_p = NULL;
1009 break;
1011 /* Fallthru. */
1012 case CALL_EXPR:
1013 return false;
1015 /* Record the base objects. */
1016 case MEM_REF:
1017 base_alias_set = get_deref_alias_set (op->op0);
1018 *op0_p = build2 (MEM_REF, op->type,
1019 NULL_TREE, op->op0);
1020 op0_p = &TREE_OPERAND (*op0_p, 0);
1021 break;
1023 case VAR_DECL:
1024 case PARM_DECL:
1025 case RESULT_DECL:
1026 case SSA_NAME:
1027 *op0_p = op->op0;
1028 op0_p = NULL;
1029 break;
1031 /* And now the usual component-reference style ops. */
1032 case BIT_FIELD_REF:
1033 offset += tree_to_shwi (op->op1);
1034 break;
1036 case COMPONENT_REF:
1038 tree field = op->op0;
1039 /* We do not have a complete COMPONENT_REF tree here so we
1040 cannot use component_ref_field_offset. Do the interesting
1041 parts manually. */
1043 if (op->op1
1044 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
1045 max_size = -1;
1046 else
1048 offset += (tree_to_uhwi (DECL_FIELD_OFFSET (field))
1049 * BITS_PER_UNIT);
1050 offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1052 break;
1055 case ARRAY_RANGE_REF:
1056 case ARRAY_REF:
1057 /* We recorded the lower bound and the element size. */
1058 if (!tree_fits_shwi_p (op->op0)
1059 || !tree_fits_shwi_p (op->op1)
1060 || !tree_fits_shwi_p (op->op2))
1061 max_size = -1;
1062 else
1064 HOST_WIDE_INT hindex = tree_to_shwi (op->op0);
1065 hindex -= tree_to_shwi (op->op1);
1066 hindex *= tree_to_shwi (op->op2);
1067 hindex *= BITS_PER_UNIT;
1068 offset += hindex;
1070 break;
1072 case REALPART_EXPR:
1073 break;
1075 case IMAGPART_EXPR:
1076 offset += size;
1077 break;
1079 case VIEW_CONVERT_EXPR:
1080 break;
1082 case STRING_CST:
1083 case INTEGER_CST:
1084 case COMPLEX_CST:
1085 case VECTOR_CST:
1086 case REAL_CST:
1087 case CONSTRUCTOR:
1088 case CONST_DECL:
1089 return false;
1091 default:
1092 return false;
1096 if (base == NULL_TREE)
1097 return false;
1099 ref->ref = NULL_TREE;
1100 ref->base = base;
1101 ref->offset = offset;
1102 ref->size = size;
1103 ref->max_size = max_size;
1104 ref->ref_alias_set = set;
1105 if (base_alias_set != -1)
1106 ref->base_alias_set = base_alias_set;
1107 else
1108 ref->base_alias_set = get_alias_set (base);
1109 /* We discount volatiles from value-numbering elsewhere. */
1110 ref->volatile_p = false;
1112 return true;
1115 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1116 vn_reference_op_s's. */
1118 void
1119 copy_reference_ops_from_call (gimple call,
1120 vec<vn_reference_op_s> *result)
1122 vn_reference_op_s temp;
1123 unsigned i;
1124 tree lhs = gimple_call_lhs (call);
1125 int lr;
1127 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1128 different. By adding the lhs here in the vector, we ensure that the
1129 hashcode is different, guaranteeing a different value number. */
1130 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1132 memset (&temp, 0, sizeof (temp));
1133 temp.opcode = MODIFY_EXPR;
1134 temp.type = TREE_TYPE (lhs);
1135 temp.op0 = lhs;
1136 temp.off = -1;
1137 result->safe_push (temp);
1140 /* Copy the type, opcode, function, static chain and EH region, if any. */
1141 memset (&temp, 0, sizeof (temp));
1142 temp.type = gimple_call_return_type (call);
1143 temp.opcode = CALL_EXPR;
1144 temp.op0 = gimple_call_fn (call);
1145 temp.op1 = gimple_call_chain (call);
1146 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1147 temp.op2 = size_int (lr);
1148 temp.off = -1;
1149 result->safe_push (temp);
1151 /* Copy the call arguments. As they can be references as well,
1152 just chain them together. */
1153 for (i = 0; i < gimple_call_num_args (call); ++i)
1155 tree callarg = gimple_call_arg (call, i);
1156 copy_reference_ops_from_ref (callarg, result);
1160 /* Create a vector of vn_reference_op_s structures from CALL, a
1161 call statement. The vector is not shared. */
1163 static vec<vn_reference_op_s>
1164 create_reference_ops_from_call (gimple call)
1166 vec<vn_reference_op_s> result = vNULL;
1168 copy_reference_ops_from_call (call, &result);
1169 return result;
1172 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1173 *I_P to point to the last element of the replacement. */
1174 void
1175 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1176 unsigned int *i_p)
1178 unsigned int i = *i_p;
1179 vn_reference_op_t op = &(*ops)[i];
1180 vn_reference_op_t mem_op = &(*ops)[i - 1];
1181 tree addr_base;
1182 HOST_WIDE_INT addr_offset = 0;
1184 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1185 from .foo.bar to the preceding MEM_REF offset and replace the
1186 address with &OBJ. */
1187 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1188 &addr_offset);
1189 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1190 if (addr_base != TREE_OPERAND (op->op0, 0))
1192 offset_int off = offset_int::from (mem_op->op0, SIGNED);
1193 off += addr_offset;
1194 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1195 op->op0 = build_fold_addr_expr (addr_base);
1196 if (tree_fits_shwi_p (mem_op->op0))
1197 mem_op->off = tree_to_shwi (mem_op->op0);
1198 else
1199 mem_op->off = -1;
1203 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1204 *I_P to point to the last element of the replacement. */
1205 static void
1206 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1207 unsigned int *i_p)
1209 unsigned int i = *i_p;
1210 vn_reference_op_t op = &(*ops)[i];
1211 vn_reference_op_t mem_op = &(*ops)[i - 1];
1212 gimple def_stmt;
1213 enum tree_code code;
1214 offset_int off;
1216 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1217 if (!is_gimple_assign (def_stmt))
1218 return;
1220 code = gimple_assign_rhs_code (def_stmt);
1221 if (code != ADDR_EXPR
1222 && code != POINTER_PLUS_EXPR)
1223 return;
1225 off = offset_int::from (mem_op->op0, SIGNED);
1227 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1228 from .foo.bar to the preceding MEM_REF offset and replace the
1229 address with &OBJ. */
1230 if (code == ADDR_EXPR)
1232 tree addr, addr_base;
1233 HOST_WIDE_INT addr_offset;
1235 addr = gimple_assign_rhs1 (def_stmt);
1236 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1237 &addr_offset);
1238 if (!addr_base
1239 || TREE_CODE (addr_base) != MEM_REF)
1240 return;
1242 off += addr_offset;
1243 off += mem_ref_offset (addr_base);
1244 op->op0 = TREE_OPERAND (addr_base, 0);
1246 else
1248 tree ptr, ptroff;
1249 ptr = gimple_assign_rhs1 (def_stmt);
1250 ptroff = gimple_assign_rhs2 (def_stmt);
1251 if (TREE_CODE (ptr) != SSA_NAME
1252 || TREE_CODE (ptroff) != INTEGER_CST)
1253 return;
1255 off += wi::to_offset (ptroff);
1256 op->op0 = ptr;
1259 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1260 if (tree_fits_shwi_p (mem_op->op0))
1261 mem_op->off = tree_to_shwi (mem_op->op0);
1262 else
1263 mem_op->off = -1;
1264 if (TREE_CODE (op->op0) == SSA_NAME)
1265 op->op0 = SSA_VAL (op->op0);
1266 if (TREE_CODE (op->op0) != SSA_NAME)
1267 op->opcode = TREE_CODE (op->op0);
1269 /* And recurse. */
1270 if (TREE_CODE (op->op0) == SSA_NAME)
1271 vn_reference_maybe_forwprop_address (ops, i_p);
1272 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1273 vn_reference_fold_indirect (ops, i_p);
1276 /* Optimize the reference REF to a constant if possible or return
1277 NULL_TREE if not. */
1279 tree
1280 fully_constant_vn_reference_p (vn_reference_t ref)
1282 vec<vn_reference_op_s> operands = ref->operands;
1283 vn_reference_op_t op;
1285 /* Try to simplify the translated expression if it is
1286 a call to a builtin function with at most two arguments. */
1287 op = &operands[0];
1288 if (op->opcode == CALL_EXPR
1289 && TREE_CODE (op->op0) == ADDR_EXPR
1290 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1291 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1292 && operands.length () >= 2
1293 && operands.length () <= 3)
1295 vn_reference_op_t arg0, arg1 = NULL;
1296 bool anyconst = false;
1297 arg0 = &operands[1];
1298 if (operands.length () > 2)
1299 arg1 = &operands[2];
1300 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1301 || (arg0->opcode == ADDR_EXPR
1302 && is_gimple_min_invariant (arg0->op0)))
1303 anyconst = true;
1304 if (arg1
1305 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1306 || (arg1->opcode == ADDR_EXPR
1307 && is_gimple_min_invariant (arg1->op0))))
1308 anyconst = true;
1309 if (anyconst)
1311 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1312 arg1 ? 2 : 1,
1313 arg0->op0,
1314 arg1 ? arg1->op0 : NULL);
1315 if (folded
1316 && TREE_CODE (folded) == NOP_EXPR)
1317 folded = TREE_OPERAND (folded, 0);
1318 if (folded
1319 && is_gimple_min_invariant (folded))
1320 return folded;
1324 /* Simplify reads from constant strings. */
1325 else if (op->opcode == ARRAY_REF
1326 && TREE_CODE (op->op0) == INTEGER_CST
1327 && integer_zerop (op->op1)
1328 && operands.length () == 2)
1330 vn_reference_op_t arg0;
1331 arg0 = &operands[1];
1332 if (arg0->opcode == STRING_CST
1333 && (TYPE_MODE (op->type)
1334 == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
1335 && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
1336 && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
1337 && tree_int_cst_sgn (op->op0) >= 0
1338 && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
1339 return build_int_cst_type (op->type,
1340 (TREE_STRING_POINTER (arg0->op0)
1341 [TREE_INT_CST_LOW (op->op0)]));
1344 return NULL_TREE;
1347 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1348 structures into their value numbers. This is done in-place, and
1349 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1350 whether any operands were valueized. */
1352 static vec<vn_reference_op_s>
1353 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1355 vn_reference_op_t vro;
1356 unsigned int i;
1358 *valueized_anything = false;
1360 FOR_EACH_VEC_ELT (orig, i, vro)
1362 if (vro->opcode == SSA_NAME
1363 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1365 tree tem = SSA_VAL (vro->op0);
1366 if (tem != vro->op0)
1368 *valueized_anything = true;
1369 vro->op0 = tem;
1371 /* If it transforms from an SSA_NAME to a constant, update
1372 the opcode. */
1373 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1374 vro->opcode = TREE_CODE (vro->op0);
1376 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1378 tree tem = SSA_VAL (vro->op1);
1379 if (tem != vro->op1)
1381 *valueized_anything = true;
1382 vro->op1 = tem;
1385 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1387 tree tem = SSA_VAL (vro->op2);
1388 if (tem != vro->op2)
1390 *valueized_anything = true;
1391 vro->op2 = tem;
1394 /* If it transforms from an SSA_NAME to an address, fold with
1395 a preceding indirect reference. */
1396 if (i > 0
1397 && vro->op0
1398 && TREE_CODE (vro->op0) == ADDR_EXPR
1399 && orig[i - 1].opcode == MEM_REF)
1400 vn_reference_fold_indirect (&orig, &i);
1401 else if (i > 0
1402 && vro->opcode == SSA_NAME
1403 && orig[i - 1].opcode == MEM_REF)
1404 vn_reference_maybe_forwprop_address (&orig, &i);
1405 /* If it transforms a non-constant ARRAY_REF into a constant
1406 one, adjust the constant offset. */
1407 else if (vro->opcode == ARRAY_REF
1408 && vro->off == -1
1409 && TREE_CODE (vro->op0) == INTEGER_CST
1410 && TREE_CODE (vro->op1) == INTEGER_CST
1411 && TREE_CODE (vro->op2) == INTEGER_CST)
1413 offset_int off = ((wi::to_offset (vro->op0)
1414 - wi::to_offset (vro->op1))
1415 * wi::to_offset (vro->op2));
1416 if (wi::fits_shwi_p (off))
1417 vro->off = off.to_shwi ();
1421 return orig;
1424 static vec<vn_reference_op_s>
1425 valueize_refs (vec<vn_reference_op_s> orig)
1427 bool tem;
1428 return valueize_refs_1 (orig, &tem);
1431 static vec<vn_reference_op_s> shared_lookup_references;
1433 /* Create a vector of vn_reference_op_s structures from REF, a
1434 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1435 this function. *VALUEIZED_ANYTHING will specify whether any
1436 operands were valueized. */
1438 static vec<vn_reference_op_s>
1439 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1441 if (!ref)
1442 return vNULL;
1443 shared_lookup_references.truncate (0);
1444 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1445 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1446 valueized_anything);
1447 return shared_lookup_references;
1450 /* Create a vector of vn_reference_op_s structures from CALL, a
1451 call statement. The vector is shared among all callers of
1452 this function. */
1454 static vec<vn_reference_op_s>
1455 valueize_shared_reference_ops_from_call (gimple call)
1457 if (!call)
1458 return vNULL;
1459 shared_lookup_references.truncate (0);
1460 copy_reference_ops_from_call (call, &shared_lookup_references);
1461 shared_lookup_references = valueize_refs (shared_lookup_references);
1462 return shared_lookup_references;
1465 /* Lookup a SCCVN reference operation VR in the current hash table.
1466 Returns the resulting value number if it exists in the hash table,
1467 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1468 vn_reference_t stored in the hashtable if something is found. */
1470 static tree
1471 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1473 vn_reference_s **slot;
1474 hashval_t hash;
1476 hash = vr->hashcode;
1477 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1478 if (!slot && current_info == optimistic_info)
1479 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1480 if (slot)
1482 if (vnresult)
1483 *vnresult = (vn_reference_t)*slot;
1484 return ((vn_reference_t)*slot)->result;
1487 return NULL_TREE;
1490 static tree *last_vuse_ptr;
1491 static vn_lookup_kind vn_walk_kind;
1492 static vn_lookup_kind default_vn_walk_kind;
1494 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1495 with the current VUSE and performs the expression lookup. */
1497 static void *
1498 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1499 unsigned int cnt, void *vr_)
1501 vn_reference_t vr = (vn_reference_t)vr_;
1502 vn_reference_s **slot;
1503 hashval_t hash;
1505 /* This bounds the stmt walks we perform on reference lookups
1506 to O(1) instead of O(N) where N is the number of dominating
1507 stores. */
1508 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1509 return (void *)-1;
1511 if (last_vuse_ptr)
1512 *last_vuse_ptr = vuse;
1514 /* Fixup vuse and hash. */
1515 if (vr->vuse)
1516 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1517 vr->vuse = vuse_ssa_val (vuse);
1518 if (vr->vuse)
1519 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1521 hash = vr->hashcode;
1522 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1523 if (!slot && current_info == optimistic_info)
1524 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1525 if (slot)
1526 return *slot;
1528 return NULL;
1531 /* Lookup an existing or insert a new vn_reference entry into the
1532 value table for the VUSE, SET, TYPE, OPERANDS reference which
1533 has the value VALUE which is either a constant or an SSA name. */
1535 static vn_reference_t
1536 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1537 alias_set_type set,
1538 tree type,
1539 vec<vn_reference_op_s,
1540 va_heap> operands,
1541 tree value)
1543 struct vn_reference_s vr1;
1544 vn_reference_t result;
1545 unsigned value_id;
1546 vr1.vuse = vuse;
1547 vr1.operands = operands;
1548 vr1.type = type;
1549 vr1.set = set;
1550 vr1.hashcode = vn_reference_compute_hash (&vr1);
1551 if (vn_reference_lookup_1 (&vr1, &result))
1552 return result;
1553 if (TREE_CODE (value) == SSA_NAME)
1554 value_id = VN_INFO (value)->value_id;
1555 else
1556 value_id = get_or_alloc_constant_value_id (value);
1557 return vn_reference_insert_pieces (vuse, set, type,
1558 operands.copy (), value, value_id);
1561 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1562 from the statement defining VUSE and if not successful tries to
1563 translate *REFP and VR_ through an aggregate copy at the definition
1564 of VUSE. */
1566 static void *
1567 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1568 bool disambiguate_only)
1570 vn_reference_t vr = (vn_reference_t)vr_;
1571 gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
1572 tree base;
1573 HOST_WIDE_INT offset, maxsize;
1574 static vec<vn_reference_op_s>
1575 lhs_ops = vNULL;
1576 ao_ref lhs_ref;
1577 bool lhs_ref_ok = false;
1579 /* First try to disambiguate after value-replacing in the definitions LHS. */
1580 if (is_gimple_assign (def_stmt))
1582 vec<vn_reference_op_s> tem;
1583 tree lhs = gimple_assign_lhs (def_stmt);
1584 bool valueized_anything = false;
1585 /* Avoid re-allocation overhead. */
1586 lhs_ops.truncate (0);
1587 copy_reference_ops_from_ref (lhs, &lhs_ops);
1588 tem = lhs_ops;
1589 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1590 gcc_assert (lhs_ops == tem);
1591 if (valueized_anything)
1593 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1594 get_alias_set (lhs),
1595 TREE_TYPE (lhs), lhs_ops);
1596 if (lhs_ref_ok
1597 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1598 return NULL;
1600 else
1602 ao_ref_init (&lhs_ref, lhs);
1603 lhs_ref_ok = true;
1606 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1607 && gimple_call_num_args (def_stmt) <= 4)
1609 /* For builtin calls valueize its arguments and call the
1610 alias oracle again. Valueization may improve points-to
1611 info of pointers and constify size and position arguments.
1612 Originally this was motivated by PR61034 which has
1613 conditional calls to free falsely clobbering ref because
1614 of imprecise points-to info of the argument. */
1615 tree oldargs[4];
1616 bool valueized_anything = false;
1617 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1619 oldargs[i] = gimple_call_arg (def_stmt, i);
1620 if (TREE_CODE (oldargs[i]) == SSA_NAME
1621 && VN_INFO (oldargs[i])->valnum != oldargs[i])
1623 gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
1624 valueized_anything = true;
1627 if (valueized_anything)
1629 bool res = call_may_clobber_ref_p_1 (def_stmt, ref);
1630 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1631 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1632 if (!res)
1633 return NULL;
1637 if (disambiguate_only)
1638 return (void *)-1;
1640 base = ao_ref_base (ref);
1641 offset = ref->offset;
1642 maxsize = ref->max_size;
1644 /* If we cannot constrain the size of the reference we cannot
1645 test if anything kills it. */
1646 if (maxsize == -1)
1647 return (void *)-1;
1649 /* We can't deduce anything useful from clobbers. */
1650 if (gimple_clobber_p (def_stmt))
1651 return (void *)-1;
1653 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1654 from that definition.
1655 1) Memset. */
1656 if (is_gimple_reg_type (vr->type)
1657 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1658 && integer_zerop (gimple_call_arg (def_stmt, 1))
1659 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1660 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1662 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1663 tree base2;
1664 HOST_WIDE_INT offset2, size2, maxsize2;
1665 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
1666 size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
1667 if ((unsigned HOST_WIDE_INT)size2 / 8
1668 == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
1669 && maxsize2 != -1
1670 && operand_equal_p (base, base2, 0)
1671 && offset2 <= offset
1672 && offset2 + size2 >= offset + maxsize)
1674 tree val = build_zero_cst (vr->type);
1675 return vn_reference_lookup_or_insert_for_pieces
1676 (vuse, vr->set, vr->type, vr->operands, val);
1680 /* 2) Assignment from an empty CONSTRUCTOR. */
1681 else if (is_gimple_reg_type (vr->type)
1682 && gimple_assign_single_p (def_stmt)
1683 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1684 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1686 tree base2;
1687 HOST_WIDE_INT offset2, size2, maxsize2;
1688 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1689 &offset2, &size2, &maxsize2);
1690 if (maxsize2 != -1
1691 && operand_equal_p (base, base2, 0)
1692 && offset2 <= offset
1693 && offset2 + size2 >= offset + maxsize)
1695 tree val = build_zero_cst (vr->type);
1696 return vn_reference_lookup_or_insert_for_pieces
1697 (vuse, vr->set, vr->type, vr->operands, val);
1701 /* 3) Assignment from a constant. We can use folds native encode/interpret
1702 routines to extract the assigned bits. */
1703 else if (vn_walk_kind == VN_WALKREWRITE
1704 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1705 && ref->size == maxsize
1706 && maxsize % BITS_PER_UNIT == 0
1707 && offset % BITS_PER_UNIT == 0
1708 && is_gimple_reg_type (vr->type)
1709 && gimple_assign_single_p (def_stmt)
1710 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
1712 tree base2;
1713 HOST_WIDE_INT offset2, size2, maxsize2;
1714 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1715 &offset2, &size2, &maxsize2);
1716 if (maxsize2 != -1
1717 && maxsize2 == size2
1718 && size2 % BITS_PER_UNIT == 0
1719 && offset2 % BITS_PER_UNIT == 0
1720 && operand_equal_p (base, base2, 0)
1721 && offset2 <= offset
1722 && offset2 + size2 >= offset + maxsize)
1724 /* We support up to 512-bit values (for V8DFmode). */
1725 unsigned char buffer[64];
1726 int len;
1728 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1729 buffer, sizeof (buffer));
1730 if (len > 0)
1732 tree val = native_interpret_expr (vr->type,
1733 buffer
1734 + ((offset - offset2)
1735 / BITS_PER_UNIT),
1736 ref->size / BITS_PER_UNIT);
1737 if (val)
1738 return vn_reference_lookup_or_insert_for_pieces
1739 (vuse, vr->set, vr->type, vr->operands, val);
1744 /* 4) Assignment from an SSA name which definition we may be able
1745 to access pieces from. */
1746 else if (ref->size == maxsize
1747 && is_gimple_reg_type (vr->type)
1748 && gimple_assign_single_p (def_stmt)
1749 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1751 tree rhs1 = gimple_assign_rhs1 (def_stmt);
1752 gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
1753 if (is_gimple_assign (def_stmt2)
1754 && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1755 || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1756 && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1758 tree base2;
1759 HOST_WIDE_INT offset2, size2, maxsize2, off;
1760 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1761 &offset2, &size2, &maxsize2);
1762 off = offset - offset2;
1763 if (maxsize2 != -1
1764 && maxsize2 == size2
1765 && operand_equal_p (base, base2, 0)
1766 && offset2 <= offset
1767 && offset2 + size2 >= offset + maxsize)
1769 tree val = NULL_TREE;
1770 HOST_WIDE_INT elsz
1771 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
1772 if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
1774 if (off == 0)
1775 val = gimple_assign_rhs1 (def_stmt2);
1776 else if (off == elsz)
1777 val = gimple_assign_rhs2 (def_stmt2);
1779 else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1780 && off % elsz == 0)
1782 tree ctor = gimple_assign_rhs1 (def_stmt2);
1783 unsigned i = off / elsz;
1784 if (i < CONSTRUCTOR_NELTS (ctor))
1786 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
1787 if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
1789 if (TREE_CODE (TREE_TYPE (elt->value))
1790 != VECTOR_TYPE)
1791 val = elt->value;
1795 if (val)
1796 return vn_reference_lookup_or_insert_for_pieces
1797 (vuse, vr->set, vr->type, vr->operands, val);
1802 /* 5) For aggregate copies translate the reference through them if
1803 the copy kills ref. */
1804 else if (vn_walk_kind == VN_WALKREWRITE
1805 && gimple_assign_single_p (def_stmt)
1806 && (DECL_P (gimple_assign_rhs1 (def_stmt))
1807 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
1808 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1810 tree base2;
1811 HOST_WIDE_INT offset2, size2, maxsize2;
1812 int i, j;
1813 auto_vec<vn_reference_op_s> rhs;
1814 vn_reference_op_t vro;
1815 ao_ref r;
1817 if (!lhs_ref_ok)
1818 return (void *)-1;
1820 /* See if the assignment kills REF. */
1821 base2 = ao_ref_base (&lhs_ref);
1822 offset2 = lhs_ref.offset;
1823 size2 = lhs_ref.size;
1824 maxsize2 = lhs_ref.max_size;
1825 if (maxsize2 == -1
1826 || (base != base2 && !operand_equal_p (base, base2, 0))
1827 || offset2 > offset
1828 || offset2 + size2 < offset + maxsize)
1829 return (void *)-1;
1831 /* Find the common base of ref and the lhs. lhs_ops already
1832 contains valueized operands for the lhs. */
1833 i = vr->operands.length () - 1;
1834 j = lhs_ops.length () - 1;
1835 while (j >= 0 && i >= 0
1836 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
1838 i--;
1839 j--;
1842 /* ??? The innermost op should always be a MEM_REF and we already
1843 checked that the assignment to the lhs kills vr. Thus for
1844 aggregate copies using char[] types the vn_reference_op_eq
1845 may fail when comparing types for compatibility. But we really
1846 don't care here - further lookups with the rewritten operands
1847 will simply fail if we messed up types too badly. */
1848 if (j == 0 && i >= 0
1849 && lhs_ops[0].opcode == MEM_REF
1850 && lhs_ops[0].off != -1
1851 && (lhs_ops[0].off == vr->operands[i].off))
1852 i--, j--;
1854 /* i now points to the first additional op.
1855 ??? LHS may not be completely contained in VR, one or more
1856 VIEW_CONVERT_EXPRs could be in its way. We could at least
1857 try handling outermost VIEW_CONVERT_EXPRs. */
1858 if (j != -1)
1859 return (void *)-1;
1861 /* Now re-write REF to be based on the rhs of the assignment. */
1862 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1863 /* We need to pre-pend vr->operands[0..i] to rhs. */
1864 if (i + 1 + rhs.length () > vr->operands.length ())
1866 vec<vn_reference_op_s> old = vr->operands;
1867 vr->operands.safe_grow (i + 1 + rhs.length ());
1868 if (old == shared_lookup_references
1869 && vr->operands != old)
1870 shared_lookup_references = vNULL;
1872 else
1873 vr->operands.truncate (i + 1 + rhs.length ());
1874 FOR_EACH_VEC_ELT (rhs, j, vro)
1875 vr->operands[i + 1 + j] = *vro;
1876 vr->operands = valueize_refs (vr->operands);
1877 vr->hashcode = vn_reference_compute_hash (vr);
1879 /* Adjust *ref from the new operands. */
1880 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1881 return (void *)-1;
1882 /* This can happen with bitfields. */
1883 if (ref->size != r.size)
1884 return (void *)-1;
1885 *ref = r;
1887 /* Do not update last seen VUSE after translating. */
1888 last_vuse_ptr = NULL;
1890 /* Keep looking for the adjusted *REF / VR pair. */
1891 return NULL;
1894 /* 6) For memcpy copies translate the reference through them if
1895 the copy kills ref. */
1896 else if (vn_walk_kind == VN_WALKREWRITE
1897 && is_gimple_reg_type (vr->type)
1898 /* ??? Handle BCOPY as well. */
1899 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
1900 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
1901 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
1902 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
1903 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
1904 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
1905 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
1906 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
1908 tree lhs, rhs;
1909 ao_ref r;
1910 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
1911 vn_reference_op_s op;
1912 HOST_WIDE_INT at;
1915 /* Only handle non-variable, addressable refs. */
1916 if (ref->size != maxsize
1917 || offset % BITS_PER_UNIT != 0
1918 || ref->size % BITS_PER_UNIT != 0)
1919 return (void *)-1;
1921 /* Extract a pointer base and an offset for the destination. */
1922 lhs = gimple_call_arg (def_stmt, 0);
1923 lhs_offset = 0;
1924 if (TREE_CODE (lhs) == SSA_NAME)
1925 lhs = SSA_VAL (lhs);
1926 if (TREE_CODE (lhs) == ADDR_EXPR)
1928 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
1929 &lhs_offset);
1930 if (!tem)
1931 return (void *)-1;
1932 if (TREE_CODE (tem) == MEM_REF
1933 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
1935 lhs = TREE_OPERAND (tem, 0);
1936 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
1938 else if (DECL_P (tem))
1939 lhs = build_fold_addr_expr (tem);
1940 else
1941 return (void *)-1;
1943 if (TREE_CODE (lhs) != SSA_NAME
1944 && TREE_CODE (lhs) != ADDR_EXPR)
1945 return (void *)-1;
1947 /* Extract a pointer base and an offset for the source. */
1948 rhs = gimple_call_arg (def_stmt, 1);
1949 rhs_offset = 0;
1950 if (TREE_CODE (rhs) == SSA_NAME)
1951 rhs = SSA_VAL (rhs);
1952 if (TREE_CODE (rhs) == ADDR_EXPR)
1954 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
1955 &rhs_offset);
1956 if (!tem)
1957 return (void *)-1;
1958 if (TREE_CODE (tem) == MEM_REF
1959 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
1961 rhs = TREE_OPERAND (tem, 0);
1962 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
1964 else if (DECL_P (tem))
1965 rhs = build_fold_addr_expr (tem);
1966 else
1967 return (void *)-1;
1969 if (TREE_CODE (rhs) != SSA_NAME
1970 && TREE_CODE (rhs) != ADDR_EXPR)
1971 return (void *)-1;
1973 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
1975 /* The bases of the destination and the references have to agree. */
1976 if ((TREE_CODE (base) != MEM_REF
1977 && !DECL_P (base))
1978 || (TREE_CODE (base) == MEM_REF
1979 && (TREE_OPERAND (base, 0) != lhs
1980 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
1981 || (DECL_P (base)
1982 && (TREE_CODE (lhs) != ADDR_EXPR
1983 || TREE_OPERAND (lhs, 0) != base)))
1984 return (void *)-1;
1986 /* And the access has to be contained within the memcpy destination. */
1987 at = offset / BITS_PER_UNIT;
1988 if (TREE_CODE (base) == MEM_REF)
1989 at += tree_to_uhwi (TREE_OPERAND (base, 1));
1990 if (lhs_offset > at
1991 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
1992 return (void *)-1;
1994 /* Make room for 2 operands in the new reference. */
1995 if (vr->operands.length () < 2)
1997 vec<vn_reference_op_s> old = vr->operands;
1998 vr->operands.safe_grow_cleared (2);
1999 if (old == shared_lookup_references
2000 && vr->operands != old)
2001 shared_lookup_references.create (0);
2003 else
2004 vr->operands.truncate (2);
2006 /* The looked-through reference is a simple MEM_REF. */
2007 memset (&op, 0, sizeof (op));
2008 op.type = vr->type;
2009 op.opcode = MEM_REF;
2010 op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
2011 op.off = at - lhs_offset + rhs_offset;
2012 vr->operands[0] = op;
2013 op.type = TREE_TYPE (rhs);
2014 op.opcode = TREE_CODE (rhs);
2015 op.op0 = rhs;
2016 op.off = -1;
2017 vr->operands[1] = op;
2018 vr->hashcode = vn_reference_compute_hash (vr);
2020 /* Adjust *ref from the new operands. */
2021 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2022 return (void *)-1;
2023 /* This can happen with bitfields. */
2024 if (ref->size != r.size)
2025 return (void *)-1;
2026 *ref = r;
2028 /* Do not update last seen VUSE after translating. */
2029 last_vuse_ptr = NULL;
2031 /* Keep looking for the adjusted *REF / VR pair. */
2032 return NULL;
2035 /* Bail out and stop walking. */
2036 return (void *)-1;
2039 /* Lookup a reference operation by it's parts, in the current hash table.
2040 Returns the resulting value number if it exists in the hash table,
2041 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2042 vn_reference_t stored in the hashtable if something is found. */
2044 tree
2045 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2046 vec<vn_reference_op_s> operands,
2047 vn_reference_t *vnresult, vn_lookup_kind kind)
2049 struct vn_reference_s vr1;
2050 vn_reference_t tmp;
2051 tree cst;
2053 if (!vnresult)
2054 vnresult = &tmp;
2055 *vnresult = NULL;
2057 vr1.vuse = vuse_ssa_val (vuse);
2058 shared_lookup_references.truncate (0);
2059 shared_lookup_references.safe_grow (operands.length ());
2060 memcpy (shared_lookup_references.address (),
2061 operands.address (),
2062 sizeof (vn_reference_op_s)
2063 * operands.length ());
2064 vr1.operands = operands = shared_lookup_references
2065 = valueize_refs (shared_lookup_references);
2066 vr1.type = type;
2067 vr1.set = set;
2068 vr1.hashcode = vn_reference_compute_hash (&vr1);
2069 if ((cst = fully_constant_vn_reference_p (&vr1)))
2070 return cst;
2072 vn_reference_lookup_1 (&vr1, vnresult);
2073 if (!*vnresult
2074 && kind != VN_NOWALK
2075 && vr1.vuse)
2077 ao_ref r;
2078 vn_walk_kind = kind;
2079 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2080 *vnresult =
2081 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2082 vn_reference_lookup_2,
2083 vn_reference_lookup_3, &vr1);
2084 if (vr1.operands != operands)
2085 vr1.operands.release ();
2088 if (*vnresult)
2089 return (*vnresult)->result;
2091 return NULL_TREE;
2094 /* Lookup OP in the current hash table, and return the resulting value
2095 number if it exists in the hash table. Return NULL_TREE if it does
2096 not exist in the hash table or if the result field of the structure
2097 was NULL.. VNRESULT will be filled in with the vn_reference_t
2098 stored in the hashtable if one exists. */
2100 tree
2101 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2102 vn_reference_t *vnresult)
2104 vec<vn_reference_op_s> operands;
2105 struct vn_reference_s vr1;
2106 tree cst;
2107 bool valuezied_anything;
2109 if (vnresult)
2110 *vnresult = NULL;
2112 vr1.vuse = vuse_ssa_val (vuse);
2113 vr1.operands = operands
2114 = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
2115 vr1.type = TREE_TYPE (op);
2116 vr1.set = get_alias_set (op);
2117 vr1.hashcode = vn_reference_compute_hash (&vr1);
2118 if ((cst = fully_constant_vn_reference_p (&vr1)))
2119 return cst;
2121 if (kind != VN_NOWALK
2122 && vr1.vuse)
2124 vn_reference_t wvnresult;
2125 ao_ref r;
2126 /* Make sure to use a valueized reference if we valueized anything.
2127 Otherwise preserve the full reference for advanced TBAA. */
2128 if (!valuezied_anything
2129 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2130 vr1.operands))
2131 ao_ref_init (&r, op);
2132 vn_walk_kind = kind;
2133 wvnresult =
2134 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2135 vn_reference_lookup_2,
2136 vn_reference_lookup_3, &vr1);
2137 if (vr1.operands != operands)
2138 vr1.operands.release ();
2139 if (wvnresult)
2141 if (vnresult)
2142 *vnresult = wvnresult;
2143 return wvnresult->result;
2146 return NULL_TREE;
2149 return vn_reference_lookup_1 (&vr1, vnresult);
2153 /* Insert OP into the current hash table with a value number of
2154 RESULT, and return the resulting reference structure we created. */
2156 vn_reference_t
2157 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2159 vn_reference_s **slot;
2160 vn_reference_t vr1;
2161 bool tem;
2163 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2164 if (TREE_CODE (result) == SSA_NAME)
2165 vr1->value_id = VN_INFO (result)->value_id;
2166 else
2167 vr1->value_id = get_or_alloc_constant_value_id (result);
2168 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2169 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2170 vr1->type = TREE_TYPE (op);
2171 vr1->set = get_alias_set (op);
2172 vr1->hashcode = vn_reference_compute_hash (vr1);
2173 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2174 vr1->result_vdef = vdef;
2176 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2177 INSERT);
2179 /* Because we lookup stores using vuses, and value number failures
2180 using the vdefs (see visit_reference_op_store for how and why),
2181 it's possible that on failure we may try to insert an already
2182 inserted store. This is not wrong, there is no ssa name for a
2183 store that we could use as a differentiator anyway. Thus, unlike
2184 the other lookup functions, you cannot gcc_assert (!*slot)
2185 here. */
2187 /* But free the old slot in case of a collision. */
2188 if (*slot)
2189 free_reference (*slot);
2191 *slot = vr1;
2192 return vr1;
2195 /* Insert a reference by it's pieces into the current hash table with
2196 a value number of RESULT. Return the resulting reference
2197 structure we created. */
2199 vn_reference_t
2200 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2201 vec<vn_reference_op_s> operands,
2202 tree result, unsigned int value_id)
2205 vn_reference_s **slot;
2206 vn_reference_t vr1;
2208 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2209 vr1->value_id = value_id;
2210 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2211 vr1->operands = valueize_refs (operands);
2212 vr1->type = type;
2213 vr1->set = set;
2214 vr1->hashcode = vn_reference_compute_hash (vr1);
2215 if (result && TREE_CODE (result) == SSA_NAME)
2216 result = SSA_VAL (result);
2217 vr1->result = result;
2219 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2220 INSERT);
2222 /* At this point we should have all the things inserted that we have
2223 seen before, and we should never try inserting something that
2224 already exists. */
2225 gcc_assert (!*slot);
2226 if (*slot)
2227 free_reference (*slot);
2229 *slot = vr1;
2230 return vr1;
2233 /* Compute and return the hash value for nary operation VBO1. */
2235 hashval_t
2236 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2238 hashval_t hash;
2239 unsigned i;
2241 for (i = 0; i < vno1->length; ++i)
2242 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2243 vno1->op[i] = SSA_VAL (vno1->op[i]);
2245 if (vno1->length == 2
2246 && commutative_tree_code (vno1->opcode)
2247 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2249 tree temp = vno1->op[0];
2250 vno1->op[0] = vno1->op[1];
2251 vno1->op[1] = temp;
2254 hash = iterative_hash_hashval_t (vno1->opcode, 0);
2255 for (i = 0; i < vno1->length; ++i)
2256 hash = iterative_hash_expr (vno1->op[i], hash);
2258 return hash;
2261 /* Compare nary operations VNO1 and VNO2 and return true if they are
2262 equivalent. */
2264 bool
2265 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2267 unsigned i;
2269 if (vno1->hashcode != vno2->hashcode)
2270 return false;
2272 if (vno1->length != vno2->length)
2273 return false;
2275 if (vno1->opcode != vno2->opcode
2276 || !types_compatible_p (vno1->type, vno2->type))
2277 return false;
2279 for (i = 0; i < vno1->length; ++i)
2280 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2281 return false;
2283 return true;
2286 /* Initialize VNO from the pieces provided. */
2288 static void
2289 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2290 enum tree_code code, tree type, tree *ops)
2292 vno->opcode = code;
2293 vno->length = length;
2294 vno->type = type;
2295 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2298 /* Initialize VNO from OP. */
2300 static void
2301 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2303 unsigned i;
2305 vno->opcode = TREE_CODE (op);
2306 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2307 vno->type = TREE_TYPE (op);
2308 for (i = 0; i < vno->length; ++i)
2309 vno->op[i] = TREE_OPERAND (op, i);
2312 /* Return the number of operands for a vn_nary ops structure from STMT. */
2314 static unsigned int
2315 vn_nary_length_from_stmt (gimple stmt)
2317 switch (gimple_assign_rhs_code (stmt))
2319 case REALPART_EXPR:
2320 case IMAGPART_EXPR:
2321 case VIEW_CONVERT_EXPR:
2322 return 1;
2324 case BIT_FIELD_REF:
2325 return 3;
2327 case CONSTRUCTOR:
2328 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2330 default:
2331 return gimple_num_ops (stmt) - 1;
2335 /* Initialize VNO from STMT. */
2337 static void
2338 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2340 unsigned i;
2342 vno->opcode = gimple_assign_rhs_code (stmt);
2343 vno->type = gimple_expr_type (stmt);
2344 switch (vno->opcode)
2346 case REALPART_EXPR:
2347 case IMAGPART_EXPR:
2348 case VIEW_CONVERT_EXPR:
2349 vno->length = 1;
2350 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2351 break;
2353 case BIT_FIELD_REF:
2354 vno->length = 3;
2355 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2356 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2357 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2358 break;
2360 case CONSTRUCTOR:
2361 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2362 for (i = 0; i < vno->length; ++i)
2363 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2364 break;
2366 default:
2367 gcc_checking_assert (!gimple_assign_single_p (stmt));
2368 vno->length = gimple_num_ops (stmt) - 1;
2369 for (i = 0; i < vno->length; ++i)
2370 vno->op[i] = gimple_op (stmt, i + 1);
2374 /* Compute the hashcode for VNO and look for it in the hash table;
2375 return the resulting value number if it exists in the hash table.
2376 Return NULL_TREE if it does not exist in the hash table or if the
2377 result field of the operation is NULL. VNRESULT will contain the
2378 vn_nary_op_t from the hashtable if it exists. */
2380 static tree
2381 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2383 vn_nary_op_s **slot;
2385 if (vnresult)
2386 *vnresult = NULL;
2388 vno->hashcode = vn_nary_op_compute_hash (vno);
2389 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2390 NO_INSERT);
2391 if (!slot && current_info == optimistic_info)
2392 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2393 NO_INSERT);
2394 if (!slot)
2395 return NULL_TREE;
2396 if (vnresult)
2397 *vnresult = *slot;
2398 return (*slot)->result;
2401 /* Lookup a n-ary operation by its pieces and return the resulting value
2402 number if it exists in the hash table. Return NULL_TREE if it does
2403 not exist in the hash table or if the result field of the operation
2404 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2405 if it exists. */
2407 tree
2408 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2409 tree type, tree *ops, vn_nary_op_t *vnresult)
2411 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2412 sizeof_vn_nary_op (length));
2413 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2414 return vn_nary_op_lookup_1 (vno1, vnresult);
2417 /* Lookup OP in the current hash table, and return the resulting value
2418 number if it exists in the hash table. Return NULL_TREE if it does
2419 not exist in the hash table or if the result field of the operation
2420 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2421 if it exists. */
2423 tree
2424 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2426 vn_nary_op_t vno1
2427 = XALLOCAVAR (struct vn_nary_op_s,
2428 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2429 init_vn_nary_op_from_op (vno1, op);
2430 return vn_nary_op_lookup_1 (vno1, vnresult);
2433 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2434 value number if it exists in the hash table. Return NULL_TREE if
2435 it does not exist in the hash table. VNRESULT will contain the
2436 vn_nary_op_t from the hashtable if it exists. */
2438 tree
2439 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2441 vn_nary_op_t vno1
2442 = XALLOCAVAR (struct vn_nary_op_s,
2443 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2444 init_vn_nary_op_from_stmt (vno1, stmt);
2445 return vn_nary_op_lookup_1 (vno1, vnresult);
2448 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2450 static vn_nary_op_t
2451 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2453 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2456 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2457 obstack. */
2459 static vn_nary_op_t
2460 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2462 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2463 &current_info->nary_obstack);
2465 vno1->value_id = value_id;
2466 vno1->length = length;
2467 vno1->result = result;
2469 return vno1;
2472 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2473 VNO->HASHCODE first. */
2475 static vn_nary_op_t
2476 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2477 bool compute_hash)
2479 vn_nary_op_s **slot;
2481 if (compute_hash)
2482 vno->hashcode = vn_nary_op_compute_hash (vno);
2484 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2485 gcc_assert (!*slot);
2487 *slot = vno;
2488 return vno;
2491 /* Insert a n-ary operation into the current hash table using it's
2492 pieces. Return the vn_nary_op_t structure we created and put in
2493 the hashtable. */
2495 vn_nary_op_t
2496 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2497 tree type, tree *ops,
2498 tree result, unsigned int value_id)
2500 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2501 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2502 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2505 /* Insert OP into the current hash table with a value number of
2506 RESULT. Return the vn_nary_op_t structure we created and put in
2507 the hashtable. */
2509 vn_nary_op_t
2510 vn_nary_op_insert (tree op, tree result)
2512 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2513 vn_nary_op_t vno1;
2515 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2516 init_vn_nary_op_from_op (vno1, op);
2517 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2520 /* Insert the rhs of STMT into the current hash table with a value number of
2521 RESULT. */
2523 vn_nary_op_t
2524 vn_nary_op_insert_stmt (gimple stmt, tree result)
2526 vn_nary_op_t vno1
2527 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2528 result, VN_INFO (result)->value_id);
2529 init_vn_nary_op_from_stmt (vno1, stmt);
2530 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2533 /* Compute a hashcode for PHI operation VP1 and return it. */
2535 static inline hashval_t
2536 vn_phi_compute_hash (vn_phi_t vp1)
2538 hashval_t result;
2539 int i;
2540 tree phi1op;
2541 tree type;
2543 result = vp1->block->index;
2545 /* If all PHI arguments are constants we need to distinguish
2546 the PHI node via its type. */
2547 type = vp1->type;
2548 result += vn_hash_type (type);
2550 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2552 if (phi1op == VN_TOP)
2553 continue;
2554 result = iterative_hash_expr (phi1op, result);
2557 return result;
2560 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2562 static int
2563 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2565 if (vp1->hashcode != vp2->hashcode)
2566 return false;
2568 if (vp1->block == vp2->block)
2570 int i;
2571 tree phi1op;
2573 /* If the PHI nodes do not have compatible types
2574 they are not the same. */
2575 if (!types_compatible_p (vp1->type, vp2->type))
2576 return false;
2578 /* Any phi in the same block will have it's arguments in the
2579 same edge order, because of how we store phi nodes. */
2580 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2582 tree phi2op = vp2->phiargs[i];
2583 if (phi1op == VN_TOP || phi2op == VN_TOP)
2584 continue;
2585 if (!expressions_equal_p (phi1op, phi2op))
2586 return false;
2588 return true;
2590 return false;
2593 static vec<tree> shared_lookup_phiargs;
2595 /* Lookup PHI in the current hash table, and return the resulting
2596 value number if it exists in the hash table. Return NULL_TREE if
2597 it does not exist in the hash table. */
2599 static tree
2600 vn_phi_lookup (gimple phi)
2602 vn_phi_s **slot;
2603 struct vn_phi_s vp1;
2604 unsigned i;
2606 shared_lookup_phiargs.truncate (0);
2608 /* Canonicalize the SSA_NAME's to their value number. */
2609 for (i = 0; i < gimple_phi_num_args (phi); i++)
2611 tree def = PHI_ARG_DEF (phi, i);
2612 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2613 shared_lookup_phiargs.safe_push (def);
2615 vp1.type = TREE_TYPE (gimple_phi_result (phi));
2616 vp1.phiargs = shared_lookup_phiargs;
2617 vp1.block = gimple_bb (phi);
2618 vp1.hashcode = vn_phi_compute_hash (&vp1);
2619 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2620 NO_INSERT);
2621 if (!slot && current_info == optimistic_info)
2622 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2623 NO_INSERT);
2624 if (!slot)
2625 return NULL_TREE;
2626 return (*slot)->result;
2629 /* Insert PHI into the current hash table with a value number of
2630 RESULT. */
2632 static vn_phi_t
2633 vn_phi_insert (gimple phi, tree result)
2635 vn_phi_s **slot;
2636 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2637 unsigned i;
2638 vec<tree> args = vNULL;
2640 /* Canonicalize the SSA_NAME's to their value number. */
2641 for (i = 0; i < gimple_phi_num_args (phi); i++)
2643 tree def = PHI_ARG_DEF (phi, i);
2644 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2645 args.safe_push (def);
2647 vp1->value_id = VN_INFO (result)->value_id;
2648 vp1->type = TREE_TYPE (gimple_phi_result (phi));
2649 vp1->phiargs = args;
2650 vp1->block = gimple_bb (phi);
2651 vp1->result = result;
2652 vp1->hashcode = vn_phi_compute_hash (vp1);
2654 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
2656 /* Because we iterate over phi operations more than once, it's
2657 possible the slot might already exist here, hence no assert.*/
2658 *slot = vp1;
2659 return vp1;
2663 /* Print set of components in strongly connected component SCC to OUT. */
2665 static void
2666 print_scc (FILE *out, vec<tree> scc)
2668 tree var;
2669 unsigned int i;
2671 fprintf (out, "SCC consists of:");
2672 FOR_EACH_VEC_ELT (scc, i, var)
2674 fprintf (out, " ");
2675 print_generic_expr (out, var, 0);
2677 fprintf (out, "\n");
2680 /* Set the value number of FROM to TO, return true if it has changed
2681 as a result. */
2683 static inline bool
2684 set_ssa_val_to (tree from, tree to)
2686 tree currval = SSA_VAL (from);
2687 HOST_WIDE_INT toff, coff;
2689 /* The only thing we allow as value numbers are ssa_names
2690 and invariants. So assert that here. We don't allow VN_TOP
2691 as visiting a stmt should produce a value-number other than
2692 that.
2693 ??? Still VN_TOP can happen for unreachable code, so force
2694 it to varying in that case. Not all code is prepared to
2695 get VN_TOP on valueization. */
2696 if (to == VN_TOP)
2698 if (dump_file && (dump_flags & TDF_DETAILS))
2699 fprintf (dump_file, "Forcing value number to varying on "
2700 "receiving VN_TOP\n");
2701 to = from;
2704 gcc_assert (to != NULL_TREE
2705 && (TREE_CODE (to) == SSA_NAME
2706 || is_gimple_min_invariant (to)));
2708 if (from != to)
2710 if (currval == from)
2712 if (dump_file && (dump_flags & TDF_DETAILS))
2714 fprintf (dump_file, "Not changing value number of ");
2715 print_generic_expr (dump_file, from, 0);
2716 fprintf (dump_file, " from VARYING to ");
2717 print_generic_expr (dump_file, to, 0);
2718 fprintf (dump_file, "\n");
2720 return false;
2722 else if (TREE_CODE (to) == SSA_NAME
2723 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2724 to = from;
2727 if (dump_file && (dump_flags & TDF_DETAILS))
2729 fprintf (dump_file, "Setting value number of ");
2730 print_generic_expr (dump_file, from, 0);
2731 fprintf (dump_file, " to ");
2732 print_generic_expr (dump_file, to, 0);
2735 if (currval != to
2736 && !operand_equal_p (currval, to, 0)
2737 /* ??? For addresses involving volatile objects or types operand_equal_p
2738 does not reliably detect ADDR_EXPRs as equal. We know we are only
2739 getting invariant gimple addresses here, so can use
2740 get_addr_base_and_unit_offset to do this comparison. */
2741 && !(TREE_CODE (currval) == ADDR_EXPR
2742 && TREE_CODE (to) == ADDR_EXPR
2743 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
2744 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
2745 && coff == toff))
2747 VN_INFO (from)->valnum = to;
2748 if (dump_file && (dump_flags & TDF_DETAILS))
2749 fprintf (dump_file, " (changed)\n");
2750 return true;
2752 if (dump_file && (dump_flags & TDF_DETAILS))
2753 fprintf (dump_file, "\n");
2754 return false;
2757 /* Mark as processed all the definitions in the defining stmt of USE, or
2758 the USE itself. */
2760 static void
2761 mark_use_processed (tree use)
2763 ssa_op_iter iter;
2764 def_operand_p defp;
2765 gimple stmt = SSA_NAME_DEF_STMT (use);
2767 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
2769 VN_INFO (use)->use_processed = true;
2770 return;
2773 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2775 tree def = DEF_FROM_PTR (defp);
2777 VN_INFO (def)->use_processed = true;
2781 /* Set all definitions in STMT to value number to themselves.
2782 Return true if a value number changed. */
2784 static bool
2785 defs_to_varying (gimple stmt)
2787 bool changed = false;
2788 ssa_op_iter iter;
2789 def_operand_p defp;
2791 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2793 tree def = DEF_FROM_PTR (defp);
2794 changed |= set_ssa_val_to (def, def);
2796 return changed;
2799 static bool expr_has_constants (tree expr);
2801 /* Visit a copy between LHS and RHS, return true if the value number
2802 changed. */
2804 static bool
2805 visit_copy (tree lhs, tree rhs)
2807 /* The copy may have a more interesting constant filled expression
2808 (we don't, since we know our RHS is just an SSA name). */
2809 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2810 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2812 /* And finally valueize. */
2813 rhs = SSA_VAL (rhs);
2815 return set_ssa_val_to (lhs, rhs);
2818 /* Visit a nary operator RHS, value number it, and return true if the
2819 value number of LHS has changed as a result. */
2821 static bool
2822 visit_nary_op (tree lhs, gimple stmt)
2824 bool changed = false;
2825 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2827 if (result)
2828 changed = set_ssa_val_to (lhs, result);
2829 else
2831 changed = set_ssa_val_to (lhs, lhs);
2832 vn_nary_op_insert_stmt (stmt, lhs);
2835 return changed;
2838 /* Visit a call STMT storing into LHS. Return true if the value number
2839 of the LHS has changed as a result. */
2841 static bool
2842 visit_reference_op_call (tree lhs, gimple stmt)
2844 bool changed = false;
2845 struct vn_reference_s vr1;
2846 vn_reference_t vnresult = NULL;
2847 tree vuse = gimple_vuse (stmt);
2848 tree vdef = gimple_vdef (stmt);
2850 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
2851 if (lhs && TREE_CODE (lhs) != SSA_NAME)
2852 lhs = NULL_TREE;
2854 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2855 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2856 vr1.type = gimple_expr_type (stmt);
2857 vr1.set = 0;
2858 vr1.hashcode = vn_reference_compute_hash (&vr1);
2859 vn_reference_lookup_1 (&vr1, &vnresult);
2861 if (vnresult)
2863 if (vnresult->result_vdef && vdef)
2864 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
2866 if (!vnresult->result && lhs)
2867 vnresult->result = lhs;
2869 if (vnresult->result && lhs)
2871 changed |= set_ssa_val_to (lhs, vnresult->result);
2873 if (VN_INFO (vnresult->result)->has_constants)
2874 VN_INFO (lhs)->has_constants = true;
2877 else
2879 vn_reference_s **slot;
2880 vn_reference_t vr2;
2881 if (vdef)
2882 changed |= set_ssa_val_to (vdef, vdef);
2883 if (lhs)
2884 changed |= set_ssa_val_to (lhs, lhs);
2885 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2886 vr2->vuse = vr1.vuse;
2887 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2888 vr2->type = vr1.type;
2889 vr2->set = vr1.set;
2890 vr2->hashcode = vr1.hashcode;
2891 vr2->result = lhs;
2892 vr2->result_vdef = vdef;
2893 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
2894 INSERT);
2895 if (*slot)
2896 free_reference (*slot);
2897 *slot = vr2;
2900 return changed;
2903 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2904 and return true if the value number of the LHS has changed as a result. */
2906 static bool
2907 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2909 bool changed = false;
2910 tree last_vuse;
2911 tree result;
2913 last_vuse = gimple_vuse (stmt);
2914 last_vuse_ptr = &last_vuse;
2915 result = vn_reference_lookup (op, gimple_vuse (stmt),
2916 default_vn_walk_kind, NULL);
2917 last_vuse_ptr = NULL;
2919 /* If we have a VCE, try looking up its operand as it might be stored in
2920 a different type. */
2921 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2922 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2923 default_vn_walk_kind, NULL);
2925 /* We handle type-punning through unions by value-numbering based
2926 on offset and size of the access. Be prepared to handle a
2927 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
2928 if (result
2929 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2931 /* We will be setting the value number of lhs to the value number
2932 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2933 So first simplify and lookup this expression to see if it
2934 is already available. */
2935 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2936 if ((CONVERT_EXPR_P (val)
2937 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2938 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2940 tree tem = vn_get_expr_for (TREE_OPERAND (val, 0));
2941 if ((CONVERT_EXPR_P (tem)
2942 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2943 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2944 TREE_TYPE (val), tem)))
2945 val = tem;
2947 result = val;
2948 if (!is_gimple_min_invariant (val)
2949 && TREE_CODE (val) != SSA_NAME)
2950 result = vn_nary_op_lookup (val, NULL);
2951 /* If the expression is not yet available, value-number lhs to
2952 a new SSA_NAME we create. */
2953 if (!result)
2955 result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (),
2956 "vntemp");
2957 /* Initialize value-number information properly. */
2958 VN_INFO_GET (result)->valnum = result;
2959 VN_INFO (result)->value_id = get_next_value_id ();
2960 VN_INFO (result)->expr = val;
2961 VN_INFO (result)->has_constants = expr_has_constants (val);
2962 VN_INFO (result)->needs_insertion = true;
2963 /* As all "inserted" statements are singleton SCCs, insert
2964 to the valid table. This is strictly needed to
2965 avoid re-generating new value SSA_NAMEs for the same
2966 expression during SCC iteration over and over (the
2967 optimistic table gets cleared after each iteration).
2968 We do not need to insert into the optimistic table, as
2969 lookups there will fall back to the valid table. */
2970 if (current_info == optimistic_info)
2972 current_info = valid_info;
2973 vn_nary_op_insert (val, result);
2974 current_info = optimistic_info;
2976 else
2977 vn_nary_op_insert (val, result);
2978 if (dump_file && (dump_flags & TDF_DETAILS))
2980 fprintf (dump_file, "Inserting name ");
2981 print_generic_expr (dump_file, result, 0);
2982 fprintf (dump_file, " for expression ");
2983 print_generic_expr (dump_file, val, 0);
2984 fprintf (dump_file, "\n");
2989 if (result)
2991 changed = set_ssa_val_to (lhs, result);
2992 if (TREE_CODE (result) == SSA_NAME
2993 && VN_INFO (result)->has_constants)
2995 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2996 VN_INFO (lhs)->has_constants = true;
2999 else
3001 changed = set_ssa_val_to (lhs, lhs);
3002 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3005 return changed;
3009 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3010 and return true if the value number of the LHS has changed as a result. */
3012 static bool
3013 visit_reference_op_store (tree lhs, tree op, gimple stmt)
3015 bool changed = false;
3016 vn_reference_t vnresult = NULL;
3017 tree result, assign;
3018 bool resultsame = false;
3019 tree vuse = gimple_vuse (stmt);
3020 tree vdef = gimple_vdef (stmt);
3022 /* First we want to lookup using the *vuses* from the store and see
3023 if there the last store to this location with the same address
3024 had the same value.
3026 The vuses represent the memory state before the store. If the
3027 memory state, address, and value of the store is the same as the
3028 last store to this location, then this store will produce the
3029 same memory state as that store.
3031 In this case the vdef versions for this store are value numbered to those
3032 vuse versions, since they represent the same memory state after
3033 this store.
3035 Otherwise, the vdefs for the store are used when inserting into
3036 the table, since the store generates a new memory state. */
3038 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);
3040 if (result)
3042 if (TREE_CODE (result) == SSA_NAME)
3043 result = SSA_VAL (result);
3044 if (TREE_CODE (op) == SSA_NAME)
3045 op = SSA_VAL (op);
3046 resultsame = expressions_equal_p (result, op);
3049 if (!result || !resultsame)
3051 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3052 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
3053 if (vnresult)
3055 VN_INFO (vdef)->use_processed = true;
3056 return set_ssa_val_to (vdef, vnresult->result_vdef);
3060 if (!result || !resultsame)
3062 if (dump_file && (dump_flags & TDF_DETAILS))
3064 fprintf (dump_file, "No store match\n");
3065 fprintf (dump_file, "Value numbering store ");
3066 print_generic_expr (dump_file, lhs, 0);
3067 fprintf (dump_file, " to ");
3068 print_generic_expr (dump_file, op, 0);
3069 fprintf (dump_file, "\n");
3071 /* Have to set value numbers before insert, since insert is
3072 going to valueize the references in-place. */
3073 if (vdef)
3075 changed |= set_ssa_val_to (vdef, vdef);
3078 /* Do not insert structure copies into the tables. */
3079 if (is_gimple_min_invariant (op)
3080 || is_gimple_reg (op))
3081 vn_reference_insert (lhs, op, vdef, NULL);
3083 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3084 vn_reference_insert (assign, lhs, vuse, vdef);
3086 else
3088 /* We had a match, so value number the vdef to have the value
3089 number of the vuse it came from. */
3091 if (dump_file && (dump_flags & TDF_DETAILS))
3092 fprintf (dump_file, "Store matched earlier value,"
3093 "value numbering store vdefs to matching vuses.\n");
3095 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3098 return changed;
3101 /* Visit and value number PHI, return true if the value number
3102 changed. */
3104 static bool
3105 visit_phi (gimple phi)
3107 bool changed = false;
3108 tree result;
3109 tree sameval = VN_TOP;
3110 bool allsame = true;
3112 /* TODO: We could check for this in init_sccvn, and replace this
3113 with a gcc_assert. */
3114 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3115 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3117 /* See if all non-TOP arguments have the same value. TOP is
3118 equivalent to everything, so we can ignore it. */
3119 edge_iterator ei;
3120 edge e;
3121 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3122 if (e->flags & EDGE_EXECUTABLE)
3124 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3126 if (TREE_CODE (def) == SSA_NAME)
3127 def = SSA_VAL (def);
3128 if (def == VN_TOP)
3129 continue;
3130 if (sameval == VN_TOP)
3132 sameval = def;
3134 else
3136 if (!expressions_equal_p (def, sameval))
3138 allsame = false;
3139 break;
3144 /* If all value numbered to the same value, the phi node has that
3145 value. */
3146 if (allsame)
3147 return set_ssa_val_to (PHI_RESULT (phi), sameval);
3149 /* Otherwise, see if it is equivalent to a phi node in this block. */
3150 result = vn_phi_lookup (phi);
3151 if (result)
3152 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3153 else
3155 vn_phi_insert (phi, PHI_RESULT (phi));
3156 VN_INFO (PHI_RESULT (phi))->has_constants = false;
3157 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
3158 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3161 return changed;
3164 /* Return true if EXPR contains constants. */
3166 static bool
3167 expr_has_constants (tree expr)
3169 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3171 case tcc_unary:
3172 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
3174 case tcc_binary:
3175 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
3176 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
3177 /* Constants inside reference ops are rarely interesting, but
3178 it can take a lot of looking to find them. */
3179 case tcc_reference:
3180 case tcc_declaration:
3181 return false;
3182 default:
3183 return is_gimple_min_invariant (expr);
3185 return false;
3188 /* Return true if STMT contains constants. */
3190 static bool
3191 stmt_has_constants (gimple stmt)
3193 tree tem;
3195 if (gimple_code (stmt) != GIMPLE_ASSIGN)
3196 return false;
3198 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
3200 case GIMPLE_TERNARY_RHS:
3201 tem = gimple_assign_rhs3 (stmt);
3202 if (TREE_CODE (tem) == SSA_NAME)
3203 tem = SSA_VAL (tem);
3204 if (is_gimple_min_invariant (tem))
3205 return true;
3206 /* Fallthru. */
3208 case GIMPLE_BINARY_RHS:
3209 tem = gimple_assign_rhs2 (stmt);
3210 if (TREE_CODE (tem) == SSA_NAME)
3211 tem = SSA_VAL (tem);
3212 if (is_gimple_min_invariant (tem))
3213 return true;
3214 /* Fallthru. */
3216 case GIMPLE_SINGLE_RHS:
3217 /* Constants inside reference ops are rarely interesting, but
3218 it can take a lot of looking to find them. */
3219 case GIMPLE_UNARY_RHS:
3220 tem = gimple_assign_rhs1 (stmt);
3221 if (TREE_CODE (tem) == SSA_NAME)
3222 tem = SSA_VAL (tem);
3223 return is_gimple_min_invariant (tem);
3225 default:
3226 gcc_unreachable ();
3228 return false;
3231 /* Simplify the binary expression RHS, and return the result if
3232 simplified. */
3234 static tree
3235 simplify_binary_expression (gimple stmt)
3237 tree result = NULL_TREE;
3238 tree op0 = gimple_assign_rhs1 (stmt);
3239 tree op1 = gimple_assign_rhs2 (stmt);
3240 enum tree_code code = gimple_assign_rhs_code (stmt);
3242 /* This will not catch every single case we could combine, but will
3243 catch those with constants. The goal here is to simultaneously
3244 combine constants between expressions, but avoid infinite
3245 expansion of expressions during simplification. */
3246 op0 = vn_valueize (op0);
3247 if (TREE_CODE (op0) == SSA_NAME
3248 && (VN_INFO (op0)->has_constants
3249 || TREE_CODE_CLASS (code) == tcc_comparison
3250 || code == COMPLEX_EXPR))
3251 op0 = vn_get_expr_for (op0);
3253 op1 = vn_valueize (op1);
3254 if (TREE_CODE (op1) == SSA_NAME
3255 && (VN_INFO (op1)->has_constants
3256 || code == COMPLEX_EXPR))
3257 op1 = vn_get_expr_for (op1);
3259 /* Pointer plus constant can be represented as invariant address.
3260 Do so to allow further propatation, see also tree forwprop. */
3261 if (code == POINTER_PLUS_EXPR
3262 && tree_fits_uhwi_p (op1)
3263 && TREE_CODE (op0) == ADDR_EXPR
3264 && is_gimple_min_invariant (op0))
3265 return build_invariant_address (TREE_TYPE (op0),
3266 TREE_OPERAND (op0, 0),
3267 tree_to_uhwi (op1));
3269 /* Avoid folding if nothing changed. */
3270 if (op0 == gimple_assign_rhs1 (stmt)
3271 && op1 == gimple_assign_rhs2 (stmt))
3272 return NULL_TREE;
3274 fold_defer_overflow_warnings ();
3276 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3277 if (result)
3278 STRIP_USELESS_TYPE_CONVERSION (result);
3280 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3281 stmt, 0);
3283 /* Make sure result is not a complex expression consisting
3284 of operators of operators (IE (a + b) + (a + c))
3285 Otherwise, we will end up with unbounded expressions if
3286 fold does anything at all. */
3287 if (result && valid_gimple_rhs_p (result))
3288 return result;
3290 return NULL_TREE;
3293 /* Simplify the unary expression RHS, and return the result if
3294 simplified. */
3296 static tree
3297 simplify_unary_expression (gimple stmt)
3299 tree result = NULL_TREE;
3300 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3301 enum tree_code code = gimple_assign_rhs_code (stmt);
3303 /* We handle some tcc_reference codes here that are all
3304 GIMPLE_ASSIGN_SINGLE codes. */
3305 if (code == REALPART_EXPR
3306 || code == IMAGPART_EXPR
3307 || code == VIEW_CONVERT_EXPR
3308 || code == BIT_FIELD_REF)
3309 op0 = TREE_OPERAND (op0, 0);
3311 orig_op0 = op0;
3312 op0 = vn_valueize (op0);
3313 if (TREE_CODE (op0) == SSA_NAME)
3315 if (VN_INFO (op0)->has_constants)
3316 op0 = vn_get_expr_for (op0);
3317 else if (CONVERT_EXPR_CODE_P (code)
3318 || code == REALPART_EXPR
3319 || code == IMAGPART_EXPR
3320 || code == VIEW_CONVERT_EXPR
3321 || code == BIT_FIELD_REF)
3323 /* We want to do tree-combining on conversion-like expressions.
3324 Make sure we feed only SSA_NAMEs or constants to fold though. */
3325 tree tem = vn_get_expr_for (op0);
3326 if (UNARY_CLASS_P (tem)
3327 || BINARY_CLASS_P (tem)
3328 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3329 || TREE_CODE (tem) == SSA_NAME
3330 || TREE_CODE (tem) == CONSTRUCTOR
3331 || is_gimple_min_invariant (tem))
3332 op0 = tem;
3336 /* Avoid folding if nothing changed, but remember the expression. */
3337 if (op0 == orig_op0)
3338 return NULL_TREE;
3340 if (code == BIT_FIELD_REF)
3342 tree rhs = gimple_assign_rhs1 (stmt);
3343 result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3344 op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3346 else
3347 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3348 if (result)
3350 STRIP_USELESS_TYPE_CONVERSION (result);
3351 if (valid_gimple_rhs_p (result))
3352 return result;
3355 return NULL_TREE;
3358 /* Try to simplify RHS using equivalences and constant folding. */
3360 static tree
3361 try_to_simplify (gimple stmt)
3363 enum tree_code code = gimple_assign_rhs_code (stmt);
3364 tree tem;
3366 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
3367 in this case, there is no point in doing extra work. */
3368 if (code == SSA_NAME)
3369 return NULL_TREE;
3371 /* First try constant folding based on our current lattice. */
3372 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize);
3373 if (tem
3374 && (TREE_CODE (tem) == SSA_NAME
3375 || is_gimple_min_invariant (tem)))
3376 return tem;
3378 /* If that didn't work try combining multiple statements. */
3379 switch (TREE_CODE_CLASS (code))
3381 case tcc_reference:
3382 /* Fallthrough for some unary codes that can operate on registers. */
3383 if (!(code == REALPART_EXPR
3384 || code == IMAGPART_EXPR
3385 || code == VIEW_CONVERT_EXPR
3386 || code == BIT_FIELD_REF))
3387 break;
3388 /* We could do a little more with unary ops, if they expand
3389 into binary ops, but it's debatable whether it is worth it. */
3390 case tcc_unary:
3391 return simplify_unary_expression (stmt);
3393 case tcc_comparison:
3394 case tcc_binary:
3395 return simplify_binary_expression (stmt);
3397 default:
3398 break;
3401 return NULL_TREE;
3404 /* Visit and value number USE, return true if the value number
3405 changed. */
3407 static bool
3408 visit_use (tree use)
3410 bool changed = false;
3411 gimple stmt = SSA_NAME_DEF_STMT (use);
3413 mark_use_processed (use);
3415 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3416 if (dump_file && (dump_flags & TDF_DETAILS)
3417 && !SSA_NAME_IS_DEFAULT_DEF (use))
3419 fprintf (dump_file, "Value numbering ");
3420 print_generic_expr (dump_file, use, 0);
3421 fprintf (dump_file, " stmt = ");
3422 print_gimple_stmt (dump_file, stmt, 0, 0);
3425 /* Handle uninitialized uses. */
3426 if (SSA_NAME_IS_DEFAULT_DEF (use))
3427 changed = set_ssa_val_to (use, use);
3428 else
3430 if (gimple_code (stmt) == GIMPLE_PHI)
3431 changed = visit_phi (stmt);
3432 else if (gimple_has_volatile_ops (stmt))
3433 changed = defs_to_varying (stmt);
3434 else if (is_gimple_assign (stmt))
3436 enum tree_code code = gimple_assign_rhs_code (stmt);
3437 tree lhs = gimple_assign_lhs (stmt);
3438 tree rhs1 = gimple_assign_rhs1 (stmt);
3439 tree simplified;
3441 /* Shortcut for copies. Simplifying copies is pointless,
3442 since we copy the expression and value they represent. */
3443 if (code == SSA_NAME
3444 && TREE_CODE (lhs) == SSA_NAME)
3446 changed = visit_copy (lhs, rhs1);
3447 goto done;
3449 simplified = try_to_simplify (stmt);
3450 if (simplified)
3452 if (dump_file && (dump_flags & TDF_DETAILS))
3454 fprintf (dump_file, "RHS ");
3455 print_gimple_expr (dump_file, stmt, 0, 0);
3456 fprintf (dump_file, " simplified to ");
3457 print_generic_expr (dump_file, simplified, 0);
3458 if (TREE_CODE (lhs) == SSA_NAME)
3459 fprintf (dump_file, " has constants %d\n",
3460 expr_has_constants (simplified));
3461 else
3462 fprintf (dump_file, "\n");
3465 /* Setting value numbers to constants will occasionally
3466 screw up phi congruence because constants are not
3467 uniquely associated with a single ssa name that can be
3468 looked up. */
3469 if (simplified
3470 && is_gimple_min_invariant (simplified)
3471 && TREE_CODE (lhs) == SSA_NAME)
3473 VN_INFO (lhs)->expr = simplified;
3474 VN_INFO (lhs)->has_constants = true;
3475 changed = set_ssa_val_to (lhs, simplified);
3476 goto done;
3478 else if (simplified
3479 && TREE_CODE (simplified) == SSA_NAME
3480 && TREE_CODE (lhs) == SSA_NAME)
3482 changed = visit_copy (lhs, simplified);
3483 goto done;
3485 else if (simplified)
3487 if (TREE_CODE (lhs) == SSA_NAME)
3489 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3490 /* We have to unshare the expression or else
3491 valuizing may change the IL stream. */
3492 VN_INFO (lhs)->expr = unshare_expr (simplified);
3495 else if (stmt_has_constants (stmt)
3496 && TREE_CODE (lhs) == SSA_NAME)
3497 VN_INFO (lhs)->has_constants = true;
3498 else if (TREE_CODE (lhs) == SSA_NAME)
3500 /* We reset expr and constantness here because we may
3501 have been value numbering optimistically, and
3502 iterating. They may become non-constant in this case,
3503 even if they were optimistically constant. */
3505 VN_INFO (lhs)->has_constants = false;
3506 VN_INFO (lhs)->expr = NULL_TREE;
3509 if ((TREE_CODE (lhs) == SSA_NAME
3510 /* We can substitute SSA_NAMEs that are live over
3511 abnormal edges with their constant value. */
3512 && !(gimple_assign_copy_p (stmt)
3513 && is_gimple_min_invariant (rhs1))
3514 && !(simplified
3515 && is_gimple_min_invariant (simplified))
3516 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3517 /* Stores or copies from SSA_NAMEs that are live over
3518 abnormal edges are a problem. */
3519 || (code == SSA_NAME
3520 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3521 changed = defs_to_varying (stmt);
3522 else if (REFERENCE_CLASS_P (lhs)
3523 || DECL_P (lhs))
3524 changed = visit_reference_op_store (lhs, rhs1, stmt);
3525 else if (TREE_CODE (lhs) == SSA_NAME)
3527 if ((gimple_assign_copy_p (stmt)
3528 && is_gimple_min_invariant (rhs1))
3529 || (simplified
3530 && is_gimple_min_invariant (simplified)))
3532 VN_INFO (lhs)->has_constants = true;
3533 if (simplified)
3534 changed = set_ssa_val_to (lhs, simplified);
3535 else
3536 changed = set_ssa_val_to (lhs, rhs1);
3538 else
3540 /* First try to lookup the simplified expression. */
3541 if (simplified)
3543 enum gimple_rhs_class rhs_class;
3546 rhs_class = get_gimple_rhs_class (TREE_CODE (simplified));
3547 if ((rhs_class == GIMPLE_UNARY_RHS
3548 || rhs_class == GIMPLE_BINARY_RHS
3549 || rhs_class == GIMPLE_TERNARY_RHS)
3550 && valid_gimple_rhs_p (simplified))
3552 tree result = vn_nary_op_lookup (simplified, NULL);
3553 if (result)
3555 changed = set_ssa_val_to (lhs, result);
3556 goto done;
3561 /* Otherwise visit the original statement. */
3562 switch (vn_get_stmt_kind (stmt))
3564 case VN_NARY:
3565 changed = visit_nary_op (lhs, stmt);
3566 break;
3567 case VN_REFERENCE:
3568 changed = visit_reference_op_load (lhs, rhs1, stmt);
3569 break;
3570 default:
3571 changed = defs_to_varying (stmt);
3572 break;
3576 else
3577 changed = defs_to_varying (stmt);
3579 else if (is_gimple_call (stmt))
3581 tree lhs = gimple_call_lhs (stmt);
3582 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3584 /* Try constant folding based on our current lattice. */
3585 tree simplified = gimple_fold_stmt_to_constant_1 (stmt,
3586 vn_valueize);
3587 if (simplified)
3589 if (dump_file && (dump_flags & TDF_DETAILS))
3591 fprintf (dump_file, "call ");
3592 print_gimple_expr (dump_file, stmt, 0, 0);
3593 fprintf (dump_file, " simplified to ");
3594 print_generic_expr (dump_file, simplified, 0);
3595 if (TREE_CODE (lhs) == SSA_NAME)
3596 fprintf (dump_file, " has constants %d\n",
3597 expr_has_constants (simplified));
3598 else
3599 fprintf (dump_file, "\n");
3602 /* Setting value numbers to constants will occasionally
3603 screw up phi congruence because constants are not
3604 uniquely associated with a single ssa name that can be
3605 looked up. */
3606 if (simplified
3607 && is_gimple_min_invariant (simplified))
3609 VN_INFO (lhs)->expr = simplified;
3610 VN_INFO (lhs)->has_constants = true;
3611 changed = set_ssa_val_to (lhs, simplified);
3612 if (gimple_vdef (stmt))
3613 changed |= set_ssa_val_to (gimple_vdef (stmt),
3614 gimple_vuse (stmt));
3615 goto done;
3617 else if (simplified
3618 && TREE_CODE (simplified) == SSA_NAME)
3620 changed = visit_copy (lhs, simplified);
3621 if (gimple_vdef (stmt))
3622 changed |= set_ssa_val_to (gimple_vdef (stmt),
3623 gimple_vuse (stmt));
3624 goto done;
3626 else
3628 if (stmt_has_constants (stmt))
3629 VN_INFO (lhs)->has_constants = true;
3630 else
3632 /* We reset expr and constantness here because we may
3633 have been value numbering optimistically, and
3634 iterating. They may become non-constant in this case,
3635 even if they were optimistically constant. */
3636 VN_INFO (lhs)->has_constants = false;
3637 VN_INFO (lhs)->expr = NULL_TREE;
3640 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3642 changed = defs_to_varying (stmt);
3643 goto done;
3648 if (!gimple_call_internal_p (stmt)
3649 && (/* Calls to the same function with the same vuse
3650 and the same operands do not necessarily return the same
3651 value, unless they're pure or const. */
3652 gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
3653 /* If calls have a vdef, subsequent calls won't have
3654 the same incoming vuse. So, if 2 calls with vdef have the
3655 same vuse, we know they're not subsequent.
3656 We can value number 2 calls to the same function with the
3657 same vuse and the same operands which are not subsequent
3658 the same, because there is no code in the program that can
3659 compare the 2 values... */
3660 || (gimple_vdef (stmt)
3661 /* ... unless the call returns a pointer which does
3662 not alias with anything else. In which case the
3663 information that the values are distinct are encoded
3664 in the IL. */
3665 && !(gimple_call_return_flags (stmt) & ERF_NOALIAS))))
3666 changed = visit_reference_op_call (lhs, stmt);
3667 else
3668 changed = defs_to_varying (stmt);
3670 else
3671 changed = defs_to_varying (stmt);
3673 done:
3674 return changed;
3677 /* Compare two operands by reverse postorder index */
3679 static int
3680 compare_ops (const void *pa, const void *pb)
3682 const tree opa = *((const tree *)pa);
3683 const tree opb = *((const tree *)pb);
3684 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3685 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3686 basic_block bba;
3687 basic_block bbb;
3689 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3690 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3691 else if (gimple_nop_p (opstmta))
3692 return -1;
3693 else if (gimple_nop_p (opstmtb))
3694 return 1;
3696 bba = gimple_bb (opstmta);
3697 bbb = gimple_bb (opstmtb);
3699 if (!bba && !bbb)
3700 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3701 else if (!bba)
3702 return -1;
3703 else if (!bbb)
3704 return 1;
3706 if (bba == bbb)
3708 if (gimple_code (opstmta) == GIMPLE_PHI
3709 && gimple_code (opstmtb) == GIMPLE_PHI)
3710 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3711 else if (gimple_code (opstmta) == GIMPLE_PHI)
3712 return -1;
3713 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3714 return 1;
3715 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3716 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3717 else
3718 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3720 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3723 /* Sort an array containing members of a strongly connected component
3724 SCC so that the members are ordered by RPO number.
3725 This means that when the sort is complete, iterating through the
3726 array will give you the members in RPO order. */
3728 static void
3729 sort_scc (vec<tree> scc)
3731 scc.qsort (compare_ops);
3734 /* Insert the no longer used nary ONARY to the hash INFO. */
3736 static void
3737 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3739 size_t size = sizeof_vn_nary_op (onary->length);
3740 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3741 &info->nary_obstack);
3742 memcpy (nary, onary, size);
3743 vn_nary_op_insert_into (nary, info->nary, false);
3746 /* Insert the no longer used phi OPHI to the hash INFO. */
3748 static void
3749 copy_phi (vn_phi_t ophi, vn_tables_t info)
3751 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3752 vn_phi_s **slot;
3753 memcpy (phi, ophi, sizeof (*phi));
3754 ophi->phiargs.create (0);
3755 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
3756 gcc_assert (!*slot);
3757 *slot = phi;
3760 /* Insert the no longer used reference OREF to the hash INFO. */
3762 static void
3763 copy_reference (vn_reference_t oref, vn_tables_t info)
3765 vn_reference_t ref;
3766 vn_reference_s **slot;
3767 ref = (vn_reference_t) pool_alloc (info->references_pool);
3768 memcpy (ref, oref, sizeof (*ref));
3769 oref->operands.create (0);
3770 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
3771 if (*slot)
3772 free_reference (*slot);
3773 *slot = ref;
3776 /* Process a strongly connected component in the SSA graph. */
3778 static void
3779 process_scc (vec<tree> scc)
3781 tree var;
3782 unsigned int i;
3783 unsigned int iterations = 0;
3784 bool changed = true;
3785 vn_nary_op_iterator_type hin;
3786 vn_phi_iterator_type hip;
3787 vn_reference_iterator_type hir;
3788 vn_nary_op_t nary;
3789 vn_phi_t phi;
3790 vn_reference_t ref;
3792 /* If the SCC has a single member, just visit it. */
3793 if (scc.length () == 1)
3795 tree use = scc[0];
3796 if (VN_INFO (use)->use_processed)
3797 return;
3798 /* We need to make sure it doesn't form a cycle itself, which can
3799 happen for self-referential PHI nodes. In that case we would
3800 end up inserting an expression with VN_TOP operands into the
3801 valid table which makes us derive bogus equivalences later.
3802 The cheapest way to check this is to assume it for all PHI nodes. */
3803 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3804 /* Fallthru to iteration. */ ;
3805 else
3807 visit_use (use);
3808 return;
3812 if (dump_file && (dump_flags & TDF_DETAILS))
3813 print_scc (dump_file, scc);
3815 /* Iterate over the SCC with the optimistic table until it stops
3816 changing. */
3817 current_info = optimistic_info;
3818 while (changed)
3820 changed = false;
3821 iterations++;
3822 if (dump_file && (dump_flags & TDF_DETAILS))
3823 fprintf (dump_file, "Starting iteration %d\n", iterations);
3824 /* As we are value-numbering optimistically we have to
3825 clear the expression tables and the simplified expressions
3826 in each iteration until we converge. */
3827 optimistic_info->nary->empty ();
3828 optimistic_info->phis->empty ();
3829 optimistic_info->references->empty ();
3830 obstack_free (&optimistic_info->nary_obstack, NULL);
3831 gcc_obstack_init (&optimistic_info->nary_obstack);
3832 empty_alloc_pool (optimistic_info->phis_pool);
3833 empty_alloc_pool (optimistic_info->references_pool);
3834 FOR_EACH_VEC_ELT (scc, i, var)
3835 VN_INFO (var)->expr = NULL_TREE;
3836 FOR_EACH_VEC_ELT (scc, i, var)
3837 changed |= visit_use (var);
3840 if (dump_file && (dump_flags & TDF_DETAILS))
3841 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
3842 statistics_histogram_event (cfun, "SCC iterations", iterations);
3844 /* Finally, copy the contents of the no longer used optimistic
3845 table to the valid table. */
3846 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
3847 copy_nary (nary, valid_info);
3848 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
3849 copy_phi (phi, valid_info);
3850 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
3851 ref, vn_reference_t, hir)
3852 copy_reference (ref, valid_info);
3854 current_info = valid_info;
3858 /* Pop the components of the found SCC for NAME off the SCC stack
3859 and process them. Returns true if all went well, false if
3860 we run into resource limits. */
3862 static bool
3863 extract_and_process_scc_for_name (tree name)
3865 auto_vec<tree> scc;
3866 tree x;
3868 /* Found an SCC, pop the components off the SCC stack and
3869 process them. */
3872 x = sccstack.pop ();
3874 VN_INFO (x)->on_sccstack = false;
3875 scc.safe_push (x);
3876 } while (x != name);
3878 /* Bail out of SCCVN in case a SCC turns out to be incredibly large. */
3879 if (scc.length ()
3880 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3882 if (dump_file)
3883 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3884 "SCC size %u exceeding %u\n", scc.length (),
3885 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3887 return false;
3890 if (scc.length () > 1)
3891 sort_scc (scc);
3893 process_scc (scc);
3895 return true;
3898 /* Depth first search on NAME to discover and process SCC's in the SSA
3899 graph.
3900 Execution of this algorithm relies on the fact that the SCC's are
3901 popped off the stack in topological order.
3902 Returns true if successful, false if we stopped processing SCC's due
3903 to resource constraints. */
3905 static bool
3906 DFS (tree name)
3908 vec<ssa_op_iter> itervec = vNULL;
3909 vec<tree> namevec = vNULL;
3910 use_operand_p usep = NULL;
3911 gimple defstmt;
3912 tree use;
3913 ssa_op_iter iter;
3915 start_over:
3916 /* SCC info */
3917 VN_INFO (name)->dfsnum = next_dfs_num++;
3918 VN_INFO (name)->visited = true;
3919 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3921 sccstack.safe_push (name);
3922 VN_INFO (name)->on_sccstack = true;
3923 defstmt = SSA_NAME_DEF_STMT (name);
3925 /* Recursively DFS on our operands, looking for SCC's. */
3926 if (!gimple_nop_p (defstmt))
3928 /* Push a new iterator. */
3929 if (gimple_code (defstmt) == GIMPLE_PHI)
3930 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3931 else
3932 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3934 else
3935 clear_and_done_ssa_iter (&iter);
3937 while (1)
3939 /* If we are done processing uses of a name, go up the stack
3940 of iterators and process SCCs as we found them. */
3941 if (op_iter_done (&iter))
3943 /* See if we found an SCC. */
3944 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3945 if (!extract_and_process_scc_for_name (name))
3947 namevec.release ();
3948 itervec.release ();
3949 return false;
3952 /* Check if we are done. */
3953 if (namevec.is_empty ())
3955 namevec.release ();
3956 itervec.release ();
3957 return true;
3960 /* Restore the last use walker and continue walking there. */
3961 use = name;
3962 name = namevec.pop ();
3963 memcpy (&iter, &itervec.last (),
3964 sizeof (ssa_op_iter));
3965 itervec.pop ();
3966 goto continue_walking;
3969 use = USE_FROM_PTR (usep);
3971 /* Since we handle phi nodes, we will sometimes get
3972 invariants in the use expression. */
3973 if (TREE_CODE (use) == SSA_NAME)
3975 if (! (VN_INFO (use)->visited))
3977 /* Recurse by pushing the current use walking state on
3978 the stack and starting over. */
3979 itervec.safe_push (iter);
3980 namevec.safe_push (name);
3981 name = use;
3982 goto start_over;
3984 continue_walking:
3985 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3986 VN_INFO (use)->low);
3988 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3989 && VN_INFO (use)->on_sccstack)
3991 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3992 VN_INFO (name)->low);
3996 usep = op_iter_next_use (&iter);
4000 /* Allocate a value number table. */
4002 static void
4003 allocate_vn_table (vn_tables_t table)
4005 table->phis = new vn_phi_table_type (23);
4006 table->nary = new vn_nary_op_table_type (23);
4007 table->references = new vn_reference_table_type (23);
4009 gcc_obstack_init (&table->nary_obstack);
4010 table->phis_pool = create_alloc_pool ("VN phis",
4011 sizeof (struct vn_phi_s),
4012 30);
4013 table->references_pool = create_alloc_pool ("VN references",
4014 sizeof (struct vn_reference_s),
4015 30);
4018 /* Free a value number table. */
4020 static void
4021 free_vn_table (vn_tables_t table)
4023 delete table->phis;
4024 table->phis = NULL;
4025 delete table->nary;
4026 table->nary = NULL;
4027 delete table->references;
4028 table->references = NULL;
4029 obstack_free (&table->nary_obstack, NULL);
4030 free_alloc_pool (table->phis_pool);
4031 free_alloc_pool (table->references_pool);
4034 static void
4035 init_scc_vn (void)
4037 size_t i;
4038 int j;
4039 int *rpo_numbers_temp;
4041 calculate_dominance_info (CDI_DOMINATORS);
4042 sccstack.create (0);
4043 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4045 constant_value_ids = BITMAP_ALLOC (NULL);
4047 next_dfs_num = 1;
4048 next_value_id = 1;
4050 vn_ssa_aux_table.create (num_ssa_names + 1);
4051 /* VEC_alloc doesn't actually grow it to the right size, it just
4052 preallocates the space to do so. */
4053 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4054 gcc_obstack_init (&vn_ssa_aux_obstack);
4056 shared_lookup_phiargs.create (0);
4057 shared_lookup_references.create (0);
4058 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4059 rpo_numbers_temp =
4060 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4061 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4063 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
4064 the i'th block in RPO order is bb. We want to map bb's to RPO
4065 numbers, so we need to rearrange this array. */
4066 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4067 rpo_numbers[rpo_numbers_temp[j]] = j;
4069 XDELETE (rpo_numbers_temp);
4071 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
4073 /* Create the VN_INFO structures, and initialize value numbers to
4074 TOP. */
4075 for (i = 0; i < num_ssa_names; i++)
4077 tree name = ssa_name (i);
4078 if (name)
4080 VN_INFO_GET (name)->valnum = VN_TOP;
4081 VN_INFO (name)->expr = NULL_TREE;
4082 VN_INFO (name)->value_id = 0;
4086 renumber_gimple_stmt_uids ();
4088 /* Create the valid and optimistic value numbering tables. */
4089 valid_info = XCNEW (struct vn_tables_s);
4090 allocate_vn_table (valid_info);
4091 optimistic_info = XCNEW (struct vn_tables_s);
4092 allocate_vn_table (optimistic_info);
4095 void
4096 free_scc_vn (void)
4098 size_t i;
4100 delete constant_to_value_id;
4101 constant_to_value_id = NULL;
4102 BITMAP_FREE (constant_value_ids);
4103 shared_lookup_phiargs.release ();
4104 shared_lookup_references.release ();
4105 XDELETEVEC (rpo_numbers);
4107 for (i = 0; i < num_ssa_names; i++)
4109 tree name = ssa_name (i);
4110 if (name
4111 && VN_INFO (name)->needs_insertion)
4112 release_ssa_name (name);
4114 obstack_free (&vn_ssa_aux_obstack, NULL);
4115 vn_ssa_aux_table.release ();
4117 sccstack.release ();
4118 free_vn_table (valid_info);
4119 XDELETE (valid_info);
4120 free_vn_table (optimistic_info);
4121 XDELETE (optimistic_info);
4124 /* Set *ID according to RESULT. */
4126 static void
4127 set_value_id_for_result (tree result, unsigned int *id)
4129 if (result && TREE_CODE (result) == SSA_NAME)
4130 *id = VN_INFO (result)->value_id;
4131 else if (result && is_gimple_min_invariant (result))
4132 *id = get_or_alloc_constant_value_id (result);
4133 else
4134 *id = get_next_value_id ();
4137 /* Set the value ids in the valid hash tables. */
4139 static void
4140 set_hashtable_value_ids (void)
4142 vn_nary_op_iterator_type hin;
4143 vn_phi_iterator_type hip;
4144 vn_reference_iterator_type hir;
4145 vn_nary_op_t vno;
4146 vn_reference_t vr;
4147 vn_phi_t vp;
4149 /* Now set the value ids of the things we had put in the hash
4150 table. */
4152 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4153 set_value_id_for_result (vno->result, &vno->value_id);
4155 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4156 set_value_id_for_result (vp->result, &vp->value_id);
4158 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4159 hir)
4160 set_value_id_for_result (vr->result, &vr->value_id);
4163 class cond_dom_walker : public dom_walker
4165 public:
4166 cond_dom_walker () : dom_walker (CDI_DOMINATORS), fail (false) {}
4168 virtual void before_dom_children (basic_block);
4170 bool fail;
4173 void
4174 cond_dom_walker::before_dom_children (basic_block bb)
4176 edge e;
4177 edge_iterator ei;
4179 if (fail)
4180 return;
4182 /* If any of the predecessor edges that do not come from blocks dominated
4183 by us are still marked as possibly executable consider this block
4184 reachable. */
4185 bool reachable = bb == ENTRY_BLOCK_PTR_FOR_FN (cfun);
4186 FOR_EACH_EDGE (e, ei, bb->preds)
4187 if (!dominated_by_p (CDI_DOMINATORS, e->src, bb))
4188 reachable |= (e->flags & EDGE_EXECUTABLE);
4190 /* If the block is not reachable all outgoing edges are not
4191 executable. */
4192 if (!reachable)
4194 if (dump_file && (dump_flags & TDF_DETAILS))
4195 fprintf (dump_file, "Marking all outgoing edges of unreachable "
4196 "BB %d as not executable\n", bb->index);
4198 FOR_EACH_EDGE (e, ei, bb->succs)
4199 e->flags &= ~EDGE_EXECUTABLE;
4200 return;
4203 gimple stmt = last_stmt (bb);
4204 if (!stmt)
4205 return;
4207 enum gimple_code code = gimple_code (stmt);
4208 if (code != GIMPLE_COND
4209 && code != GIMPLE_SWITCH
4210 && code != GIMPLE_GOTO)
4211 return;
4213 if (dump_file && (dump_flags & TDF_DETAILS))
4215 fprintf (dump_file, "Value-numbering operands of stmt ending BB %d: ",
4216 bb->index);
4217 print_gimple_stmt (dump_file, stmt, 0, 0);
4220 /* Value-number the last stmts SSA uses. */
4221 ssa_op_iter i;
4222 tree op;
4223 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
4224 if (VN_INFO (op)->visited == false
4225 && !DFS (op))
4227 fail = true;
4228 return;
4231 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4232 if value-numbering can prove they are not reachable. Handling
4233 computed gotos is also possible. */
4234 tree val;
4235 switch (code)
4237 case GIMPLE_COND:
4239 tree lhs = gimple_cond_lhs (stmt);
4240 tree rhs = gimple_cond_rhs (stmt);
4241 /* Work hard in computing the condition and take into account
4242 the valueization of the defining stmt. */
4243 if (TREE_CODE (lhs) == SSA_NAME)
4244 lhs = vn_get_expr_for (lhs);
4245 if (TREE_CODE (rhs) == SSA_NAME)
4246 rhs = vn_get_expr_for (rhs);
4247 val = fold_binary (gimple_cond_code (stmt),
4248 boolean_type_node, lhs, rhs);
4249 break;
4251 case GIMPLE_SWITCH:
4252 val = gimple_switch_index (stmt);
4253 break;
4254 case GIMPLE_GOTO:
4255 val = gimple_goto_dest (stmt);
4256 break;
4257 default:
4258 gcc_unreachable ();
4260 if (!val)
4261 return;
4263 edge taken = find_taken_edge (bb, vn_valueize (val));
4264 if (!taken)
4265 return;
4267 if (dump_file && (dump_flags & TDF_DETAILS))
4268 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4269 "not executable\n", bb->index, bb->index, taken->dest->index);
4271 FOR_EACH_EDGE (e, ei, bb->succs)
4272 if (e != taken)
4273 e->flags &= ~EDGE_EXECUTABLE;
4276 /* Do SCCVN. Returns true if it finished, false if we bailed out
4277 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
4278 how we use the alias oracle walking during the VN process. */
4280 bool
4281 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4283 basic_block bb;
4284 size_t i;
4285 tree param;
4287 default_vn_walk_kind = default_vn_walk_kind_;
4289 init_scc_vn ();
4290 current_info = valid_info;
4292 for (param = DECL_ARGUMENTS (current_function_decl);
4293 param;
4294 param = DECL_CHAIN (param))
4296 tree def = ssa_default_def (cfun, param);
4297 if (def)
4299 VN_INFO (def)->visited = true;
4300 VN_INFO (def)->valnum = def;
4304 /* Mark all edges as possibly executable. */
4305 FOR_ALL_BB_FN (bb, cfun)
4307 edge_iterator ei;
4308 edge e;
4309 FOR_EACH_EDGE (e, ei, bb->succs)
4310 e->flags |= EDGE_EXECUTABLE;
4313 /* Walk all blocks in dominator order, value-numbering the last stmts
4314 SSA uses and decide whether outgoing edges are not executable. */
4315 cond_dom_walker walker;
4316 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4317 if (walker.fail)
4319 free_scc_vn ();
4320 return false;
4323 /* Value-number remaining SSA names. */
4324 for (i = 1; i < num_ssa_names; ++i)
4326 tree name = ssa_name (i);
4327 if (name
4328 && VN_INFO (name)->visited == false
4329 && !has_zero_uses (name))
4330 if (!DFS (name))
4332 free_scc_vn ();
4333 return false;
4337 /* Initialize the value ids. */
4339 for (i = 1; i < num_ssa_names; ++i)
4341 tree name = ssa_name (i);
4342 vn_ssa_aux_t info;
4343 if (!name)
4344 continue;
4345 info = VN_INFO (name);
4346 if (info->valnum == name
4347 || info->valnum == VN_TOP)
4348 info->value_id = get_next_value_id ();
4349 else if (is_gimple_min_invariant (info->valnum))
4350 info->value_id = get_or_alloc_constant_value_id (info->valnum);
4353 /* Propagate. */
4354 for (i = 1; i < num_ssa_names; ++i)
4356 tree name = ssa_name (i);
4357 vn_ssa_aux_t info;
4358 if (!name)
4359 continue;
4360 info = VN_INFO (name);
4361 if (TREE_CODE (info->valnum) == SSA_NAME
4362 && info->valnum != name
4363 && info->value_id != VN_INFO (info->valnum)->value_id)
4364 info->value_id = VN_INFO (info->valnum)->value_id;
4367 set_hashtable_value_ids ();
4369 if (dump_file && (dump_flags & TDF_DETAILS))
4371 fprintf (dump_file, "Value numbers:\n");
4372 for (i = 0; i < num_ssa_names; i++)
4374 tree name = ssa_name (i);
4375 if (name
4376 && VN_INFO (name)->visited
4377 && SSA_VAL (name) != name)
4379 print_generic_expr (dump_file, name, 0);
4380 fprintf (dump_file, " = ");
4381 print_generic_expr (dump_file, SSA_VAL (name), 0);
4382 fprintf (dump_file, "\n");
4387 return true;
4390 /* Return the maximum value id we have ever seen. */
4392 unsigned int
4393 get_max_value_id (void)
4395 return next_value_id;
4398 /* Return the next unique value id. */
4400 unsigned int
4401 get_next_value_id (void)
4403 return next_value_id++;
4407 /* Compare two expressions E1 and E2 and return true if they are equal. */
4409 bool
4410 expressions_equal_p (tree e1, tree e2)
4412 /* The obvious case. */
4413 if (e1 == e2)
4414 return true;
4416 /* If only one of them is null, they cannot be equal. */
4417 if (!e1 || !e2)
4418 return false;
4420 /* Now perform the actual comparison. */
4421 if (TREE_CODE (e1) == TREE_CODE (e2)
4422 && operand_equal_p (e1, e2, OEP_PURE_SAME))
4423 return true;
4425 return false;
4429 /* Return true if the nary operation NARY may trap. This is a copy
4430 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
4432 bool
4433 vn_nary_may_trap (vn_nary_op_t nary)
4435 tree type;
4436 tree rhs2 = NULL_TREE;
4437 bool honor_nans = false;
4438 bool honor_snans = false;
4439 bool fp_operation = false;
4440 bool honor_trapv = false;
4441 bool handled, ret;
4442 unsigned i;
4444 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4445 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4446 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4448 type = nary->type;
4449 fp_operation = FLOAT_TYPE_P (type);
4450 if (fp_operation)
4452 honor_nans = flag_trapping_math && !flag_finite_math_only;
4453 honor_snans = flag_signaling_nans != 0;
4455 else if (INTEGRAL_TYPE_P (type)
4456 && TYPE_OVERFLOW_TRAPS (type))
4457 honor_trapv = true;
4459 if (nary->length >= 2)
4460 rhs2 = nary->op[1];
4461 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4462 honor_trapv,
4463 honor_nans, honor_snans, rhs2,
4464 &handled);
4465 if (handled
4466 && ret)
4467 return true;
4469 for (i = 0; i < nary->length; ++i)
4470 if (tree_could_trap_p (nary->op[i]))
4471 return true;
4473 return false;