/* SCC value numbering for trees
   Copyright (C) 2006-2014 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stor-layout.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "hash-table.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "inchash.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "dumpfile.h"
#include "alloc-pool.h"
#include "flags.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "tree-cfg.h"
#include "domwalk.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
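
/* As a rough illustration of the optimistic iteration in alternative 2
   above, consider a counting loop in SSA form (hypothetical GIMPLE,
   simplified):

       # i_1 = PHI <0 (entry), i_2 (latch)>
       # j_3 = PHI <0 (entry), j_4 (latch)>
       i_2 = i_1 + 1;
       j_4 = j_3 + 1;

   i_1, i_2, j_3 and j_4 form a single SCC.  On the first optimistic
   pass both PHIs look like PHI <0, VN_TOP> (VN_TOP arguments are
   ignored), so i_1 and j_3 get the same value number, which in turn
   makes i_2 and j_4 hash alike.  Re-iterating the SCC with these
   assumptions changes nothing, so they are proven and i and j collapse
   to one value; had an iteration changed some entry, the optimistic
   table would have been discarded and the SCC re-valued.  */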
/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : typed_noop_remove <vn_nary_op_s>
{
  typedef vn_nary_op_s value_type;
  typedef vn_nary_op_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const value_type *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const value_type *vno1, const compare_type *vno2)
{
  return vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
/* vn_phi hashtable helpers.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher
{
  typedef vn_phi_s value_type;
  typedef vn_phi_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const value_type *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const value_type *vp1, const compare_type *vp2)
{
  return vn_phi_eq (vp1, vp2);
}

/* Free a phi operation structure VP.  */

inline void
vn_phi_hasher::remove (value_type *phi)
{
  phi->phiargs.release ();
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Free a reference operation structure VP.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}
/* vn_reference hashtable helpers.  */

struct vn_reference_hasher
{
  typedef vn_reference_s value_type;
  typedef vn_reference_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const value_type *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const value_type *v, const compare_type *c)
{
  return vn_reference_eq (v, c);
}

inline void
vn_reference_hasher::remove (value_type *v)
{
  free_reference (v);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;
/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : typed_free_remove <vn_constant_s>
{
  typedef vn_constant_s value_type;
  typedef vn_constant_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const value_type *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const value_type *vc1, const compare_type *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;
static bitmap constant_value_ids;
/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;

/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)
/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static inline tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      x = SSA_VAL (x);
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}
/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static vec<tree> sccstack;

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
    vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
  return newinfo;
}
/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;
  enum tree_code code;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is not an assignment use it directly.  */
  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* Note that we can valueize here because we clear the cached
     simplified expressions after each optimistic iteration.  */
  code = gimple_assign_rhs_code (def_stmt);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      if ((code == REALPART_EXPR
	   || code == IMAGPART_EXPR
	   || code == VIEW_CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
				      0)) == SSA_NAME)
	expr = fold_build1 (code,
			    gimple_expr_type (def_stmt),
			    vn_valueize (TREE_OPERAND
					   (gimple_assign_rhs1 (def_stmt), 0)));
      break;

    case tcc_unary:
      expr = fold_build1 (code,
			  gimple_expr_type (def_stmt),
			  vn_valueize (gimple_assign_rhs1 (def_stmt)));
      break;

    case tcc_binary:
      expr = fold_build2 (code,
			  gimple_expr_type (def_stmt),
			  vn_valueize (gimple_assign_rhs1 (def_stmt)),
			  vn_valueize (gimple_assign_rhs2 (def_stmt)));
      break;

    case tcc_exceptional:
      if (code == CONSTRUCTOR
	  && TREE_CODE
	       (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
	expr = gimple_assign_rhs1 (def_stmt);
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
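
/* For example (hypothetical SSA names), given

       y_2 = -x_1;
       z_3 = y_2;

   vn_get_expr_for (z_3) follows z_3 to its value y_2 and rebuilds the
   representative expression -VAL(x_1) via fold_build1, caching it in
   the vn_ssa_aux entry so later queries return it directly.  */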
/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
		  return VN_NARY;

		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      case tcc_constant:
		return VN_CONSTANT;

	      default:
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}
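
/* E.g. (hypothetical GIMPLE): a_1 = b_2 + 1 is VN_NARY, a_1 = *p_2 and
   a_1 = foo () are VN_REFERENCE, a_1 = PHI <...> is VN_PHI, and
   a_1 = 42 is VN_CONSTANT.  */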
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist, return 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compute the hash for a reference operand VRO1.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1,
			      inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}
/* Compute a hash for the reference operation VR1 and return it.  */

static hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  inchash::hash hstate;
  hashval_t result;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (vro->off != -1)
	{
	  if (off == -1)
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (off != -1
	      && off != 0)
	    hstate.add_int (off);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  hstate.add_int (TREE_CODE (op));
		  inchash::add_expr (op, hstate);
		}
	    }
	  else
	    vn_reference_op_compute_hash (vro, hstate);
	}
    }
  result = hstate.end ();
  /* ??? We would ICE later if we hash instead of adding that in.  */
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
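
/* The offset accumulation above is what makes differently structured
   operand strings hash alike: e.g. (hypothetical layout) the ops for
   a.b.c, whose component offsets sum to 8, feed the same running
   constant 8 into the hash as MEM[&a + 8] does, because ops with a
   known constant offset are folded into one running value instead of
   being hashed individually.  */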
/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  if (vro1->off == -1)
	    break;
	  off1 += vro1->off;
	}
      for (; vr2->operands.iterate (j, &vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  if (vro2->off == -1)
	    break;
	  off2 += vro2->off;
	}
      if (off1 != off2)
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
	 || vr2->operands.length () != j);

  return true;
}
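
/* The offset-summing loop above is what lets two differently structured
   operand strings compare equal: e.g. (hypothetical layout) a single
   MEM_REF with constant offset 4 on one side can match a chain of
   COMPONENT_REF/ARRAY_REF ops on the other side whose constant offsets
   also sum to 4, with the ADDR_EXPR of a decl and the plain decl
   treated as the same base.  */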
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      result->reserve (3);

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      result->quick_push (temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case MODIFY_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case WITH_SIZE_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.off = 0;
	  break;
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  if (tree_fits_shwi_p (TREE_OPERAND (ref, 1)))
	    temp.off = tree_to_shwi (TREE_OPERAND (ref, 1));
	  break;
	case BIT_FIELD_REF:
	  /* Record bits and position.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& TREE_CODE (this_offset) == INTEGER_CST)
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    offset_int off
		      = (wi::to_offset (this_offset)
			 + wi::lrshift (wi::to_offset (bit_offset),
					LOG2_BITS_PER_UNIT));
		    if (wi::fits_shwi_p (off)
			/* Prohibit value-numbering zero offset components
			   of addresses the same before the pass folding
			   __builtin_object_size had a chance to run
			   (checking cfun->after_inlining does the
			   trick here).  */
			&& (TREE_CODE (orig) != ADDR_EXPR
			    || off != 0
			    || cfun->after_inlining))
		      temp.off = off.to_shwi ();
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* Record index as operand.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  /* Always record lower bounds and element size.  */
	  temp.op1 = array_ref_low_bound (ref);
	  temp.op2 = array_ref_element_size (ref);
	  if (TREE_CODE (temp.op0) == INTEGER_CST
	      && TREE_CODE (temp.op1) == INTEGER_CST
	      && TREE_CODE (temp.op2) == INTEGER_CST)
	    {
	      offset_int off = ((wi::to_offset (temp.op0)
				 - wi::to_offset (temp.op1))
				* wi::to_offset (temp.op2));
	      if (wi::fits_shwi_p (off))
		temp.off = off.to_shwi ();
	    }
	  break;
	case VAR_DECL:
	  if (DECL_HARD_REGISTER (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	  /* Canonicalize decls to MEM[&decl] which is what we end up with
	     when valueizing MEM[ptr] with ptr = &decl.  */
	  temp.opcode = MEM_REF;
	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
	  temp.off = 0;
	  result->safe_push (temp);
	  temp.opcode = ADDR_EXPR;
	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
	  temp.type = TREE_TYPE (temp.op0);
	  temp.off = -1;
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthrough.  */
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (IE they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration.  */
	case REALPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
	  || TREE_CODE (ref) == MODIFY_EXPR
	  || TREE_CODE (ref) == WITH_SIZE_EXPR
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
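
/* As a rough illustration (hypothetical struct layout), the reference
   s.a[i] decomposes into the operand string

       ARRAY_REF     <op0 = i_1, op1 = lower bound, op2 = element size>
       COMPONENT_REF <op0 = FIELD_DECL a>
       MEM_REF       <op0 = 0>
       ADDR_EXPR     <op0 = &s>

   with the decl s canonicalized to MEM[&s + 0] as described above, so
   that a later lookup of MEM[p_2] with p_2 = &s valueizes to the same
   operand string.  */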
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
	size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!tree_fits_uhwi_p (size_tree))
	size = -1;
      else
	size = tree_to_uhwi (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      vn_reference_op_t pop = &ops[i-1];
	      base = TREE_OPERAND (op->op0, 0);
	      if (pop->off == -1)
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  base_alias_set = get_deref_alias_set (op->op0);
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += tree_to_shwi (op->op1);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */

	    if (op->op1
		|| !tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
	      max_size = -1;
	    else
	      {
		offset += (tree_to_uhwi (DECL_FIELD_OFFSET (field))
			   * BITS_PER_UNIT);
		offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!tree_fits_shwi_p (op->op0)
	      || !tree_fits_shwi_p (op->op1)
	      || !tree_fits_shwi_p (op->op2))
	    max_size = -1;
	  else
	    {
	      HOST_WIDE_INT hindex = tree_to_shwi (op->op0);
	      hindex -= tree_to_shwi (op->op1);
	      hindex *= tree_to_shwi (op->op2);
	      hindex *= BITS_PER_UNIT;
	      offset += hindex;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case CONST_DECL:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  return true;
}
/* Copy the operations present in the call statement CALL into RESULT,
   a vector of vn_reference_op_s's.  */

static void
copy_reference_ops_from_call (gimple call,
			      vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);
  int lr;

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function, static chain and EH region, if any.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
    temp.op2 = size_int (lr);
  temp.off = -1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */

void
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  HOST_WIDE_INT addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
					     &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      offset_int off = offset_int::from (mem_op->op0, SIGNED);
      off += addr_offset;
      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
    }
}
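
/* E.g. (hypothetical operands) for the pair MEM[..., 4] / &s.f this
   rewrites it into MEM[..., 4 + unit-offset-of-f] / &s: the constant
   offset of .f is folded into the preceding MEM_REF and the address
   operand becomes the bare &s, canonicalizing equivalent addresses
   to one form.  */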
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS, looking
   through the SSA definition of the address.  Updates *I_P to point to
   the last element of the replacement.  */

static void
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple def_stmt;
  enum tree_code code;
  offset_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = offset_int::from (mem_op->op0, SIGNED);

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						 &addr_offset);
      if (!addr_base
	  || TREE_CODE (addr_base) != MEM_REF)
	return;

      off += addr_offset;
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
	  || TREE_CODE (ptroff) != INTEGER_CST)
	return;

      off += wi::to_offset (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (tree_fits_shwi_p (mem_op->op0))
    mem_op->off = tree_to_shwi (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}
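
/* E.g. (hypothetical SSA names), given p_2 = q_1 + 8 (POINTER_PLUS_EXPR),
   a reference MEM[p_2, 0] becomes MEM[q_1, 8]; the recursion above then
   keeps folding while the valueized address is itself another SSA name
   with a foldable definition, or an ADDR_EXPR handled by
   vn_reference_fold_indirect.  */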
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
	arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					 arg1 ? 2 : 1,
					 arg0->op0,
					 arg1 ? arg1->op0 : NULL);
	  if (folded
	      && TREE_CODE (folded) == NOP_EXPR)
	    folded = TREE_OPERAND (folded, 0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
	   && TREE_CODE (op->op0) == INTEGER_CST
	   && integer_zerop (op->op1)
	   && operands.length () == 2)
    {
      vn_reference_op_t arg0;
      arg0 = &operands[1];
      if (arg0->opcode == STRING_CST
	  && (TYPE_MODE (op->type)
	      == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
	  && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
	  && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
	  && tree_int_cst_sgn (op->op0) >= 0
	  && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
	return build_int_cst_type (op->type,
				   (TREE_STRING_POINTER (arg0->op0)
				    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}
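
/* E.g. a lookup of "foo"[2] (an ARRAY_REF with a constant index into a
   STRING_CST) folds to the character constant 'o', and a builtin call
   such as __builtin_strlen with a constant string argument may fold
   through build_call_expr to the invariant 3.  */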
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static vec<vn_reference_op_s>
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && orig[i - 1].opcode == MEM_REF)
	vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && orig[i - 1].opcode == MEM_REF)
	vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && vro->off == -1
	       && TREE_CODE (vro->op0) == INTEGER_CST
	       && TREE_CODE (vro->op1) == INTEGER_CST
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  offset_int off = ((wi::to_offset (vro->op0)
			     - wi::to_offset (vro->op1))
			    * wi::to_offset (vro->op2));
	  if (wi::fits_shwi_p (off))
	    vro->off = off.to_shwi ();
	}
    }

  return orig;
}
static vec<vn_reference_op_s>
valueize_refs (vec<vn_reference_op_s> orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}
static vec<vn_reference_op_s> shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
					      valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  vn_reference_s **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}
static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
		       unsigned int cnt, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  vn_reference_s **slot;
  hashval_t hash;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = vuse_ssa_val (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
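
/* The hash fixup above works because vn_reference_compute_hash adds
   SSA_NAME_VERSION (vuse) into the final hash value rather than mixing
   it into the incremental state: stepping the walk to a new VUSE only
   needs a subtract of the old version and an add of the new one, not a
   full rehash of the operand string.  */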
/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
					  alias_set_type set,
					  tree type,
					  vec<vn_reference_op_s,
					      va_heap> operands,
					  tree value)
{
  struct vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
				     operands.copy (), value, value_id);
}
1563 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1564 from the statement defining VUSE and if not successful tries to
1565 translate *REFP and VR_ through an aggregate copy at the definition
1566 of VUSE. */
1568 static void *
1569 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1570 bool disambiguate_only)
1572 vn_reference_t vr = (vn_reference_t)vr_;
1573 gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
1574 tree base;
1575 HOST_WIDE_INT offset, maxsize;
1576 static vec<vn_reference_op_s>
1577 lhs_ops = vNULL;
1578 ao_ref lhs_ref;
1579 bool lhs_ref_ok = false;
1581 /* First try to disambiguate after value-replacing in the definitions LHS. */
1582 if (is_gimple_assign (def_stmt))
1584 vec<vn_reference_op_s> tem;
1585 tree lhs = gimple_assign_lhs (def_stmt);
1586 bool valueized_anything = false;
1587 /* Avoid re-allocation overhead. */
1588 lhs_ops.truncate (0);
1589 copy_reference_ops_from_ref (lhs, &lhs_ops);
1590 tem = lhs_ops;
1591 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1592 gcc_assert (lhs_ops == tem);
1593 if (valueized_anything)
1595 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1596 get_alias_set (lhs),
1597 TREE_TYPE (lhs), lhs_ops);
1598 if (lhs_ref_ok
1599 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1600 return NULL;
1602 else
1604 ao_ref_init (&lhs_ref, lhs);
1605 lhs_ref_ok = true;
1608 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1609 && gimple_call_num_args (def_stmt) <= 4)
1611 /* For builtin calls valueize its arguments and call the
1612 alias oracle again. Valueization may improve points-to
1613 info of pointers and constify size and position arguments.
1614 Originally this was motivated by PR61034 which has
1615 conditional calls to free falsely clobbering ref because
1616 of imprecise points-to info of the argument. */
1617 tree oldargs[4];
1618 bool valueized_anything = false;
1619 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1621 oldargs[i] = gimple_call_arg (def_stmt, i);
1622 if (TREE_CODE (oldargs[i]) == SSA_NAME
1623 && VN_INFO (oldargs[i])->valnum != oldargs[i])
1625 gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
1626 valueized_anything = true;
1629 if (valueized_anything)
1631 bool res = call_may_clobber_ref_p_1 (def_stmt, ref);
1632 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1633 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1634 if (!res)
1635 return NULL;
1639 if (disambiguate_only)
1640 return (void *)-1;
1642 base = ao_ref_base (ref);
1643 offset = ref->offset;
1644 maxsize = ref->max_size;
1646 /* If we cannot constrain the size of the reference we cannot
1647 test if anything kills it. */
1648 if (maxsize == -1)
1649 return (void *)-1;
1651 /* We can't deduce anything useful from clobbers. */
1652 if (gimple_clobber_p (def_stmt))
1653 return (void *)-1;
1655 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1656 from that definition.
1657 1) Memset. */
1658 if (is_gimple_reg_type (vr->type)
1659 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1660 && integer_zerop (gimple_call_arg (def_stmt, 1))
1661 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1662 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1664 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1665 tree base2;
1666 HOST_WIDE_INT offset2, size2, maxsize2;
1667 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
1668 size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
1669 if ((unsigned HOST_WIDE_INT)size2 / 8
1670 == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
1671 && maxsize2 != -1
1672 && operand_equal_p (base, base2, 0)
1673 && offset2 <= offset
1674 && offset2 + size2 >= offset + maxsize)
1676 tree val = build_zero_cst (vr->type);
1677 return vn_reference_lookup_or_insert_for_pieces
1678 (vuse, vr->set, vr->type, vr->operands, val);
1682 /* 2) Assignment from an empty CONSTRUCTOR. */
1683 else if (is_gimple_reg_type (vr->type)
1684 && gimple_assign_single_p (def_stmt)
1685 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1686 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1688 tree base2;
1689 HOST_WIDE_INT offset2, size2, maxsize2;
1690 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1691 &offset2, &size2, &maxsize2);
1692 if (maxsize2 != -1
1693 && operand_equal_p (base, base2, 0)
1694 && offset2 <= offset
1695 && offset2 + size2 >= offset + maxsize)
1697 tree val = build_zero_cst (vr->type);
1698 return vn_reference_lookup_or_insert_for_pieces
1699 (vuse, vr->set, vr->type, vr->operands, val);
1703 /* 3) Assignment from a constant. We can use folds native encode/interpret
1704 routines to extract the assigned bits. */
1705 else if (vn_walk_kind == VN_WALKREWRITE
1706 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1707 && ref->size == maxsize
1708 && maxsize % BITS_PER_UNIT == 0
1709 && offset % BITS_PER_UNIT == 0
1710 && is_gimple_reg_type (vr->type)
1711 && gimple_assign_single_p (def_stmt)
1712 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
1714 tree base2;
1715 HOST_WIDE_INT offset2, size2, maxsize2;
1716 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1717 &offset2, &size2, &maxsize2);
1718 if (maxsize2 != -1
1719 && maxsize2 == size2
1720 && size2 % BITS_PER_UNIT == 0
1721 && offset2 % BITS_PER_UNIT == 0
1722 && operand_equal_p (base, base2, 0)
1723 && offset2 <= offset
1724 && offset2 + size2 >= offset + maxsize)
1726 /* We support up to 512-bit values (for V8DFmode). */
1727 unsigned char buffer[64];
1728 int len;
1730 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1731 buffer, sizeof (buffer));
1732 if (len > 0)
1734 tree val = native_interpret_expr (vr->type,
1735 buffer
1736 + ((offset - offset2)
1737 / BITS_PER_UNIT),
1738 ref->size / BITS_PER_UNIT);
1739 if (val)
1740 return vn_reference_lookup_or_insert_for_pieces
1741 (vuse, vr->set, vr->type, vr->operands, val);
1746 /* 4) Assignment from an SSA name which definition we may be able
1747 to access pieces from. */
1748 else if (ref->size == maxsize
1749 && is_gimple_reg_type (vr->type)
1750 && gimple_assign_single_p (def_stmt)
1751 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1753 tree rhs1 = gimple_assign_rhs1 (def_stmt);
1754 gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
1755 if (is_gimple_assign (def_stmt2)
1756 && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1757 || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1758 && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1760 tree base2;
1761 HOST_WIDE_INT offset2, size2, maxsize2, off;
1762 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1763 &offset2, &size2, &maxsize2);
1764 off = offset - offset2;
1765 if (maxsize2 != -1
1766 && maxsize2 == size2
1767 && operand_equal_p (base, base2, 0)
1768 && offset2 <= offset
1769 && offset2 + size2 >= offset + maxsize)
1771 tree val = NULL_TREE;
1772 HOST_WIDE_INT elsz
1773 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
1774 if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
1776 if (off == 0)
1777 val = gimple_assign_rhs1 (def_stmt2);
1778 else if (off == elsz)
1779 val = gimple_assign_rhs2 (def_stmt2);
1781 else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1782 && off % elsz == 0)
1784 tree ctor = gimple_assign_rhs1 (def_stmt2);
1785 unsigned i = off / elsz;
1786 if (i < CONSTRUCTOR_NELTS (ctor))
1788 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
1789 if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
1791 if (TREE_CODE (TREE_TYPE (elt->value))
1792 != VECTOR_TYPE)
1793 val = elt->value;
1797 if (val)
1798 return vn_reference_lookup_or_insert_for_pieces
1799 (vuse, vr->set, vr->type, vr->operands, val);
1804 /* 5) For aggregate copies translate the reference through them if
1805 the copy kills ref. */
1806 else if (vn_walk_kind == VN_WALKREWRITE
1807 && gimple_assign_single_p (def_stmt)
1808 && (DECL_P (gimple_assign_rhs1 (def_stmt))
1809 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
1810 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1812 tree base2;
1813 HOST_WIDE_INT offset2, size2, maxsize2;
1814 int i, j;
1815 auto_vec<vn_reference_op_s> rhs;
1816 vn_reference_op_t vro;
1817 ao_ref r;
1819 if (!lhs_ref_ok)
1820 return (void *)-1;
1822 /* See if the assignment kills REF. */
1823 base2 = ao_ref_base (&lhs_ref);
1824 offset2 = lhs_ref.offset;
1825 size2 = lhs_ref.size;
1826 maxsize2 = lhs_ref.max_size;
1827 if (maxsize2 == -1
1828 || (base != base2 && !operand_equal_p (base, base2, 0))
1829 || offset2 > offset
1830 || offset2 + size2 < offset + maxsize)
1831 return (void *)-1;
1833 /* Find the common base of ref and the lhs. lhs_ops already
1834 contains valueized operands for the lhs. */
1835 i = vr->operands.length () - 1;
1836 j = lhs_ops.length () - 1;
1837 while (j >= 0 && i >= 0
1838 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
1840 i--;
1841 j--;
1844 /* ??? The innermost op should always be a MEM_REF and we already
1845 checked that the assignment to the lhs kills vr. Thus for
1846 aggregate copies using char[] types the vn_reference_op_eq
1847 may fail when comparing types for compatibility. But we really
1848 don't care here - further lookups with the rewritten operands
1849 will simply fail if we messed up types too badly. */
1850 if (j == 0 && i >= 0
1851 && lhs_ops[0].opcode == MEM_REF
1852 && lhs_ops[0].off != -1
1853 && (lhs_ops[0].off == vr->operands[i].off))
1854 i--, j--;
1856 /* i now points to the first additional op.
1857 ??? LHS may not be completely contained in VR, one or more
1858 VIEW_CONVERT_EXPRs could be in its way. We could at least
1859 try handling outermost VIEW_CONVERT_EXPRs. */
1860 if (j != -1)
1861 return (void *)-1;
1863 /* Now re-write REF to be based on the rhs of the assignment. */
1864 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1865 /* We need to pre-pend vr->operands[0..i] to rhs. */
1866 vec<vn_reference_op_s> old = vr->operands;
1867 if (i + 1 + rhs.length () > vr->operands.length ())
1869 vr->operands.safe_grow (i + 1 + rhs.length ());
1870 if (old == shared_lookup_references)
1871 shared_lookup_references = vr->operands;
1873 else
1874 vr->operands.truncate (i + 1 + rhs.length ());
1875 FOR_EACH_VEC_ELT (rhs, j, vro)
1876 vr->operands[i + 1 + j] = *vro;
1877 vr->operands = valueize_refs (vr->operands);
1878 if (old == shared_lookup_references)
1879 shared_lookup_references = vr->operands;
1880 vr->hashcode = vn_reference_compute_hash (vr);
1882 /* Adjust *ref from the new operands. */
1883 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1884 return (void *)-1;
1885 /* This can happen with bitfields. */
1886 if (ref->size != r.size)
1887 return (void *)-1;
1888 *ref = r;
1890 /* Do not update last seen VUSE after translating. */
1891 last_vuse_ptr = NULL;
1893 /* Keep looking for the adjusted *REF / VR pair. */
1894 return NULL;
1897 /* 6) For memcpy copies translate the reference through them if
1898 the copy kills ref. */
1899 else if (vn_walk_kind == VN_WALKREWRITE
1900 && is_gimple_reg_type (vr->type)
1901 /* ??? Handle BCOPY as well. */
1902 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
1903 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
1904 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
1905 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
1906 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
1907 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
1908 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
1909 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
1911 tree lhs, rhs;
1912 ao_ref r;
1913 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
1914 vn_reference_op_s op;
1915 HOST_WIDE_INT at;
1918 /* Only handle non-variable, addressable refs. */
1919 if (ref->size != maxsize
1920 || offset % BITS_PER_UNIT != 0
1921 || ref->size % BITS_PER_UNIT != 0)
1922 return (void *)-1;
1924 /* Extract a pointer base and an offset for the destination. */
1925 lhs = gimple_call_arg (def_stmt, 0);
1926 lhs_offset = 0;
1927 if (TREE_CODE (lhs) == SSA_NAME)
1928 lhs = SSA_VAL (lhs);
1929 if (TREE_CODE (lhs) == ADDR_EXPR)
1931 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
1932 &lhs_offset);
1933 if (!tem)
1934 return (void *)-1;
1935 if (TREE_CODE (tem) == MEM_REF
1936 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
1938 lhs = TREE_OPERAND (tem, 0);
1939 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
1941 else if (DECL_P (tem))
1942 lhs = build_fold_addr_expr (tem);
1943 else
1944 return (void *)-1;
1946 if (TREE_CODE (lhs) != SSA_NAME
1947 && TREE_CODE (lhs) != ADDR_EXPR)
1948 return (void *)-1;
1950 /* Extract a pointer base and an offset for the source. */
1951 rhs = gimple_call_arg (def_stmt, 1);
1952 rhs_offset = 0;
1953 if (TREE_CODE (rhs) == SSA_NAME)
1954 rhs = SSA_VAL (rhs);
1955 if (TREE_CODE (rhs) == ADDR_EXPR)
1957 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
1958 &rhs_offset);
1959 if (!tem)
1960 return (void *)-1;
1961 if (TREE_CODE (tem) == MEM_REF
1962 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
1964 rhs = TREE_OPERAND (tem, 0);
1965 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
1967 else if (DECL_P (tem))
1968 rhs = build_fold_addr_expr (tem);
1969 else
1970 return (void *)-1;
1972 if (TREE_CODE (rhs) != SSA_NAME
1973 && TREE_CODE (rhs) != ADDR_EXPR)
1974 return (void *)-1;
1976 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
1978 /* The bases of the destination and the references have to agree. */
1979 if ((TREE_CODE (base) != MEM_REF
1980 && !DECL_P (base))
1981 || (TREE_CODE (base) == MEM_REF
1982 && (TREE_OPERAND (base, 0) != lhs
1983 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
1984 || (DECL_P (base)
1985 && (TREE_CODE (lhs) != ADDR_EXPR
1986 || TREE_OPERAND (lhs, 0) != base)))
1987 return (void *)-1;
1989 /* And the access has to be contained within the memcpy destination. */
1990 at = offset / BITS_PER_UNIT;
1991 if (TREE_CODE (base) == MEM_REF)
1992 at += tree_to_uhwi (TREE_OPERAND (base, 1));
1993 if (lhs_offset > at
1994 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
1995 return (void *)-1;
1997 /* Make room for 2 operands in the new reference. */
1998 if (vr->operands.length () < 2)
2000 vec<vn_reference_op_s> old = vr->operands;
2001 vr->operands.safe_grow_cleared (2);
2002 if (old == shared_lookup_references
2003 && vr->operands != old)
2004 shared_lookup_references = vr->operands;
2006 else
2007 vr->operands.truncate (2);
2009 /* The looked-through reference is a simple MEM_REF. */
2010 memset (&op, 0, sizeof (op));
2011 op.type = vr->type;
2012 op.opcode = MEM_REF;
2013 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2014 op.off = at - lhs_offset + rhs_offset;
2015 vr->operands[0] = op;
2016 op.type = TREE_TYPE (rhs);
2017 op.opcode = TREE_CODE (rhs);
2018 op.op0 = rhs;
2019 op.off = -1;
2020 vr->operands[1] = op;
2021 vr->hashcode = vn_reference_compute_hash (vr);
2023 /* Adjust *ref from the new operands. */
2024 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2025 return (void *)-1;
2026 /* This can happen with bitfields. */
2027 if (ref->size != r.size)
2028 return (void *)-1;
2029 *ref = r;
2031 /* Do not update last seen VUSE after translating. */
2032 last_vuse_ptr = NULL;
2034 /* Keep looking for the adjusted *REF / VR pair. */
2035 return NULL;
2038 /* Bail out and stop walking. */
2039 return (void *)-1;
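/* Illustrative sketch of case 6) above (hypothetical GIMPLE): for

     memcpy (&a, &b, sizeof (a));
     x_1 = a;

   the lookup of A is rewritten into a lookup of a MEM_REF based on
   &b at the corresponding offset, so the walk can continue past the
   call and x_1 may be value numbered to whatever was last stored
   through B.  */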
2042 /* Lookup a reference operation by its parts, in the current hash table.
2043 Returns the resulting value number if it exists in the hash table,
2044 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2045 vn_reference_t stored in the hashtable if something is found. */
2047 tree
2048 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2049 vec<vn_reference_op_s> operands,
2050 vn_reference_t *vnresult, vn_lookup_kind kind)
2052 struct vn_reference_s vr1;
2053 vn_reference_t tmp;
2054 tree cst;
2056 if (!vnresult)
2057 vnresult = &tmp;
2058 *vnresult = NULL;
2060 vr1.vuse = vuse_ssa_val (vuse);
2061 shared_lookup_references.truncate (0);
2062 shared_lookup_references.safe_grow (operands.length ());
2063 memcpy (shared_lookup_references.address (),
2064 operands.address (),
2065 sizeof (vn_reference_op_s)
2066 * operands.length ());
2067 vr1.operands = operands = shared_lookup_references
2068 = valueize_refs (shared_lookup_references);
2069 vr1.type = type;
2070 vr1.set = set;
2071 vr1.hashcode = vn_reference_compute_hash (&vr1);
2072 if ((cst = fully_constant_vn_reference_p (&vr1)))
2073 return cst;
2075 vn_reference_lookup_1 (&vr1, vnresult);
2076 if (!*vnresult
2077 && kind != VN_NOWALK
2078 && vr1.vuse)
2080 ao_ref r;
2081 vn_walk_kind = kind;
2082 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2083 *vnresult =
2084 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2085 vn_reference_lookup_2,
2086 vn_reference_lookup_3, &vr1);
2087 gcc_checking_assert (vr1.operands == shared_lookup_references);
2090 if (*vnresult)
2091 return (*vnresult)->result;
2093 return NULL_TREE;
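/* A minimal sketch of using the piecewise lookup above (operand
   vector construction elided, all names hypothetical):

     vn_reference_t res;
     tree val = vn_reference_lookup_pieces (gimple_vuse (stmt), set, type,
					    ops, &res, VN_WALKREWRITE);

   VAL is the value number or NULL_TREE, and RES points to the
   hashtable entry if one was found.  */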
2096 /* Lookup OP in the current hash table, and return the resulting value
2097 number if it exists in the hash table. Return NULL_TREE if it does
2098 not exist in the hash table or if the result field of the structure
2099 was NULL. VNRESULT will be filled in with the vn_reference_t
2100 stored in the hashtable if one exists. */
2102 tree
2103 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2104 vn_reference_t *vnresult)
2106 vec<vn_reference_op_s> operands;
2107 struct vn_reference_s vr1;
2108 tree cst;
2109 bool valueized_anything;
2111 if (vnresult)
2112 *vnresult = NULL;
2114 vr1.vuse = vuse_ssa_val (vuse);
2115 vr1.operands = operands
2116 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2117 vr1.type = TREE_TYPE (op);
2118 vr1.set = get_alias_set (op);
2119 vr1.hashcode = vn_reference_compute_hash (&vr1);
2120 if ((cst = fully_constant_vn_reference_p (&vr1)))
2121 return cst;
2123 if (kind != VN_NOWALK
2124 && vr1.vuse)
2126 vn_reference_t wvnresult;
2127 ao_ref r;
2128 /* Make sure to use a valueized reference if we valueized anything.
2129 Otherwise preserve the full reference for advanced TBAA. */
2130 if (!valueized_anything
2131 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2132 vr1.operands))
2133 ao_ref_init (&r, op);
2134 vn_walk_kind = kind;
2135 wvnresult =
2136 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2137 vn_reference_lookup_2,
2138 vn_reference_lookup_3, &vr1);
2139 gcc_checking_assert (vr1.operands == shared_lookup_references);
2140 if (wvnresult)
2142 if (vnresult)
2143 *vnresult = wvnresult;
2144 return wvnresult->result;
2147 return NULL_TREE;
2150 return vn_reference_lookup_1 (&vr1, vnresult);
2153 /* Lookup CALL in the current hash table and return the entry in
2154 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2156 void
2157 vn_reference_lookup_call (gimple call, vn_reference_t *vnresult,
2158 vn_reference_t vr)
2160 if (vnresult)
2161 *vnresult = NULL;
2163 tree vuse = gimple_vuse (call);
2165 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2166 vr->operands = valueize_shared_reference_ops_from_call (call);
2167 vr->type = gimple_expr_type (call);
2168 vr->set = 0;
2169 vr->hashcode = vn_reference_compute_hash (vr);
2170 vn_reference_lookup_1 (vr, vnresult);
2173 /* Insert OP into the current hash table with a value number of
2174 RESULT, and return the resulting reference structure we created. */
2176 static vn_reference_t
2177 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2179 vn_reference_s **slot;
2180 vn_reference_t vr1;
2181 bool tem;
2183 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2184 if (TREE_CODE (result) == SSA_NAME)
2185 vr1->value_id = VN_INFO (result)->value_id;
2186 else
2187 vr1->value_id = get_or_alloc_constant_value_id (result);
2188 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2189 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2190 vr1->type = TREE_TYPE (op);
2191 vr1->set = get_alias_set (op);
2192 vr1->hashcode = vn_reference_compute_hash (vr1);
2193 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2194 vr1->result_vdef = vdef;
2196 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2197 INSERT);
2199 /* Because we look up stores using vuses, and value number failures
2200 using the vdefs (see visit_reference_op_store for how and why),
2201 it's possible that on failure we may try to insert an already
2202 inserted store. This is not wrong, there is no ssa name for a
2203 store that we could use as a differentiator anyway. Thus, unlike
2204 the other lookup functions, you cannot gcc_assert (!*slot)
2205 here. */
2207 /* But free the old slot in case of a collision. */
2208 if (*slot)
2209 free_reference (*slot);
2211 *slot = vr1;
2212 return vr1;
2215 /* Insert a reference by its pieces into the current hash table with
2216 a value number of RESULT. Return the resulting reference
2217 structure we created. */
2219 vn_reference_t
2220 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2221 vec<vn_reference_op_s> operands,
2222 tree result, unsigned int value_id)
2225 vn_reference_s **slot;
2226 vn_reference_t vr1;
2228 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2229 vr1->value_id = value_id;
2230 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2231 vr1->operands = valueize_refs (operands);
2232 vr1->type = type;
2233 vr1->set = set;
2234 vr1->hashcode = vn_reference_compute_hash (vr1);
2235 if (result && TREE_CODE (result) == SSA_NAME)
2236 result = SSA_VAL (result);
2237 vr1->result = result;
2239 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2240 INSERT);
2242 /* At this point we should have all the things inserted that we have
2243 seen before, and we should never try inserting something that
2244 already exists. */
2245 gcc_assert (!*slot);
2246 if (*slot)
2247 free_reference (*slot);
2249 *slot = vr1;
2250 return vr1;
2253 /* Compute and return the hash value for nary operation VNO1. */
2255 static hashval_t
2256 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2258 inchash::hash hstate;
2259 unsigned i;
2261 for (i = 0; i < vno1->length; ++i)
2262 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2263 vno1->op[i] = SSA_VAL (vno1->op[i]);
2265 if (vno1->length == 2
2266 && commutative_tree_code (vno1->opcode)
2267 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2269 tree temp = vno1->op[0];
2270 vno1->op[0] = vno1->op[1];
2271 vno1->op[1] = temp;
2274 hstate.add_int (vno1->opcode);
2275 for (i = 0; i < vno1->length; ++i)
2276 inchash::add_expr (vno1->op[i], hstate);
2278 return hstate.end ();
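/* Note that the canonicalization above makes, for example, a_1 + b_2
   and b_2 + a_1 hash identically (and compare equal in vn_nary_op_eq),
   so both expressions map to a single value number.  */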
2281 /* Compare nary operations VNO1 and VNO2 and return true if they are
2282 equivalent. */
2284 bool
2285 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2287 unsigned i;
2289 if (vno1->hashcode != vno2->hashcode)
2290 return false;
2292 if (vno1->length != vno2->length)
2293 return false;
2295 if (vno1->opcode != vno2->opcode
2296 || !types_compatible_p (vno1->type, vno2->type))
2297 return false;
2299 for (i = 0; i < vno1->length; ++i)
2300 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2301 return false;
2303 return true;
2306 /* Initialize VNO from the pieces provided. */
2308 static void
2309 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2310 enum tree_code code, tree type, tree *ops)
2312 vno->opcode = code;
2313 vno->length = length;
2314 vno->type = type;
2315 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2318 /* Initialize VNO from OP. */
2320 static void
2321 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2323 unsigned i;
2325 vno->opcode = TREE_CODE (op);
2326 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2327 vno->type = TREE_TYPE (op);
2328 for (i = 0; i < vno->length; ++i)
2329 vno->op[i] = TREE_OPERAND (op, i);
2332 /* Return the number of operands for a vn_nary ops structure from STMT. */
2334 static unsigned int
2335 vn_nary_length_from_stmt (gimple stmt)
2337 switch (gimple_assign_rhs_code (stmt))
2339 case REALPART_EXPR:
2340 case IMAGPART_EXPR:
2341 case VIEW_CONVERT_EXPR:
2342 return 1;
2344 case BIT_FIELD_REF:
2345 return 3;
2347 case CONSTRUCTOR:
2348 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2350 default:
2351 return gimple_num_ops (stmt) - 1;
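/* For example (hypothetical statements):

     x_1 = BIT_FIELD_REF <y_2, 8, 0>;  -> 3
     x_3 = -y_4;                       -> gimple_num_ops () - 1 == 1
     x_5 = {e_6, e_7};                 -> one per CONSTRUCTOR element,
                                          here 2.  */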
2355 /* Initialize VNO from STMT. */
2357 static void
2358 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2360 unsigned i;
2362 vno->opcode = gimple_assign_rhs_code (stmt);
2363 vno->type = gimple_expr_type (stmt);
2364 switch (vno->opcode)
2366 case REALPART_EXPR:
2367 case IMAGPART_EXPR:
2368 case VIEW_CONVERT_EXPR:
2369 vno->length = 1;
2370 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2371 break;
2373 case BIT_FIELD_REF:
2374 vno->length = 3;
2375 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2376 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2377 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2378 break;
2380 case CONSTRUCTOR:
2381 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2382 for (i = 0; i < vno->length; ++i)
2383 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2384 break;
2386 default:
2387 gcc_checking_assert (!gimple_assign_single_p (stmt));
2388 vno->length = gimple_num_ops (stmt) - 1;
2389 for (i = 0; i < vno->length; ++i)
2390 vno->op[i] = gimple_op (stmt, i + 1);
2394 /* Compute the hashcode for VNO and look for it in the hash table;
2395 return the resulting value number if it exists in the hash table.
2396 Return NULL_TREE if it does not exist in the hash table or if the
2397 result field of the operation is NULL. VNRESULT will contain the
2398 vn_nary_op_t from the hashtable if it exists. */
2400 static tree
2401 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2403 vn_nary_op_s **slot;
2405 if (vnresult)
2406 *vnresult = NULL;
2408 vno->hashcode = vn_nary_op_compute_hash (vno);
2409 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2410 NO_INSERT);
2411 if (!slot && current_info == optimistic_info)
2412 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2413 NO_INSERT);
2414 if (!slot)
2415 return NULL_TREE;
2416 if (vnresult)
2417 *vnresult = *slot;
2418 return (*slot)->result;
2421 /* Lookup an n-ary operation by its pieces and return the resulting value
2422 number if it exists in the hash table. Return NULL_TREE if it does
2423 not exist in the hash table or if the result field of the operation
2424 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2425 if it exists. */
2427 tree
2428 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2429 tree type, tree *ops, vn_nary_op_t *vnresult)
2431 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2432 sizeof_vn_nary_op (length));
2433 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2434 return vn_nary_op_lookup_1 (vno1, vnresult);
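/* A minimal sketch of a piecewise lookup (names hypothetical): to ask
   whether a_1 + b_2 already has a value number without a statement at
   hand, where A and B hold the SSA names, one can do

     tree ops[2] = { a, b };
     tree val = vn_nary_op_lookup_pieces (2, PLUS_EXPR, type, ops, NULL);  */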
2437 /* Lookup OP in the current hash table, and return the resulting value
2438 number if it exists in the hash table. Return NULL_TREE if it does
2439 not exist in the hash table or if the result field of the operation
2440 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2441 if it exists. */
2443 tree
2444 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2446 vn_nary_op_t vno1
2447 = XALLOCAVAR (struct vn_nary_op_s,
2448 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2449 init_vn_nary_op_from_op (vno1, op);
2450 return vn_nary_op_lookup_1 (vno1, vnresult);
2453 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2454 value number if it exists in the hash table. Return NULL_TREE if
2455 it does not exist in the hash table. VNRESULT will contain the
2456 vn_nary_op_t from the hashtable if it exists. */
2458 tree
2459 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2461 vn_nary_op_t vno1
2462 = XALLOCAVAR (struct vn_nary_op_s,
2463 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2464 init_vn_nary_op_from_stmt (vno1, stmt);
2465 return vn_nary_op_lookup_1 (vno1, vnresult);
2468 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2470 static vn_nary_op_t
2471 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2473 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2476 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2477 obstack. */
2479 static vn_nary_op_t
2480 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2482 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2483 &current_info->nary_obstack);
2485 vno1->value_id = value_id;
2486 vno1->length = length;
2487 vno1->result = result;
2489 return vno1;
2492 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2493 VNO->HASHCODE first. */
2495 static vn_nary_op_t
2496 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2497 bool compute_hash)
2499 vn_nary_op_s **slot;
2501 if (compute_hash)
2502 vno->hashcode = vn_nary_op_compute_hash (vno);
2504 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2505 gcc_assert (!*slot);
2507 *slot = vno;
2508 return vno;
2511 /* Insert an n-ary operation into the current hash table using its
2512 pieces. Return the vn_nary_op_t structure we created and put in
2513 the hashtable. */
2515 vn_nary_op_t
2516 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2517 tree type, tree *ops,
2518 tree result, unsigned int value_id)
2520 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2521 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2522 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2525 /* Insert OP into the current hash table with a value number of
2526 RESULT. Return the vn_nary_op_t structure we created and put in
2527 the hashtable. */
2529 vn_nary_op_t
2530 vn_nary_op_insert (tree op, tree result)
2532 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2533 vn_nary_op_t vno1;
2535 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2536 init_vn_nary_op_from_op (vno1, op);
2537 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2540 /* Insert the rhs of STMT into the current hash table with a value number of
2541 RESULT. */
2543 vn_nary_op_t
2544 vn_nary_op_insert_stmt (gimple stmt, tree result)
2546 vn_nary_op_t vno1
2547 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2548 result, VN_INFO (result)->value_id);
2549 init_vn_nary_op_from_stmt (vno1, stmt);
2550 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2553 /* Compute a hashcode for PHI operation VP1 and return it. */
2555 static inline hashval_t
2556 vn_phi_compute_hash (vn_phi_t vp1)
2558 inchash::hash hstate (vp1->block->index);
2559 int i;
2560 tree phi1op;
2561 tree type;
2563 /* If all PHI arguments are constants we need to distinguish
2564 the PHI node via its type. */
2565 type = vp1->type;
2566 hstate.merge_hash (vn_hash_type (type));
2568 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2570 if (phi1op == VN_TOP)
2571 continue;
2572 inchash::add_expr (phi1op, hstate);
2575 return hstate.end ();
2578 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2580 static int
2581 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2583 if (vp1->hashcode != vp2->hashcode)
2584 return false;
2586 if (vp1->block == vp2->block)
2588 int i;
2589 tree phi1op;
2591 /* If the PHI nodes do not have compatible types
2592 they are not the same. */
2593 if (!types_compatible_p (vp1->type, vp2->type))
2594 return false;
2596 /* Any phi in the same block will have its arguments in the
2597 same edge order, because of how we store phi nodes. */
2598 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2600 tree phi2op = vp2->phiargs[i];
2601 if (phi1op == VN_TOP || phi2op == VN_TOP)
2602 continue;
2603 if (!expressions_equal_p (phi1op, phi2op))
2604 return false;
2606 return true;
2608 return false;
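/* Illustrative example: given (hypothetical GIMPLE)

     x_1 = PHI <a_2(3), b_3(4)>
     y_4 = PHI <a_2(3), b_3(4)>

   in the same block, both entries hash and compare equal, so a lookup
   of the second PHI returns the result recorded for the first.  */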
2611 static vec<tree> shared_lookup_phiargs;
2613 /* Lookup PHI in the current hash table, and return the resulting
2614 value number if it exists in the hash table. Return NULL_TREE if
2615 it does not exist in the hash table. */
2617 static tree
2618 vn_phi_lookup (gimple phi)
2620 vn_phi_s **slot;
2621 struct vn_phi_s vp1;
2622 unsigned i;
2624 shared_lookup_phiargs.truncate (0);
2626 /* Canonicalize the SSA_NAME's to their value number. */
2627 for (i = 0; i < gimple_phi_num_args (phi); i++)
2629 tree def = PHI_ARG_DEF (phi, i);
2630 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2631 shared_lookup_phiargs.safe_push (def);
2633 vp1.type = TREE_TYPE (gimple_phi_result (phi));
2634 vp1.phiargs = shared_lookup_phiargs;
2635 vp1.block = gimple_bb (phi);
2636 vp1.hashcode = vn_phi_compute_hash (&vp1);
2637 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2638 NO_INSERT);
2639 if (!slot && current_info == optimistic_info)
2640 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2641 NO_INSERT);
2642 if (!slot)
2643 return NULL_TREE;
2644 return (*slot)->result;
2647 /* Insert PHI into the current hash table with a value number of
2648 RESULT. */
2650 static vn_phi_t
2651 vn_phi_insert (gimple phi, tree result)
2653 vn_phi_s **slot;
2654 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2655 unsigned i;
2656 vec<tree> args = vNULL;
2658 /* Canonicalize the SSA_NAME's to their value number. */
2659 for (i = 0; i < gimple_phi_num_args (phi); i++)
2661 tree def = PHI_ARG_DEF (phi, i);
2662 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2663 args.safe_push (def);
2665 vp1->value_id = VN_INFO (result)->value_id;
2666 vp1->type = TREE_TYPE (gimple_phi_result (phi));
2667 vp1->phiargs = args;
2668 vp1->block = gimple_bb (phi);
2669 vp1->result = result;
2670 vp1->hashcode = vn_phi_compute_hash (vp1);
2672 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
2674 /* Because we iterate over phi operations more than once, it's
2675 possible the slot might already exist here, hence no assert. */
2676 *slot = vp1;
2677 return vp1;
2681 /* Print set of components in strongly connected component SCC to OUT. */
2683 static void
2684 print_scc (FILE *out, vec<tree> scc)
2686 tree var;
2687 unsigned int i;
2689 fprintf (out, "SCC consists of:");
2690 FOR_EACH_VEC_ELT (scc, i, var)
2692 fprintf (out, " ");
2693 print_generic_expr (out, var, 0);
2695 fprintf (out, "\n");
2698 /* Set the value number of FROM to TO, return true if it has changed
2699 as a result. */
2701 static inline bool
2702 set_ssa_val_to (tree from, tree to)
2704 tree currval = SSA_VAL (from);
2705 HOST_WIDE_INT toff, coff;
2707 /* The only thing we allow as value numbers are ssa_names
2708 and invariants. So assert that here. We don't allow VN_TOP
2709 as visiting a stmt should produce a value-number other than
2710 that.
2711 ??? Still VN_TOP can happen for unreachable code, so force
2712 it to varying in that case. Not all code is prepared to
2713 get VN_TOP on valueization. */
2714 if (to == VN_TOP)
2716 if (dump_file && (dump_flags & TDF_DETAILS))
2717 fprintf (dump_file, "Forcing value number to varying on "
2718 "receiving VN_TOP\n");
2719 to = from;
2722 gcc_assert (to != NULL_TREE
2723 && (TREE_CODE (to) == SSA_NAME
2724 || is_gimple_min_invariant (to)));
2726 if (from != to)
2728 if (currval == from)
2730 if (dump_file && (dump_flags & TDF_DETAILS))
2732 fprintf (dump_file, "Not changing value number of ");
2733 print_generic_expr (dump_file, from, 0);
2734 fprintf (dump_file, " from VARYING to ");
2735 print_generic_expr (dump_file, to, 0);
2736 fprintf (dump_file, "\n");
2738 return false;
2740 else if (TREE_CODE (to) == SSA_NAME
2741 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2742 to = from;
2745 if (dump_file && (dump_flags & TDF_DETAILS))
2747 fprintf (dump_file, "Setting value number of ");
2748 print_generic_expr (dump_file, from, 0);
2749 fprintf (dump_file, " to ");
2750 print_generic_expr (dump_file, to, 0);
2753 if (currval != to
2754 && !operand_equal_p (currval, to, 0)
2755 /* ??? For addresses involving volatile objects or types operand_equal_p
2756 does not reliably detect ADDR_EXPRs as equal. We know we are only
2757 getting invariant gimple addresses here, so can use
2758 get_addr_base_and_unit_offset to do this comparison. */
2759 && !(TREE_CODE (currval) == ADDR_EXPR
2760 && TREE_CODE (to) == ADDR_EXPR
2761 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
2762 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
2763 && coff == toff))
2765 VN_INFO (from)->valnum = to;
2766 if (dump_file && (dump_flags & TDF_DETAILS))
2767 fprintf (dump_file, " (changed)\n");
2768 return true;
2770 if (dump_file && (dump_flags & TDF_DETAILS))
2771 fprintf (dump_file, "\n");
2772 return false;
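/* For example, once i_1 = 2 + 3 has been simplified, the visit does
   set_ssa_val_to (i_1, build_int_cst (integer_type_node, 5)); a later
   call recording the same value returns false, which is how the SCC
   iteration detects convergence.  */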
2775 /* Mark as processed all the definitions in the defining stmt of USE, or
2776 the USE itself. */
2778 static void
2779 mark_use_processed (tree use)
2781 ssa_op_iter iter;
2782 def_operand_p defp;
2783 gimple stmt = SSA_NAME_DEF_STMT (use);
2785 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
2787 VN_INFO (use)->use_processed = true;
2788 return;
2791 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2793 tree def = DEF_FROM_PTR (defp);
2795 VN_INFO (def)->use_processed = true;
2799 /* Set all definitions in STMT to value number to themselves.
2800 Return true if a value number changed. */
2802 static bool
2803 defs_to_varying (gimple stmt)
2805 bool changed = false;
2806 ssa_op_iter iter;
2807 def_operand_p defp;
2809 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2811 tree def = DEF_FROM_PTR (defp);
2812 changed |= set_ssa_val_to (def, def);
2814 return changed;
2817 static bool expr_has_constants (tree expr);
2819 /* Visit a copy between LHS and RHS, return true if the value number
2820 changed. */
2822 static bool
2823 visit_copy (tree lhs, tree rhs)
2825 /* The copied-from name may have a more interesting constant-filled
2826 expression than our plain SSA name RHS, so inherit its info. */
2827 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2828 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2830 /* And finally valueize. */
2831 rhs = SSA_VAL (rhs);
2833 return set_ssa_val_to (lhs, rhs);
2836 /* Visit a nary operator RHS, value number it, and return true if the
2837 value number of LHS has changed as a result. */
2839 static bool
2840 visit_nary_op (tree lhs, gimple stmt)
2842 bool changed = false;
2843 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2845 if (result)
2846 changed = set_ssa_val_to (lhs, result);
2847 else
2849 changed = set_ssa_val_to (lhs, lhs);
2850 vn_nary_op_insert_stmt (stmt, lhs);
2853 return changed;
2856 /* Visit a call STMT storing into LHS. Return true if the value number
2857 of the LHS has changed as a result. */
2859 static bool
2860 visit_reference_op_call (tree lhs, gimple stmt)
2862 bool changed = false;
2863 struct vn_reference_s vr1;
2864 vn_reference_t vnresult = NULL;
2865 tree vdef = gimple_vdef (stmt);
2867 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
2868 if (lhs && TREE_CODE (lhs) != SSA_NAME)
2869 lhs = NULL_TREE;
2871 vn_reference_lookup_call (stmt, &vnresult, &vr1);
2872 if (vnresult)
2874 if (vnresult->result_vdef && vdef)
2875 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
2877 if (!vnresult->result && lhs)
2878 vnresult->result = lhs;
2880 if (vnresult->result && lhs)
2882 changed |= set_ssa_val_to (lhs, vnresult->result);
2884 if (VN_INFO (vnresult->result)->has_constants)
2885 VN_INFO (lhs)->has_constants = true;
2888 else
2890 vn_reference_t vr2;
2891 vn_reference_s **slot;
2892 if (vdef)
2893 changed |= set_ssa_val_to (vdef, vdef);
2894 if (lhs)
2895 changed |= set_ssa_val_to (lhs, lhs);
2896 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2897 vr2->vuse = vr1.vuse;
2898 /* As we are not walking the virtual operand chain we know the
2899 shared_lookup_references are still original so we can re-use
2900 them here. */
2901 vr2->operands = vr1.operands.copy ();
2902 vr2->type = vr1.type;
2903 vr2->set = vr1.set;
2904 vr2->hashcode = vr1.hashcode;
2905 vr2->result = lhs;
2906 vr2->result_vdef = vdef;
2907 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
2908 INSERT);
2909 gcc_assert (!*slot);
2910 *slot = vr2;
2913 return changed;
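/* E.g. for two calls to a pure function (hypothetical GIMPLE)

     # VUSE <.MEM_4>
     y_1 = f (x_2);
     ...
     # VUSE <.MEM_4>
     y_3 = f (x_2);

   the second lookup finds the entry inserted for the first call, so
   y_3 is value numbered to y_1.  */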
2916 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2917 and return true if the value number of the LHS has changed as a result. */
2919 static bool
2920 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2922 bool changed = false;
2923 tree last_vuse;
2924 tree result;
2926 last_vuse = gimple_vuse (stmt);
2927 last_vuse_ptr = &last_vuse;
2928 result = vn_reference_lookup (op, gimple_vuse (stmt),
2929 default_vn_walk_kind, NULL);
2930 last_vuse_ptr = NULL;
2932 /* We handle type-punning through unions by value-numbering based
2933 on offset and size of the access. Be prepared to handle a
2934 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
2935 if (result
2936 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2938 /* We will be setting the value number of lhs to the value number
2939 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2940 So first simplify and lookup this expression to see if it
2941 is already available. */
2942 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2943 if ((CONVERT_EXPR_P (val)
2944 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2945 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2947 tree tem = vn_get_expr_for (TREE_OPERAND (val, 0));
2948 if ((CONVERT_EXPR_P (tem)
2949 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2950 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2951 TREE_TYPE (val), tem)))
2952 val = tem;
2954 result = val;
2955 if (!is_gimple_min_invariant (val)
2956 && TREE_CODE (val) != SSA_NAME)
2957 result = vn_nary_op_lookup (val, NULL);
2958 /* If the expression is not yet available, value-number lhs to
2959 a new SSA_NAME we create. */
2960 if (!result)
2962 result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (),
2963 "vntemp");
2964 /* Initialize value-number information properly. */
2965 VN_INFO_GET (result)->valnum = result;
2966 VN_INFO (result)->value_id = get_next_value_id ();
2967 VN_INFO (result)->expr = val;
2968 VN_INFO (result)->has_constants = expr_has_constants (val);
2969 VN_INFO (result)->needs_insertion = true;
2970 /* As all "inserted" statements are singleton SCCs, insert
2971 to the valid table. This is strictly needed to
2972 avoid re-generating new value SSA_NAMEs for the same
2973 expression during SCC iteration over and over (the
2974 optimistic table gets cleared after each iteration).
2975 We do not need to insert into the optimistic table, as
2976 lookups there will fall back to the valid table. */
2977 if (current_info == optimistic_info)
2979 current_info = valid_info;
2980 vn_nary_op_insert (val, result);
2981 current_info = optimistic_info;
2983 else
2984 vn_nary_op_insert (val, result);
2985 if (dump_file && (dump_flags & TDF_DETAILS))
2987 fprintf (dump_file, "Inserting name ");
2988 print_generic_expr (dump_file, result, 0);
2989 fprintf (dump_file, " for expression ");
2990 print_generic_expr (dump_file, val, 0);
2991 fprintf (dump_file, "\n");
2996 if (result)
2998 changed = set_ssa_val_to (lhs, result);
2999 if (TREE_CODE (result) == SSA_NAME
3000 && VN_INFO (result)->has_constants)
3002 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
3003 VN_INFO (lhs)->has_constants = true;
3006 else
3008 changed = set_ssa_val_to (lhs, lhs);
3009 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3012 return changed;
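/* Sketch of the type-punning path above (hypothetical source):

     union { int i; float f; } u;
     u.i = x_1;
     f_2 = u.f;

   The load of u.f finds x_1 at the same offset and size; the type
   mismatch is handled by valuing f_2 as VIEW_CONVERT_EXPR <float>
   (x_1), inserting a fresh SSA name for that expression if it is not
   available yet.  */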
3016 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3017 and return true if the value number of the LHS has changed as a result. */
3019 static bool
3020 visit_reference_op_store (tree lhs, tree op, gimple stmt)
3022 bool changed = false;
3023 vn_reference_t vnresult = NULL;
3024 tree result, assign;
3025 bool resultsame = false;
3026 tree vuse = gimple_vuse (stmt);
3027 tree vdef = gimple_vdef (stmt);
3029 /* First we want to look up, using the *vuses* from the store, whether
3030 the last store to this location with the same address had the
3031 same value.
3033 The vuses represent the memory state before the store. If the
3034 memory state, address, and value of the store are the same as in the
3035 last store to this location, then this store will produce the
3036 same memory state as that store.
3038 In this case the vdef versions for this store are value numbered to those
3039 vuse versions, since they represent the same memory state after
3040 this store.
3042 Otherwise, the vdefs for the store are used when inserting into
3043 the table, since the store generates a new memory state. */
3045 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);
3047 if (result)
3049 if (TREE_CODE (result) == SSA_NAME)
3050 result = SSA_VAL (result);
3051 if (TREE_CODE (op) == SSA_NAME)
3052 op = SSA_VAL (op);
3053 resultsame = expressions_equal_p (result, op);
3056 if ((!result || !resultsame)
3057 /* Only perform the following when being called from PRE
3058 which embeds tail merging. */
3059 && default_vn_walk_kind == VN_WALK)
3061 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3062 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
3063 if (vnresult)
3065 VN_INFO (vdef)->use_processed = true;
3066 return set_ssa_val_to (vdef, vnresult->result_vdef);
3070 if (!result || !resultsame)
3072 if (dump_file && (dump_flags & TDF_DETAILS))
3074 fprintf (dump_file, "No store match\n");
3075 fprintf (dump_file, "Value numbering store ");
3076 print_generic_expr (dump_file, lhs, 0);
3077 fprintf (dump_file, " to ");
3078 print_generic_expr (dump_file, op, 0);
3079 fprintf (dump_file, "\n");
3081 /* Have to set value numbers before insert, since insert is
3082 going to valueize the references in-place. */
3083 if (vdef)
3085 changed |= set_ssa_val_to (vdef, vdef);
3088 /* Do not insert structure copies into the tables. */
3089 if (is_gimple_min_invariant (op)
3090 || is_gimple_reg (op))
3091 vn_reference_insert (lhs, op, vdef, NULL);
3093 /* Only perform the following when being called from PRE
3094 which embeds tail merging. */
3095 if (default_vn_walk_kind == VN_WALK)
3097 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3098 vn_reference_insert (assign, lhs, vuse, vdef);
3101 else
3103 /* We had a match, so value number the vdef to have the value
3104 number of the vuse it came from. */
3106 if (dump_file && (dump_flags & TDF_DETAILS))
3107 fprintf (dump_file, "Store matched earlier value, "
3108 "value numbering store vdefs to matching vuses.\n");
3110 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3113 return changed;
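/* Illustrative example of the match case (hypothetical GIMPLE):

     # .MEM_3 = VDEF <.MEM_2>
     a = x_1;
     ...
     # .MEM_5 = VDEF <.MEM_3>
     a = x_1;

   The second store finds the first through its vuse and stores the
   same value, so .MEM_5 is value numbered to .MEM_3 and later passes
   can remove the redundant store.  */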
3116 /* Visit and value number PHI, return true if the value number
3117 changed. */
3119 static bool
3120 visit_phi (gimple phi)
3122 bool changed = false;
3123 tree result;
3124 tree sameval = VN_TOP;
3125 bool allsame = true;
3127 /* TODO: We could check for this in init_sccvn, and replace this
3128 with a gcc_assert. */
3129 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3130 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3132 /* See if all non-TOP arguments have the same value. TOP is
3133 equivalent to everything, so we can ignore it. */
3134 edge_iterator ei;
3135 edge e;
3136 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3137 if (e->flags & EDGE_EXECUTABLE)
3139 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3141 if (TREE_CODE (def) == SSA_NAME)
3142 def = SSA_VAL (def);
3143 if (def == VN_TOP)
3144 continue;
3145 if (sameval == VN_TOP)
3147 sameval = def;
3149 else
3151 if (!expressions_equal_p (def, sameval))
3153 allsame = false;
3154 break;
3159 /* If all arguments value numbered to the same value, the PHI node has that
3160 value. */
3161 if (allsame)
3162 return set_ssa_val_to (PHI_RESULT (phi), sameval);
3164 /* Otherwise, see if it is equivalent to a phi node in this block. */
3165 result = vn_phi_lookup (phi);
3166 if (result)
3167 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3168 else
3170 vn_phi_insert (phi, PHI_RESULT (phi));
3171 VN_INFO (PHI_RESULT (phi))->has_constants = false;
3172 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
3173 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3176 return changed;
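/* For example, x_1 = PHI <y_2(3), y_2(4)> with both edges executable
   is simply valued to y_2, while a PHI whose arguments disagree falls
   back to the block-local PHI lookup above.  */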
3179 /* Return true if EXPR contains constants. */
3181 static bool
3182 expr_has_constants (tree expr)
3184 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3186 case tcc_unary:
3187 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
3189 case tcc_binary:
3190 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
3191 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
3192 /* Constants inside reference ops are rarely interesting, but
3193 it can take a lot of looking to find them. */
3194 case tcc_reference:
3195 case tcc_declaration:
3196 return false;
3197 default:
3198 return is_gimple_min_invariant (expr);
3200 return false;
3203 /* Return true if STMT contains constants. */
3205 static bool
3206 stmt_has_constants (gimple stmt)
3208 tree tem;
3210 if (gimple_code (stmt) != GIMPLE_ASSIGN)
3211 return false;
3213 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
3215 case GIMPLE_TERNARY_RHS:
3216 tem = gimple_assign_rhs3 (stmt);
3217 if (TREE_CODE (tem) == SSA_NAME)
3218 tem = SSA_VAL (tem);
3219 if (is_gimple_min_invariant (tem))
3220 return true;
3221 /* Fallthru. */
3223 case GIMPLE_BINARY_RHS:
3224 tem = gimple_assign_rhs2 (stmt);
3225 if (TREE_CODE (tem) == SSA_NAME)
3226 tem = SSA_VAL (tem);
3227 if (is_gimple_min_invariant (tem))
3228 return true;
3229 /* Fallthru. */
3231 case GIMPLE_SINGLE_RHS:
3232 /* Constants inside reference ops are rarely interesting, but
3233 it can take a lot of looking to find them. */
3234 case GIMPLE_UNARY_RHS:
3235 tem = gimple_assign_rhs1 (stmt);
3236 if (TREE_CODE (tem) == SSA_NAME)
3237 tem = SSA_VAL (tem);
3238 return is_gimple_min_invariant (tem);
3240 default:
3241 gcc_unreachable ();
3243 return false;
3246 /* Simplify the binary expression RHS, and return the result if
3247 simplified. */
3249 static tree
3250 simplify_binary_expression (gimple stmt)
3252 tree result = NULL_TREE;
3253 tree op0 = gimple_assign_rhs1 (stmt);
3254 tree op1 = gimple_assign_rhs2 (stmt);
3255 enum tree_code code = gimple_assign_rhs_code (stmt);
3257 /* This will not catch every single case we could combine, but will
3258 catch those with constants. The goal here is to combine
3259 constants between expressions while avoiding infinite
3260 expansion of expressions during simplification. */
3261 op0 = vn_valueize (op0);
3262 if (TREE_CODE (op0) == SSA_NAME
3263 && (VN_INFO (op0)->has_constants
3264 || TREE_CODE_CLASS (code) == tcc_comparison
3265 || code == COMPLEX_EXPR))
3266 op0 = vn_get_expr_for (op0);
3268 op1 = vn_valueize (op1);
3269 if (TREE_CODE (op1) == SSA_NAME
3270 && (VN_INFO (op1)->has_constants
3271 || code == COMPLEX_EXPR))
3272 op1 = vn_get_expr_for (op1);
3274 /* Pointer plus constant can be represented as an invariant address.
3275 Do so to allow further propagation, see also tree forwprop. */
3276 if (code == POINTER_PLUS_EXPR
3277 && tree_fits_uhwi_p (op1)
3278 && TREE_CODE (op0) == ADDR_EXPR
3279 && is_gimple_min_invariant (op0))
3280 return build_invariant_address (TREE_TYPE (op0),
3281 TREE_OPERAND (op0, 0),
3282 tree_to_uhwi (op1));
3284 /* Avoid folding if nothing changed. */
3285 if (op0 == gimple_assign_rhs1 (stmt)
3286 && op1 == gimple_assign_rhs2 (stmt))
3287 return NULL_TREE;
3289 fold_defer_overflow_warnings ();
3291 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3292 if (result)
3293 STRIP_USELESS_TYPE_CONVERSION (result);
3295 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3296 stmt, 0);
3298 /* Make sure the result is not a complex expression consisting
3299 of operators of operators (i.e. (a + b) + (a + c)).
3300 Otherwise, we will end up with unbounded expressions if
3301 fold does anything at all. */
3302 if (result && valid_gimple_rhs_p (result))
3303 return result;
3305 return NULL_TREE;
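/* Sketch of the POINTER_PLUS_EXPR case above (hypothetical): for

     p_1 = &a + 4;

   the RHS is rebuilt as an invariant address of the form
   &MEM[(void *)&a + 4B], which later reference lookups can match
   directly.  */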
3308 /* Simplify the unary expression RHS, and return the result if
3309 simplified. */
3311 static tree
3312 simplify_unary_expression (gimple stmt)
3314 tree result = NULL_TREE;
3315 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3316 enum tree_code code = gimple_assign_rhs_code (stmt);
3318 /* We handle some tcc_reference codes here that are all
3319 GIMPLE_ASSIGN_SINGLE codes. */
3320 if (code == REALPART_EXPR
3321 || code == IMAGPART_EXPR
3322 || code == VIEW_CONVERT_EXPR
3323 || code == BIT_FIELD_REF)
3324 op0 = TREE_OPERAND (op0, 0);
3326 orig_op0 = op0;
3327 op0 = vn_valueize (op0);
3328 if (TREE_CODE (op0) == SSA_NAME)
3330 if (VN_INFO (op0)->has_constants)
3331 op0 = vn_get_expr_for (op0);
3332 else if (CONVERT_EXPR_CODE_P (code)
3333 || code == REALPART_EXPR
3334 || code == IMAGPART_EXPR
3335 || code == VIEW_CONVERT_EXPR
3336 || code == BIT_FIELD_REF)
3338 /* We want to do tree-combining on conversion-like expressions.
3339 Make sure we feed only SSA_NAMEs or constants to fold though. */
3340 tree tem = vn_get_expr_for (op0);
3341 if (UNARY_CLASS_P (tem)
3342 || BINARY_CLASS_P (tem)
3343 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3344 || TREE_CODE (tem) == SSA_NAME
3345 || TREE_CODE (tem) == CONSTRUCTOR
3346 || is_gimple_min_invariant (tem))
3347 op0 = tem;
3351 /* Avoid folding if nothing changed. */
3352 if (op0 == orig_op0)
3353 return NULL_TREE;
3355 if (code == BIT_FIELD_REF)
3357 tree rhs = gimple_assign_rhs1 (stmt);
3358 result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3359 op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3361 else
3362 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3363 if (result)
3365 STRIP_USELESS_TYPE_CONVERSION (result);
3366 if (valid_gimple_rhs_p (result))
3367 return result;
3370 return NULL_TREE;
3373 /* Try to simplify RHS using equivalences and constant folding. */
3375 static tree
3376 try_to_simplify (gimple stmt)
3378 enum tree_code code = gimple_assign_rhs_code (stmt);
3379 tree tem;
3381 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
3382 in this case; there is no point in doing extra work. */
3383 if (code == SSA_NAME)
3384 return NULL_TREE;
3386 /* First try constant folding based on our current lattice. */
3387 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize);
3388 if (tem
3389 && (TREE_CODE (tem) == SSA_NAME
3390 || is_gimple_min_invariant (tem)))
3391 return tem;
3393 /* If that didn't work try combining multiple statements. */
3394 switch (TREE_CODE_CLASS (code))
3396 case tcc_reference:
3397 /* Fallthrough for some unary codes that can operate on registers. */
3398 if (!(code == REALPART_EXPR
3399 || code == IMAGPART_EXPR
3400 || code == VIEW_CONVERT_EXPR
3401 || code == BIT_FIELD_REF))
3402 break;
3403 /* We could do a little more with unary ops, if they expand
3404 into binary ops, but it's debatable whether it is worth it. */
3405 case tcc_unary:
3406 return simplify_unary_expression (stmt);
3408 case tcc_comparison:
3409 case tcc_binary:
3410 return simplify_binary_expression (stmt);
3412 default:
3413 break;
3416 return NULL_TREE;
3419 /* Visit and value number USE, return true if the value number
3420 changed. */
3422 static bool
3423 visit_use (tree use)
3425 bool changed = false;
3426 gimple stmt = SSA_NAME_DEF_STMT (use);
3428 mark_use_processed (use);
3430 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3431 if (dump_file && (dump_flags & TDF_DETAILS)
3432 && !SSA_NAME_IS_DEFAULT_DEF (use))
3434 fprintf (dump_file, "Value numbering ");
3435 print_generic_expr (dump_file, use, 0);
3436 fprintf (dump_file, " stmt = ");
3437 print_gimple_stmt (dump_file, stmt, 0, 0);
3440 /* Handle uninitialized uses. */
3441 if (SSA_NAME_IS_DEFAULT_DEF (use))
3442 changed = set_ssa_val_to (use, use);
3443 else
3445 if (gimple_code (stmt) == GIMPLE_PHI)
3446 changed = visit_phi (stmt);
3447 else if (gimple_has_volatile_ops (stmt))
3448 changed = defs_to_varying (stmt);
3449 else if (is_gimple_assign (stmt))
3451 enum tree_code code = gimple_assign_rhs_code (stmt);
3452 tree lhs = gimple_assign_lhs (stmt);
3453 tree rhs1 = gimple_assign_rhs1 (stmt);
3454 tree simplified;
3456 /* Shortcut for copies. Simplifying copies is pointless,
3457 since we copy the expression and value they represent. */
3458 if (code == SSA_NAME
3459 && TREE_CODE (lhs) == SSA_NAME)
3461 changed = visit_copy (lhs, rhs1);
3462 goto done;
3464 simplified = try_to_simplify (stmt);
3465 if (simplified)
3467 if (dump_file && (dump_flags & TDF_DETAILS))
3469 fprintf (dump_file, "RHS ");
3470 print_gimple_expr (dump_file, stmt, 0, 0);
3471 fprintf (dump_file, " simplified to ");
3472 print_generic_expr (dump_file, simplified, 0);
3473 if (TREE_CODE (lhs) == SSA_NAME)
3474 fprintf (dump_file, " has constants %d\n",
3475 expr_has_constants (simplified));
3476 else
3477 fprintf (dump_file, "\n");
3480 /* Setting value numbers to constants will occasionally
3481 screw up phi congruence because constants are not
3482 uniquely associated with a single ssa name that can be
3483 looked up. */
3484 if (simplified
3485 && is_gimple_min_invariant (simplified)
3486 && TREE_CODE (lhs) == SSA_NAME)
3488 VN_INFO (lhs)->expr = simplified;
3489 VN_INFO (lhs)->has_constants = true;
3490 changed = set_ssa_val_to (lhs, simplified);
3491 goto done;
3493 else if (simplified
3494 && TREE_CODE (simplified) == SSA_NAME
3495 && TREE_CODE (lhs) == SSA_NAME)
3497 changed = visit_copy (lhs, simplified);
3498 goto done;
3500 else if (simplified)
3502 if (TREE_CODE (lhs) == SSA_NAME)
3504 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3505 /* We have to unshare the expression or else
3506 valueizing may change the IL stream. */
3507 VN_INFO (lhs)->expr = unshare_expr (simplified);
3510 else if (stmt_has_constants (stmt)
3511 && TREE_CODE (lhs) == SSA_NAME)
3512 VN_INFO (lhs)->has_constants = true;
3513 else if (TREE_CODE (lhs) == SSA_NAME)
3515 /* We reset expr and constantness here because we may
3516 have been value numbering optimistically, and
3517 iterating. They may become non-constant in this case,
3518 even if they were optimistically constant. */
3520 VN_INFO (lhs)->has_constants = false;
3521 VN_INFO (lhs)->expr = NULL_TREE;
3524 if ((TREE_CODE (lhs) == SSA_NAME
3525 /* We can substitute SSA_NAMEs that are live over
3526 abnormal edges with their constant value. */
3527 && !(gimple_assign_copy_p (stmt)
3528 && is_gimple_min_invariant (rhs1))
3529 && !(simplified
3530 && is_gimple_min_invariant (simplified))
3531 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3532 /* Stores or copies from SSA_NAMEs that are live over
3533 abnormal edges are a problem. */
3534 || (code == SSA_NAME
3535 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3536 changed = defs_to_varying (stmt);
3537 else if (REFERENCE_CLASS_P (lhs)
3538 || DECL_P (lhs))
3539 changed = visit_reference_op_store (lhs, rhs1, stmt);
3540 else if (TREE_CODE (lhs) == SSA_NAME)
3542 if ((gimple_assign_copy_p (stmt)
3543 && is_gimple_min_invariant (rhs1))
3544 || (simplified
3545 && is_gimple_min_invariant (simplified)))
3547 VN_INFO (lhs)->has_constants = true;
3548 if (simplified)
3549 changed = set_ssa_val_to (lhs, simplified);
3550 else
3551 changed = set_ssa_val_to (lhs, rhs1);
3553 else
3555 /* First try to lookup the simplified expression. */
3556 if (simplified)
3558 enum gimple_rhs_class rhs_class;
3561 rhs_class = get_gimple_rhs_class (TREE_CODE (simplified));
3562 if ((rhs_class == GIMPLE_UNARY_RHS
3563 || rhs_class == GIMPLE_BINARY_RHS
3564 || rhs_class == GIMPLE_TERNARY_RHS)
3565 && valid_gimple_rhs_p (simplified))
3567 tree result = vn_nary_op_lookup (simplified, NULL);
3568 if (result)
3570 changed = set_ssa_val_to (lhs, result);
3571 goto done;
3576 /* Otherwise visit the original statement. */
3577 switch (vn_get_stmt_kind (stmt))
3579 case VN_NARY:
3580 changed = visit_nary_op (lhs, stmt);
3581 break;
3582 case VN_REFERENCE:
3583 changed = visit_reference_op_load (lhs, rhs1, stmt);
3584 break;
3585 default:
3586 changed = defs_to_varying (stmt);
3587 break;
3591 else
3592 changed = defs_to_varying (stmt);
3594 else if (is_gimple_call (stmt))
3596 tree lhs = gimple_call_lhs (stmt);
3597 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3599 /* Try constant folding based on our current lattice. */
3600 tree simplified = gimple_fold_stmt_to_constant_1 (stmt,
3601 vn_valueize);
3602 if (simplified)
3604 if (dump_file && (dump_flags & TDF_DETAILS))
3606 fprintf (dump_file, "call ");
3607 print_gimple_expr (dump_file, stmt, 0, 0);
3608 fprintf (dump_file, " simplified to ");
3609 print_generic_expr (dump_file, simplified, 0);
3610 if (TREE_CODE (lhs) == SSA_NAME)
3611 fprintf (dump_file, " has constants %d\n",
3612 expr_has_constants (simplified));
3613 else
3614 fprintf (dump_file, "\n");
3617 /* Setting value numbers to constants will occasionally
3618 screw up phi congruence because constants are not
3619 uniquely associated with a single ssa name that can be
3620 looked up. */
3621 if (simplified
3622 && is_gimple_min_invariant (simplified))
3624 VN_INFO (lhs)->expr = simplified;
3625 VN_INFO (lhs)->has_constants = true;
3626 changed = set_ssa_val_to (lhs, simplified);
3627 if (gimple_vdef (stmt))
3628 changed |= set_ssa_val_to (gimple_vdef (stmt),
3629 gimple_vuse (stmt));
3630 goto done;
3632 else if (simplified
3633 && TREE_CODE (simplified) == SSA_NAME)
3635 changed = visit_copy (lhs, simplified);
3636 if (gimple_vdef (stmt))
3637 changed |= set_ssa_val_to (gimple_vdef (stmt),
3638 gimple_vuse (stmt));
3639 goto done;
3641 else
3643 if (stmt_has_constants (stmt))
3644 VN_INFO (lhs)->has_constants = true;
3645 else
3647 /* We reset expr and constantness here because we may
3648 have been value numbering optimistically, and
3649 iterating. They may become non-constant in this case,
3650 even if they were optimistically constant. */
3651 VN_INFO (lhs)->has_constants = false;
3652 VN_INFO (lhs)->expr = NULL_TREE;
3655 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3657 changed = defs_to_varying (stmt);
3658 goto done;
3663 if (!gimple_call_internal_p (stmt)
3664 && (/* Calls to the same function with the same vuse
3665 and the same operands do not necessarily return the same
3666 value, unless they're pure or const. */
3667 gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
3668 /* If calls have a vdef, subsequent calls won't have
3669 the same incoming vuse. So, if 2 calls with vdef have the
3670 same vuse, we know they're not subsequent.
3671 We can value number 2 calls to the same function with the
3672 same vuse and the same operands, which are hence not subsequent,
3673 the same, because there is no code in the program that can
3674 compare the 2 values... */
3675 || (gimple_vdef (stmt)
3676 /* ... unless the call returns a pointer which does
3677 not alias with anything else. In which case the
3678 information that the values are distinct are encoded
3679 in the IL. */
3680 && !(gimple_call_return_flags (stmt) & ERF_NOALIAS)
3681 /* Only perform the following when being called from PRE
3682 which embeds tail merging. */
3683 && default_vn_walk_kind == VN_WALK)))
3684 changed = visit_reference_op_call (lhs, stmt);
3685 else
3686 changed = defs_to_varying (stmt);
3688 else
3689 changed = defs_to_varying (stmt);
3691 done:
3692 return changed;
3695 /* Compare two operands by reverse postorder index. */
3697 static int
3698 compare_ops (const void *pa, const void *pb)
3700 const tree opa = *((const tree *)pa);
3701 const tree opb = *((const tree *)pb);
3702 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3703 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3704 basic_block bba;
3705 basic_block bbb;
3707 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3708 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3709 else if (gimple_nop_p (opstmta))
3710 return -1;
3711 else if (gimple_nop_p (opstmtb))
3712 return 1;
3714 bba = gimple_bb (opstmta);
3715 bbb = gimple_bb (opstmtb);
3717 if (!bba && !bbb)
3718 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3719 else if (!bba)
3720 return -1;
3721 else if (!bbb)
3722 return 1;
3724 if (bba == bbb)
3726 if (gimple_code (opstmta) == GIMPLE_PHI
3727 && gimple_code (opstmtb) == GIMPLE_PHI)
3728 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3729 else if (gimple_code (opstmta) == GIMPLE_PHI)
3730 return -1;
3731 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3732 return 1;
3733 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3734 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3735 else
3736 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3738 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3741 /* Sort an array containing members of a strongly connected component
3742 SCC so that the members are ordered by RPO number.
3743 This means that when the sort is complete, iterating through the
3744 array will give you the members in RPO order. */
3746 static void
3747 sort_scc (vec<tree> scc)
3749 scc.qsort (compare_ops);
3752 /* Insert the no longer used nary ONARY into the hash tables of INFO. */
3754 static void
3755 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3757 size_t size = sizeof_vn_nary_op (onary->length);
3758 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3759 &info->nary_obstack);
3760 memcpy (nary, onary, size);
3761 vn_nary_op_insert_into (nary, info->nary, false);
3764 /* Insert the no longer used phi OPHI into the hash tables of INFO. */
3766 static void
3767 copy_phi (vn_phi_t ophi, vn_tables_t info)
3769 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3770 vn_phi_s **slot;
3771 memcpy (phi, ophi, sizeof (*phi));
3772 ophi->phiargs.create (0);
3773 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
3774 gcc_assert (!*slot);
3775 *slot = phi;
3778 /* Insert the no longer used reference OREF into the hash tables of INFO. */
3780 static void
3781 copy_reference (vn_reference_t oref, vn_tables_t info)
3783 vn_reference_t ref;
3784 vn_reference_s **slot;
3785 ref = (vn_reference_t) pool_alloc (info->references_pool);
3786 memcpy (ref, oref, sizeof (*ref));
3787 oref->operands.create (0);
3788 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
3789 if (*slot)
3790 free_reference (*slot);
3791 *slot = ref;
3794 /* Process a strongly connected component in the SSA graph. */
3796 static void
3797 process_scc (vec<tree> scc)
3799 tree var;
3800 unsigned int i;
3801 unsigned int iterations = 0;
3802 bool changed = true;
3803 vn_nary_op_iterator_type hin;
3804 vn_phi_iterator_type hip;
3805 vn_reference_iterator_type hir;
3806 vn_nary_op_t nary;
3807 vn_phi_t phi;
3808 vn_reference_t ref;
3810 /* If the SCC has a single member, just visit it. */
3811 if (scc.length () == 1)
3813 tree use = scc[0];
3814 if (VN_INFO (use)->use_processed)
3815 return;
3816 /* We need to make sure it doesn't form a cycle itself, which can
3817 happen for self-referential PHI nodes. In that case we would
3818 end up inserting an expression with VN_TOP operands into the
3819 valid table which makes us derive bogus equivalences later.
3820 The cheapest way to check this is to assume it for all PHI nodes. */
3821 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3822 /* Fallthru to iteration. */ ;
3823 else
3825 visit_use (use);
3826 return;
3830 if (dump_file && (dump_flags & TDF_DETAILS))
3831 print_scc (dump_file, scc);
3833 /* Iterate over the SCC with the optimistic table until it stops
3834 changing. */
3835 current_info = optimistic_info;
3836 while (changed)
3838 changed = false;
3839 iterations++;
3840 if (dump_file && (dump_flags & TDF_DETAILS))
3841 fprintf (dump_file, "Starting iteration %d\n", iterations);
3842 /* As we are value-numbering optimistically we have to
3843 clear the expression tables and the simplified expressions
3844 in each iteration until we converge. */
3845 optimistic_info->nary->empty ();
3846 optimistic_info->phis->empty ();
3847 optimistic_info->references->empty ();
3848 obstack_free (&optimistic_info->nary_obstack, NULL);
3849 gcc_obstack_init (&optimistic_info->nary_obstack);
3850 empty_alloc_pool (optimistic_info->phis_pool);
3851 empty_alloc_pool (optimistic_info->references_pool);
3852 FOR_EACH_VEC_ELT (scc, i, var)
3853 VN_INFO (var)->expr = NULL_TREE;
3854 FOR_EACH_VEC_ELT (scc, i, var)
3855 changed |= visit_use (var);
3858 if (dump_file && (dump_flags & TDF_DETAILS))
3859 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
3860 statistics_histogram_event (cfun, "SCC iterations", iterations);
3862 /* Finally, copy the contents of the no longer used optimistic
3863 table to the valid table. */
3864 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
3865 copy_nary (nary, valid_info);
3866 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
3867 copy_phi (phi, valid_info);
3868 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
3869 ref, vn_reference_t, hir)
3870 copy_reference (ref, valid_info);
3872 current_info = valid_info;
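/* Worked example of the optimistic iteration (hypothetical SCC):

     i_1 = PHI <0(2), i_2(3)>
     i_2 = i_1 + 0;

   The first iteration sees i_2 as VN_TOP, optimistically values i_1
   to 0 and then i_2 to 0 as well; the second iteration reproduces the
   same values, nothing changes, and the converged entries are copied
   to the valid table.  */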
3876 /* Pop the components of the found SCC for NAME off the SCC stack
3877 and process them. Returns true if all went well, false if
3878 we run into resource limits. */
3880 static bool
3881 extract_and_process_scc_for_name (tree name)
3883 auto_vec<tree> scc;
3884 tree x;
3886 /* Found an SCC, pop the components off the SCC stack and
3887 process them. */
3890 x = sccstack.pop ();
3892 VN_INFO (x)->on_sccstack = false;
3893 scc.safe_push (x);
3894 } while (x != name);
3896 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
3897 if (scc.length ()
3898 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3900 if (dump_file)
3901 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3902 "SCC size %u exceeding %u\n", scc.length (),
3903 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3905 return false;
3908 if (scc.length () > 1)
3909 sort_scc (scc);
3911 process_scc (scc);
3913 return true;
3916 /* Depth first search on NAME to discover and process SCCs in the SSA
3917 graph.
3918 Execution of this algorithm relies on the fact that the SCCs are
3919 popped off the stack in topological order.
3920 Returns true if successful, false if we stopped processing SCCs due
3921 to resource constraints. */
3923 static bool
3924 DFS (tree name)
3926 vec<ssa_op_iter> itervec = vNULL;
3927 vec<tree> namevec = vNULL;
3928 use_operand_p usep = NULL;
3929 gimple defstmt;
3930 tree use;
3931 ssa_op_iter iter;
3933 start_over:
3934 /* SCC info */
3935 VN_INFO (name)->dfsnum = next_dfs_num++;
3936 VN_INFO (name)->visited = true;
3937 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3939 sccstack.safe_push (name);
3940 VN_INFO (name)->on_sccstack = true;
3941 defstmt = SSA_NAME_DEF_STMT (name);
3943 /* Recursively DFS on our operands, looking for SCC's. */
3944 if (!gimple_nop_p (defstmt))
3946 /* Push a new iterator. */
3947 if (gimple_code (defstmt) == GIMPLE_PHI)
3948 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3949 else
3950 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3952 else
3953 clear_and_done_ssa_iter (&iter);
3955 while (1)
3957 /* If we are done processing uses of a name, go up the stack
3958 of iterators and process SCCs as we found them. */
3959 if (op_iter_done (&iter))
3961 /* See if we found an SCC. */
3962 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3963 if (!extract_and_process_scc_for_name (name))
3965 namevec.release ();
3966 itervec.release ();
3967 return false;
3970 /* Check if we are done. */
3971 if (namevec.is_empty ())
3973 namevec.release ();
3974 itervec.release ();
3975 return true;
3978 /* Restore the last use walker and continue walking there. */
3979 use = name;
3980 name = namevec.pop ();
3981 memcpy (&iter, &itervec.last (),
3982 sizeof (ssa_op_iter));
3983 itervec.pop ();
3984 goto continue_walking;
3987 use = USE_FROM_PTR (usep);
3989 /* Since we handle phi nodes, we will sometimes get
3990 invariants in the use expression. */
3991 if (TREE_CODE (use) == SSA_NAME)
3993 if (! (VN_INFO (use)->visited))
3995 /* Recurse by pushing the current use walking state on
3996 the stack and starting over. */
3997 itervec.safe_push (iter);
3998 namevec.safe_push (name);
3999 name = use;
4000 goto start_over;
4002 continue_walking:
4003 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4004 VN_INFO (use)->low);
4006 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4007 && VN_INFO (use)->on_sccstack)
4009 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4010 VN_INFO (name)->low);
4014 usep = op_iter_next_use (&iter);
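/* For orientation only: the explicit namevec/itervec stacks above
   simulate the textbook recursive formulation of Tarjan's algorithm,
   roughly as follows.  This is a sketch, not GCC code: dfs_recursive
   and FOR_EACH_USE_OF are made-up stand-ins, and real SSA graphs are
   deep enough to overflow the C stack, which is why the iterative
   version exists.

     static bool
     dfs_recursive (tree name)
     {
       VN_INFO (name)->dfsnum = VN_INFO (name)->low = next_dfs_num++;
       VN_INFO (name)->visited = true;
       sccstack.safe_push (name);
       VN_INFO (name)->on_sccstack = true;
       FOR_EACH_USE_OF (use, SSA_NAME_DEF_STMT (name))
	 if (TREE_CODE (use) == SSA_NAME)
	   {
	     if (!VN_INFO (use)->visited)
	       {
		 if (!dfs_recursive (use))
		   return false;
		 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
					    VN_INFO (use)->low);
	       }
	     else if (VN_INFO (use)->on_sccstack)
	       VN_INFO (name)->low = MIN (VN_INFO (name)->low,
					  VN_INFO (use)->dfsnum);
	   }
       if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
	 return extract_and_process_scc_for_name (name);
       return true;
     }  */
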
/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table)
{
  table->phis = new vn_phi_table_type (23);
  table->nary = new vn_nary_op_table_type (23);
  table->references = new vn_reference_table_type (23);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = create_alloc_pool ("VN phis",
					sizeof (struct vn_phi_s),
					30);
  table->references_pool = create_alloc_pool ("VN references",
					      sizeof (struct vn_reference_s),
					      30);
}

/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  delete table->phis;
  table->phis = NULL;
  delete table->nary;
  table->nary = NULL;
  delete table->references;
  table->references = NULL;
  obstack_free (&table->nary_obstack, NULL);
  free_alloc_pool (table->phis_pool);
  free_alloc_pool (table->references_pool);
}

static void
init_scc_vn (void)
{
  size_t i;
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  sccstack.create (0);
  constant_to_value_id = new hash_table<vn_constant_hasher> (23);

  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table.create (num_ssa_names + 1);
  /* The create above only preallocates the space, it doesn't actually
     grow the vector to the right size, so do that explicitly.  */
  vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs.create (0);
  shared_lookup_references.create (0);
  rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
  rpo_numbers_temp =
    XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* rpo_numbers_temp[i] == bb->index means that bb is the i'th block
     in RPO order.  We want the inverse mapping, from a block index to
     its RPO number, so invert the array (a worked example follows the
     function).  */
  for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;

  XDELETE (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
	{
	  VN_INFO_GET (name)->valnum = VN_TOP;
	  VN_INFO (name)->expr = NULL_TREE;
	  VN_INFO (name)->value_id = 0;
	}
    }

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}

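/* Worked example for the RPO inversion in init_scc_vn, with made-up
   block indices: if pre_and_rev_post_order_compute fills
   rpo_numbers_temp with { 2, 3, 5, 4 } -- bb2 first in RPO, bb4 last
   -- the loop stores rpo_numbers[2] = 0, rpo_numbers[3] = 1,
   rpo_numbers[5] = 2 and rpo_numbers[4] = 3.  The result maps a block
   index to its RPO position, which is what the SCC sorting uses to
   order SCC members by their position in the function.  */
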
void
free_scc_vn (void)
{
  size_t i;

  delete constant_to_value_id;
  constant_to_value_id = NULL;
  BITMAP_FREE (constant_value_ids);
  shared_lookup_phiargs.release ();
  shared_lookup_references.release ();
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->needs_insertion)
	release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  vn_ssa_aux_table.release ();

  sccstack.release ();
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}

/* Set *ID according to RESULT.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result && TREE_CODE (result) == SSA_NAME)
    *id = VN_INFO (result)->value_id;
  else if (result && is_gimple_min_invariant (result))
    *id = get_or_alloc_constant_value_id (result);
  else
    *id = get_next_value_id ();
}

/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
    set_value_id_for_result (vno->result, &vno->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
			       hir)
    set_value_id_for_result (vr->result, &vr->value_id);
}

class cond_dom_walker : public dom_walker
{
public:
  cond_dom_walker () : dom_walker (CDI_DOMINATORS), fail (false) {}

  virtual void before_dom_children (basic_block);

  bool fail;
};

void
cond_dom_walker::before_dom_children (basic_block bb)
{
  edge e;
  edge_iterator ei;

  if (fail)
    return;

  /* If any of the predecessor edges that do not come from blocks dominated
     by us are still marked as possibly executable, consider this block
     reachable.  */
  bool reachable = bb == ENTRY_BLOCK_PTR_FOR_FN (cfun);
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (!dominated_by_p (CDI_DOMINATORS, e->src, bb))
      reachable |= (e->flags & EDGE_EXECUTABLE);

  /* If the block is not reachable, all of its outgoing edges are not
     executable.  */
  if (!reachable)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Marking all outgoing edges of unreachable "
		 "BB %d as not executable\n", bb->index);

      FOR_EACH_EDGE (e, ei, bb->succs)
	e->flags &= ~EDGE_EXECUTABLE;
      return;
    }

  gimple stmt = last_stmt (bb);
  if (!stmt)
    return;

  enum gimple_code code = gimple_code (stmt);
  if (code != GIMPLE_COND
      && code != GIMPLE_SWITCH
      && code != GIMPLE_GOTO)
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value-numbering operands of stmt ending BB %d: ",
	       bb->index);
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Value-number the last stmt's SSA uses.  */
  ssa_op_iter i;
  tree op;
  FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
    if (VN_INFO (op)->visited == false
	&& !DFS (op))
      {
	fail = true;
	return;
      }

  /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
     if value-numbering can prove they are not reachable.  Handling
     computed gotos is also possible.  */
  tree val;
  switch (code)
    {
    case GIMPLE_COND:
      {
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	/* Work hard in computing the condition and take into account
	   the valueization of the defining stmt.  */
	if (TREE_CODE (lhs) == SSA_NAME)
	  lhs = vn_get_expr_for (lhs);
	if (TREE_CODE (rhs) == SSA_NAME)
	  rhs = vn_get_expr_for (rhs);
	val = fold_binary (gimple_cond_code (stmt),
			   boolean_type_node, lhs, rhs);
	break;
      }
    case GIMPLE_SWITCH:
      val = gimple_switch_index (stmt);
      break;
    case GIMPLE_GOTO:
      val = gimple_goto_dest (stmt);
      break;
    default:
      gcc_unreachable ();
    }
  if (!val)
    return;

  edge taken = find_taken_edge (bb, vn_valueize (val));
  if (!taken)
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
	     "not executable\n", bb->index, bb->index, taken->dest->index);

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e != taken)
      e->flags &= ~EDGE_EXECUTABLE;
}

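/* An illustrative example (made-up IL): suppose value numbering has
   already proved i_1 == 0 and bb5 ends in

       if (i_1 != 0) goto bb6; else goto bb7;

   Then fold_binary on the valueized operands yields
   boolean_false_node, find_taken_edge returns the bb5->bb7 edge, and
   the loop above clears EDGE_EXECUTABLE on bb5->bb6.  A block
   reachable only through such cleared edges is in turn treated as
   unreachable by the test at the top of this function, so
   unreachability propagates down the dominator walk.  */
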
/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
   how we use the alias oracle when walking memory during the VN
   process.  */

bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  basic_block bb;
  size_t i;
  tree param;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();
  current_info = valid_info;

  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    {
      tree def = ssa_default_def (cfun, param);
      if (def)
	{
	  VN_INFO (def)->visited = true;
	  VN_INFO (def)->valnum = def;
	}
    }

  /* Mark all edges as possibly executable.  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      edge_iterator ei;
      edge e;
      FOR_EACH_EDGE (e, ei, bb->succs)
	e->flags |= EDGE_EXECUTABLE;
    }

  /* Walk all blocks in dominator order, value-numbering each last
     stmt's SSA uses and deciding which outgoing edges are not
     executable.  */
  cond_dom_walker walker;
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  if (walker.fail)
    {
      free_scc_vn ();
      return false;
    }

  /* Value-number remaining SSA names.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->visited == false
	  && !has_zero_uses (name))
	if (!DFS (name))
	  {
	    free_scc_vn ();
	    return false;
	  }
    }

  /* Initialize the value ids.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (info->valnum == name
	  || info->valnum == VN_TOP)
	info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
	  && info->valnum != name
	  && info->value_id != VN_INFO (info->valnum)->value_id)
	info->value_id = VN_INFO (info->valnum)->value_id;
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);
	  if (name
	      && VN_INFO (name)->visited
	      && SSA_VAL (name) != name)
	    {
	      print_generic_expr (dump_file, name, 0);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, SSA_VAL (name), 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  return true;
}

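/* Sketch of typical usage by a client pass (hedged; see the callers
   in tree-ssa-pre.c for the real call sites and walk kinds):

     if (!run_scc_vn (VN_WALKREWRITE))
       return 0;   /+ An SCC exceeded the size limit; give up.  +/
     ... query SSA_VAL () and VN_INFO ()->value_id ...
     free_scc_vn ();

   The comment delimiters inside the sketch are altered so this block
   nests.  */
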
/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}

/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}

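/* For illustration: two distinct ADDR_EXPR trees taking the address
   of the same decl fail the pointer-equality test above but are still
   recognized as equal by operand_equal_p.  The OEP_PURE_SAME flag
   additionally lets two calls to the same const or pure function with
   equal arguments compare equal, which is safe here because value
   numbering only asks whether the two values coincide.  */
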
/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (type)
	       && TYPE_OVERFLOW_TRAPS (type))
	honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
				       honor_trapv,
				       honor_nans, honor_snans, rhs2,
				       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}
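
/* Illustrative cases (not exhaustive): a TRUNC_DIV_EXPR whose divisor
   is not a known-nonzero constant may trap, as may signed arithmetic
   in a TYPE_OVERFLOW_TRAPS type (honor_trapv above), or an ordered FP
   comparison when NaNs are possible and -ftrapping-math is in effect;
   unsigned integer addition never traps, so for it this function
   returns false and callers such as PRE may freely insert or hoist
   the expression.  */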