1 /* SCC value numbering for trees
2 Copyright (C) 2006-2015 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "rtl.h"
28 #include "ssa.h"
29 #include "alias.h"
30 #include "fold-const.h"
31 #include "stor-layout.h"
32 #include "cfganal.h"
33 #include "gimple-pretty-print.h"
34 #include "tree-inline.h"
35 #include "internal-fn.h"
36 #include "gimple-fold.h"
37 #include "tree-eh.h"
38 #include "gimplify.h"
39 #include "flags.h"
40 #include "insn-config.h"
41 #include "expmed.h"
42 #include "dojump.h"
43 #include "explow.h"
44 #include "calls.h"
45 #include "emit-rtl.h"
46 #include "varasm.h"
47 #include "stmt.h"
48 #include "expr.h"
49 #include "tree-dfa.h"
50 #include "tree-ssa.h"
51 #include "dumpfile.h"
52 #include "alloc-pool.h"
53 #include "cfgloop.h"
54 #include "params.h"
55 #include "tree-ssa-propagate.h"
56 #include "tree-ssa-sccvn.h"
57 #include "tree-cfg.h"
58 #include "domwalk.h"
59 #include "cgraph.h"
60 #include "gimple-iterator.h"
62 /* This algorithm is based on the SCC algorithm presented by Keith
63 Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
64 (http://citeseer.ist.psu.edu/41805.html). In
65 straight line code, it is equivalent to a regular hash based value
66 numbering that is performed in reverse postorder.
68 For code with cycles, there are two alternatives, both of which
69 require keeping the hashtables separate from the actual list of
70 value numbers for SSA names.
72 1. Iterate value numbering in an RPO walk of the blocks, removing
73 all the entries from the hashtable after each iteration (but
74 keeping the SSA name->value number mapping between iterations).
75 Iterate until it does not change.
77 2. Perform value numbering as part of an SCC walk on the SSA graph,
78 iterating only the cycles in the SSA graph until they do not change
79 (using a separate, optimistic hashtable for value numbering the SCC
80 operands).
82 The second is not just faster in practice (because most SSA graph
83 cycles do not involve all the variables in the graph), it also has
84 some nice properties.
86 One of these nice properties is that when we pop an SCC off the
87 stack, we are guaranteed to have processed all the operands coming from
88 *outside of that SCC*, so we do not need to do anything special to
89 ensure they have value numbers.
91 Another nice property is that the SCC walk is done as part of a DFS
92 of the SSA graph, which makes it easy to perform combining and
93 simplifying operations at the same time.
95 The code below is deliberately written in a way that makes it easy
96 to separate the SCC walk from the other work it does.
98 In order to propagate constants through the code, we track which
99 expressions contain constants, and use those while folding. In
100 theory, we could also track expressions whose value numbers are
101 replaced, in case we end up folding based on expression
102 identities.
104 In order to value number memory, we assign value numbers to vuses.
105 This enables us to note that, for example, stores to the same
106 address of the same value from the same starting memory states are
107 equivalent.
108 TODO:
110 1. We can iterate only the changing portions of the SCC's, but
111 I have not seen an SCC big enough for this to be a win.
112 2. If you differentiate between phi nodes for loops and phi nodes
113 for if-then-else, you can properly consider phi nodes in different
114 blocks for equivalence.
115 3. We could value number vuses in more cases, particularly whole
116 structure copies.
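/* Illustrative sketch (not part of GCC; all names, the toy graph and the
   toy transfer function are hypothetical): a minimal, self-contained C++
   program showing the shape of the scheme described above -- a DFS over a
   dependency graph that pushes nodes on a stack, pops whole SCCs, and
   iterates the members of a multi-node SCC to a fixpoint before their
   "value numbers" are considered final.  Guarded out so it takes no part
   in the build; compile separately with -std=c++11 to experiment.  */
#if 0
#include <algorithm>
#include <cstdio>
#include <vector>

struct toy_node
{
  std::vector<int> deps;	/* operands this node's value depends on */
  int dfs_num = -1;		/* DFS discovery number, -1 == unvisited */
  int low = -1;			/* lowest DFS number reachable (Tarjan) */
  bool on_stack = false;
  int value = -1;		/* toy value number; -1 plays the role of VN_TOP */
};

/* Iterate the values of SCC members until nothing changes, the analogue of
   re-visiting an optimistic SCC until its value numbers stabilize.  The toy
   transfer function simply takes the minimum over already-valued operands.  */
static void
process_scc (std::vector<toy_node> &g, const std::vector<int> &scc)
{
  bool changed = true;
  while (changed)
    {
      changed = false;
      for (int n : scc)
	{
	  int val = n;
	  for (int dep : g[n].deps)
	    if (g[dep].value != -1)
	      val = std::min (val, g[dep].value);
	  if (val != g[n].value)
	    {
	      g[n].value = val;
	      changed = true;
	    }
	}
    }
}

/* Tarjan-style DFS: when an SCC root is found, everything popped off the
   stack above it forms one SCC whose outside operands are already final.  */
static void
dfs (std::vector<toy_node> &g, int n, int &next_dfs, std::vector<int> &stack)
{
  g[n].dfs_num = g[n].low = next_dfs++;
  stack.push_back (n);
  g[n].on_stack = true;
  for (int dep : g[n].deps)
    {
      if (g[dep].dfs_num == -1)
	{
	  dfs (g, dep, next_dfs, stack);
	  g[n].low = std::min (g[n].low, g[dep].low);
	}
      else if (g[dep].on_stack)
	g[n].low = std::min (g[n].low, g[dep].dfs_num);
    }
  if (g[n].low == g[n].dfs_num)
    {
      std::vector<int> scc;
      int m;
      do
	{
	  m = stack.back ();
	  stack.pop_back ();
	  g[m].on_stack = false;
	  scc.push_back (m);
	}
      while (m != n);
      process_scc (g, scc);
    }
}

int
main ()
{
  /* Toy graph: 0 and 1 form a cycle (think of a loop PHI and its increment),
     2 depends on the cycle, 3 is straight-line code.  */
  std::vector<toy_node> g (4);
  g[0].deps = { 1 };
  g[1].deps = { 0, 3 };
  g[2].deps = { 0 };
  int next_dfs = 0;
  std::vector<int> stack;
  for (int i = 0; i < (int) g.size (); ++i)
    if (g[i].dfs_num == -1)
      dfs (g, i, next_dfs, stack);
  for (int i = 0; i < (int) g.size (); ++i)
    printf ("node %d -> value %d\n", i, g[i].value);
  return 0;
}
#endif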
120 static tree *last_vuse_ptr;
121 static vn_lookup_kind vn_walk_kind;
122 static vn_lookup_kind default_vn_walk_kind;
124 /* vn_nary_op hashtable helpers. */
126 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
128 typedef vn_nary_op_s *compare_type;
129 static inline hashval_t hash (const vn_nary_op_s *);
130 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
133 /* Return the computed hashcode for nary operation P1. */
135 inline hashval_t
136 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
138 return vno1->hashcode;
141 /* Compare nary operations P1 and P2 and return true if they are
142 equivalent. */
144 inline bool
145 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
147 return vn_nary_op_eq (vno1, vno2);
150 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
151 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
154 /* vn_phi hashtable helpers. */
156 static int
157 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
159 struct vn_phi_hasher : pointer_hash <vn_phi_s>
161 static inline hashval_t hash (const vn_phi_s *);
162 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
163 static inline void remove (vn_phi_s *);
166 /* Return the computed hashcode for phi operation P1. */
168 inline hashval_t
169 vn_phi_hasher::hash (const vn_phi_s *vp1)
171 return vp1->hashcode;
174 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
176 inline bool
177 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
179 return vn_phi_eq (vp1, vp2);
182 /* Free a phi operation structure VP. */
184 inline void
185 vn_phi_hasher::remove (vn_phi_s *phi)
187 phi->phiargs.release ();
190 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
191 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
194 /* Compare two reference operands P1 and P2 for equality. Return true if
195 they are equal, and false otherwise. */
197 static int
198 vn_reference_op_eq (const void *p1, const void *p2)
200 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
201 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
203 return (vro1->opcode == vro2->opcode
204 /* We do not care for differences in type qualification. */
205 && (vro1->type == vro2->type
206 || (vro1->type && vro2->type
207 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
208 TYPE_MAIN_VARIANT (vro2->type))))
209 && expressions_equal_p (vro1->op0, vro2->op0)
210 && expressions_equal_p (vro1->op1, vro2->op1)
211 && expressions_equal_p (vro1->op2, vro2->op2));
214 /* Free a reference operation structure VP. */
216 static inline void
217 free_reference (vn_reference_s *vr)
219 vr->operands.release ();
223 /* vn_reference hashtable helpers. */
225 struct vn_reference_hasher : pointer_hash <vn_reference_s>
227 static inline hashval_t hash (const vn_reference_s *);
228 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
229 static inline void remove (vn_reference_s *);
232 /* Return the hashcode for a given reference operation P1. */
234 inline hashval_t
235 vn_reference_hasher::hash (const vn_reference_s *vr1)
237 return vr1->hashcode;
240 inline bool
241 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
243 return vn_reference_eq (v, c);
246 inline void
247 vn_reference_hasher::remove (vn_reference_s *v)
249 free_reference (v);
252 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
253 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
256 /* The set of hashtables and alloc_pool's for their items. */
258 typedef struct vn_tables_s
260 vn_nary_op_table_type *nary;
261 vn_phi_table_type *phis;
262 vn_reference_table_type *references;
263 struct obstack nary_obstack;
264 object_allocator<vn_phi_s> *phis_pool;
265 object_allocator<vn_reference_s> *references_pool;
266 } *vn_tables_t;
269 /* vn_constant hashtable helpers. */
271 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
273 static inline hashval_t hash (const vn_constant_s *);
274 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
277 /* Hash table hash function for vn_constant_t. */
279 inline hashval_t
280 vn_constant_hasher::hash (const vn_constant_s *vc1)
282 return vc1->hashcode;
285 /* Hash table equality function for vn_constant_t. */
287 inline bool
288 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
290 if (vc1->hashcode != vc2->hashcode)
291 return false;
293 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
296 static hash_table<vn_constant_hasher> *constant_to_value_id;
297 static bitmap constant_value_ids;
300 /* Valid hashtables storing information we have proven to be
301 correct. */
303 static vn_tables_t valid_info;
305 /* Optimistic hashtables storing information we are making assumptions about
306 during iterations. */
308 static vn_tables_t optimistic_info;
310 /* Pointer to the set of hashtables that is currently being used.
311 Should always point to either the optimistic_info, or the
312 valid_info. */
314 static vn_tables_t current_info;
317 /* Reverse post order index for each basic block. */
319 static int *rpo_numbers;
321 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
323 /* Return the SSA value of the VUSE x, supporting released VDEFs
324 during elimination which will value-number the VDEF to the
325 associated VUSE (but not substitute in the whole lattice). */
327 static inline tree
328 vuse_ssa_val (tree x)
330 if (!x)
331 return NULL_TREE;
333 do
335 x = SSA_VAL (x);
337 while (SSA_NAME_IN_FREE_LIST (x));
339 return x;
342 /* This represents the top of the VN lattice, which is the universal
343 value. */
345 tree VN_TOP;
347 /* Unique counter for our value ids. */
349 static unsigned int next_value_id;
351 /* Next DFS number and the stack for strongly connected component
352 detection. */
354 static unsigned int next_dfs_num;
355 static vec<tree> sccstack;
359 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
360 are allocated on an obstack for locality reasons, and to free them
361 without looping over the vec. */
363 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
364 static struct obstack vn_ssa_aux_obstack;
366 /* Return the value numbering information for a given SSA name. */
368 vn_ssa_aux_t
369 VN_INFO (tree name)
371 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
372 gcc_checking_assert (res);
373 return res;
376 /* Set the value numbering info for a given SSA name to a given
377 value. */
379 static inline void
380 VN_INFO_SET (tree name, vn_ssa_aux_t value)
382 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
385 /* Initialize the value numbering info for a given SSA name.
386 This should be called just once for every SSA name. */
388 vn_ssa_aux_t
389 VN_INFO_GET (tree name)
391 vn_ssa_aux_t newinfo;
393 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
394 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
395 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
396 vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
397 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
398 return newinfo;
402 /* Get the representative expression for the SSA_NAME NAME. Returns
403 the representative SSA_NAME if there is no expression associated with it. */
405 tree
406 vn_get_expr_for (tree name)
408 vn_ssa_aux_t vn = VN_INFO (name);
409 gimple def_stmt;
410 tree expr = NULL_TREE;
411 enum tree_code code;
413 if (vn->valnum == VN_TOP)
414 return name;
416 /* If the value-number is a constant it is the representative
417 expression. */
418 if (TREE_CODE (vn->valnum) != SSA_NAME)
419 return vn->valnum;
421 /* Get to the information of the value of this SSA_NAME. */
422 vn = VN_INFO (vn->valnum);
424 /* If the value-number is a constant it is the representative
425 expression. */
426 if (TREE_CODE (vn->valnum) != SSA_NAME)
427 return vn->valnum;
429 /* Else if we have an expression, return it. */
430 if (vn->expr != NULL_TREE)
431 return vn->expr;
433 /* Otherwise use the defining statement to build the expression. */
434 def_stmt = SSA_NAME_DEF_STMT (vn->valnum);
436 /* If the value number is not an assignment use it directly. */
437 if (!is_gimple_assign (def_stmt))
438 return vn->valnum;
440 /* Note that we can valueize here because we clear the cached
441 simplified expressions after each optimistic iteration. */
442 code = gimple_assign_rhs_code (def_stmt);
443 switch (TREE_CODE_CLASS (code))
445 case tcc_reference:
446 if ((code == REALPART_EXPR
447 || code == IMAGPART_EXPR
448 || code == VIEW_CONVERT_EXPR)
449 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
450 0)) == SSA_NAME)
451 expr = fold_build1 (code,
452 gimple_expr_type (def_stmt),
453 vn_valueize (TREE_OPERAND
454 (gimple_assign_rhs1 (def_stmt), 0)));
455 break;
457 case tcc_unary:
458 expr = fold_build1 (code,
459 gimple_expr_type (def_stmt),
460 vn_valueize (gimple_assign_rhs1 (def_stmt)));
461 break;
463 case tcc_binary:
464 expr = fold_build2 (code,
465 gimple_expr_type (def_stmt),
466 vn_valueize (gimple_assign_rhs1 (def_stmt)),
467 vn_valueize (gimple_assign_rhs2 (def_stmt)));
468 break;
470 case tcc_exceptional:
471 if (code == CONSTRUCTOR
472 && TREE_CODE
473 (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
474 expr = gimple_assign_rhs1 (def_stmt);
475 break;
477 default:;
479 if (expr == NULL_TREE)
480 return vn->valnum;
482 /* Cache the expression. */
483 vn->expr = expr;
485 return expr;
488 /* Return the vn_kind the expression computed by the stmt should be
489 associated with. */
491 enum vn_kind
492 vn_get_stmt_kind (gimple stmt)
494 switch (gimple_code (stmt))
496 case GIMPLE_CALL:
497 return VN_REFERENCE;
498 case GIMPLE_PHI:
499 return VN_PHI;
500 case GIMPLE_ASSIGN:
502 enum tree_code code = gimple_assign_rhs_code (stmt);
503 tree rhs1 = gimple_assign_rhs1 (stmt);
504 switch (get_gimple_rhs_class (code))
506 case GIMPLE_UNARY_RHS:
507 case GIMPLE_BINARY_RHS:
508 case GIMPLE_TERNARY_RHS:
509 return VN_NARY;
510 case GIMPLE_SINGLE_RHS:
511 switch (TREE_CODE_CLASS (code))
513 case tcc_reference:
514 /* VOP-less references can go through unary case. */
515 if ((code == REALPART_EXPR
516 || code == IMAGPART_EXPR
517 || code == VIEW_CONVERT_EXPR
518 || code == BIT_FIELD_REF)
519 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
520 return VN_NARY;
522 /* Fallthrough. */
523 case tcc_declaration:
524 return VN_REFERENCE;
526 case tcc_constant:
527 return VN_CONSTANT;
529 default:
530 if (code == ADDR_EXPR)
531 return (is_gimple_min_invariant (rhs1)
532 ? VN_CONSTANT : VN_REFERENCE);
533 else if (code == CONSTRUCTOR)
534 return VN_NARY;
535 return VN_NONE;
537 default:
538 return VN_NONE;
541 default:
542 return VN_NONE;
546 /* Lookup a value id for CONSTANT and return it. If it does not
547 exist returns 0. */
549 unsigned int
550 get_constant_value_id (tree constant)
552 vn_constant_s **slot;
553 struct vn_constant_s vc;
555 vc.hashcode = vn_hash_constant_with_type (constant);
556 vc.constant = constant;
557 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
558 if (slot)
559 return (*slot)->value_id;
560 return 0;
563 /* Lookup a value id for CONSTANT, and if it does not exist, create a
564 new one and return it. If it does exist, return it. */
566 unsigned int
567 get_or_alloc_constant_value_id (tree constant)
569 vn_constant_s **slot;
570 struct vn_constant_s vc;
571 vn_constant_t vcp;
573 vc.hashcode = vn_hash_constant_with_type (constant);
574 vc.constant = constant;
575 slot = constant_to_value_id->find_slot (&vc, INSERT);
576 if (*slot)
577 return (*slot)->value_id;
579 vcp = XNEW (struct vn_constant_s);
580 vcp->hashcode = vc.hashcode;
581 vcp->constant = constant;
582 vcp->value_id = get_next_value_id ();
583 *slot = vcp;
584 bitmap_set_bit (constant_value_ids, vcp->value_id);
585 return vcp->value_id;
588 /* Return true if V is a value id for a constant. */
590 bool
591 value_id_constant_p (unsigned int v)
593 return bitmap_bit_p (constant_value_ids, v);
596 /* Compute the hash for a reference operand VRO1. */
598 static void
599 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
601 hstate.add_int (vro1->opcode);
602 if (vro1->op0)
603 inchash::add_expr (vro1->op0, hstate);
604 if (vro1->op1)
605 inchash::add_expr (vro1->op1, hstate);
606 if (vro1->op2)
607 inchash::add_expr (vro1->op2, hstate);
610 /* Compute a hash for the reference operation VR1 and return it. */
612 static hashval_t
613 vn_reference_compute_hash (const vn_reference_t vr1)
615 inchash::hash hstate;
616 hashval_t result;
617 int i;
618 vn_reference_op_t vro;
619 HOST_WIDE_INT off = -1;
620 bool deref = false;
622 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
624 if (vro->opcode == MEM_REF)
625 deref = true;
626 else if (vro->opcode != ADDR_EXPR)
627 deref = false;
628 if (vro->off != -1)
630 if (off == -1)
631 off = 0;
632 off += vro->off;
634 else
636 if (off != -1
637 && off != 0)
638 hstate.add_int (off);
639 off = -1;
640 if (deref
641 && vro->opcode == ADDR_EXPR)
643 if (vro->op0)
645 tree op = TREE_OPERAND (vro->op0, 0);
646 hstate.add_int (TREE_CODE (op));
647 inchash::add_expr (op, hstate);
650 else
651 vn_reference_op_compute_hash (vro, hstate);
654 result = hstate.end ();
655 /* ??? We would ICE later if we hash instead of adding that in. */
656 if (vr1->vuse)
657 result += SSA_NAME_VERSION (vr1->vuse);
659 return result;
662 /* Return true if reference operations VR1 and VR2 are equivalent. This
663 means they have the same set of operands and vuses. */
665 bool
666 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
668 unsigned i, j;
670 /* Early out if this is not a hash collision. */
671 if (vr1->hashcode != vr2->hashcode)
672 return false;
674 /* The VOP needs to be the same. */
675 if (vr1->vuse != vr2->vuse)
676 return false;
678 /* If the operands are the same we are done. */
679 if (vr1->operands == vr2->operands)
680 return true;
682 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
683 return false;
685 if (INTEGRAL_TYPE_P (vr1->type)
686 && INTEGRAL_TYPE_P (vr2->type))
688 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
689 return false;
691 else if (INTEGRAL_TYPE_P (vr1->type)
692 && (TYPE_PRECISION (vr1->type)
693 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
694 return false;
695 else if (INTEGRAL_TYPE_P (vr2->type)
696 && (TYPE_PRECISION (vr2->type)
697 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
698 return false;
700 i = 0;
701 j = 0;
702 do
704 HOST_WIDE_INT off1 = 0, off2 = 0;
705 vn_reference_op_t vro1, vro2;
706 vn_reference_op_s tem1, tem2;
707 bool deref1 = false, deref2 = false;
708 for (; vr1->operands.iterate (i, &vro1); i++)
710 if (vro1->opcode == MEM_REF)
711 deref1 = true;
712 if (vro1->off == -1)
713 break;
714 off1 += vro1->off;
716 for (; vr2->operands.iterate (j, &vro2); j++)
718 if (vro2->opcode == MEM_REF)
719 deref2 = true;
720 if (vro2->off == -1)
721 break;
722 off2 += vro2->off;
724 if (off1 != off2)
725 return false;
726 if (deref1 && vro1->opcode == ADDR_EXPR)
728 memset (&tem1, 0, sizeof (tem1));
729 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
730 tem1.type = TREE_TYPE (tem1.op0);
731 tem1.opcode = TREE_CODE (tem1.op0);
732 vro1 = &tem1;
733 deref1 = false;
735 if (deref2 && vro2->opcode == ADDR_EXPR)
737 memset (&tem2, 0, sizeof (tem2));
738 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
739 tem2.type = TREE_TYPE (tem2.op0);
740 tem2.opcode = TREE_CODE (tem2.op0);
741 vro2 = &tem2;
742 deref2 = false;
744 if (deref1 != deref2)
745 return false;
746 if (!vn_reference_op_eq (vro1, vro2))
747 return false;
748 ++j;
749 ++i;
751 while (vr1->operands.length () != i
752 || vr2->operands.length () != j);
754 return true;
757 /* Copy the operations present in load/store REF into RESULT, a vector of
758 vn_reference_op_s's. */
760 static void
761 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
763 if (TREE_CODE (ref) == TARGET_MEM_REF)
765 vn_reference_op_s temp;
767 result->reserve (3);
769 memset (&temp, 0, sizeof (temp));
770 temp.type = TREE_TYPE (ref);
771 temp.opcode = TREE_CODE (ref);
772 temp.op0 = TMR_INDEX (ref);
773 temp.op1 = TMR_STEP (ref);
774 temp.op2 = TMR_OFFSET (ref);
775 temp.off = -1;
776 result->quick_push (temp);
778 memset (&temp, 0, sizeof (temp));
779 temp.type = NULL_TREE;
780 temp.opcode = ERROR_MARK;
781 temp.op0 = TMR_INDEX2 (ref);
782 temp.off = -1;
783 result->quick_push (temp);
785 memset (&temp, 0, sizeof (temp));
786 temp.type = NULL_TREE;
787 temp.opcode = TREE_CODE (TMR_BASE (ref));
788 temp.op0 = TMR_BASE (ref);
789 temp.off = -1;
790 result->quick_push (temp);
791 return;
794 /* For non-calls, store the information that makes up the address. */
795 tree orig = ref;
796 while (ref)
798 vn_reference_op_s temp;
800 memset (&temp, 0, sizeof (temp));
801 temp.type = TREE_TYPE (ref);
802 temp.opcode = TREE_CODE (ref);
803 temp.off = -1;
805 switch (temp.opcode)
807 case MODIFY_EXPR:
808 temp.op0 = TREE_OPERAND (ref, 1);
809 break;
810 case WITH_SIZE_EXPR:
811 temp.op0 = TREE_OPERAND (ref, 1);
812 temp.off = 0;
813 break;
814 case MEM_REF:
815 /* The base address gets its own vn_reference_op_s structure. */
816 temp.op0 = TREE_OPERAND (ref, 1);
817 if (tree_fits_shwi_p (TREE_OPERAND (ref, 1)))
818 temp.off = tree_to_shwi (TREE_OPERAND (ref, 1));
819 break;
820 case BIT_FIELD_REF:
821 /* Record bits and position. */
822 temp.op0 = TREE_OPERAND (ref, 1);
823 temp.op1 = TREE_OPERAND (ref, 2);
824 break;
825 case COMPONENT_REF:
826 /* The field decl is enough to unambiguously specify the field;
827 a matching type is not necessary, and a mismatching type
828 is always a spurious difference. */
829 temp.type = NULL_TREE;
830 temp.op0 = TREE_OPERAND (ref, 1);
831 temp.op1 = TREE_OPERAND (ref, 2);
833 tree this_offset = component_ref_field_offset (ref);
834 if (this_offset
835 && TREE_CODE (this_offset) == INTEGER_CST)
837 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
838 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
840 offset_int off
841 = (wi::to_offset (this_offset)
842 + wi::lrshift (wi::to_offset (bit_offset),
843 LOG2_BITS_PER_UNIT));
844 if (wi::fits_shwi_p (off)
845 /* Prohibit value-numbering zero offset components
846 of addresses the same before the pass folding
847 __builtin_object_size had a chance to run
848 (checking cfun->after_inlining does the
849 trick here). */
850 && (TREE_CODE (orig) != ADDR_EXPR
851 || off != 0
852 || cfun->after_inlining))
853 temp.off = off.to_shwi ();
857 break;
858 case ARRAY_RANGE_REF:
859 case ARRAY_REF:
860 /* Record index as operand. */
861 temp.op0 = TREE_OPERAND (ref, 1);
862 /* Always record lower bounds and element size. */
863 temp.op1 = array_ref_low_bound (ref);
864 temp.op2 = array_ref_element_size (ref);
865 if (TREE_CODE (temp.op0) == INTEGER_CST
866 && TREE_CODE (temp.op1) == INTEGER_CST
867 && TREE_CODE (temp.op2) == INTEGER_CST)
869 offset_int off = ((wi::to_offset (temp.op0)
870 - wi::to_offset (temp.op1))
871 * wi::to_offset (temp.op2));
872 if (wi::fits_shwi_p (off))
873 temp.off = off.to_shwi();
875 break;
876 case VAR_DECL:
877 if (DECL_HARD_REGISTER (ref))
879 temp.op0 = ref;
880 break;
882 /* Fallthru. */
883 case PARM_DECL:
884 case CONST_DECL:
885 case RESULT_DECL:
886 /* Canonicalize decls to MEM[&decl] which is what we end up with
887 when valueizing MEM[ptr] with ptr = &decl. */
888 temp.opcode = MEM_REF;
889 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
890 temp.off = 0;
891 result->safe_push (temp);
892 temp.opcode = ADDR_EXPR;
893 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
894 temp.type = TREE_TYPE (temp.op0);
895 temp.off = -1;
896 break;
897 case STRING_CST:
898 case INTEGER_CST:
899 case COMPLEX_CST:
900 case VECTOR_CST:
901 case REAL_CST:
902 case FIXED_CST:
903 case CONSTRUCTOR:
904 case SSA_NAME:
905 temp.op0 = ref;
906 break;
907 case ADDR_EXPR:
908 if (is_gimple_min_invariant (ref))
910 temp.op0 = ref;
911 break;
913 break;
914 /* These are only interesting for their operands, their
915 existence, and their type. They will never be the last
916 ref in the chain of references (i.e. they require an
917 operand), so we don't have to put anything
918 for op* as it will be handled by the iteration */
919 case REALPART_EXPR:
920 case VIEW_CONVERT_EXPR:
921 temp.off = 0;
922 break;
923 case IMAGPART_EXPR:
924 /* This is only interesting for its constant offset. */
925 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
926 break;
927 default:
928 gcc_unreachable ();
930 result->safe_push (temp);
932 if (REFERENCE_CLASS_P (ref)
933 || TREE_CODE (ref) == MODIFY_EXPR
934 || TREE_CODE (ref) == WITH_SIZE_EXPR
935 || (TREE_CODE (ref) == ADDR_EXPR
936 && !is_gimple_min_invariant (ref)))
937 ref = TREE_OPERAND (ref, 0);
938 else
939 ref = NULL_TREE;
943 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
944 operands in *OPS, the reference alias set SET and the reference type TYPE.
945 Return true if something useful was produced. */
947 bool
948 ao_ref_init_from_vn_reference (ao_ref *ref,
949 alias_set_type set, tree type,
950 vec<vn_reference_op_s> ops)
952 vn_reference_op_t op;
953 unsigned i;
954 tree base = NULL_TREE;
955 tree *op0_p = &base;
956 HOST_WIDE_INT offset = 0;
957 HOST_WIDE_INT max_size;
958 HOST_WIDE_INT size = -1;
959 tree size_tree = NULL_TREE;
960 alias_set_type base_alias_set = -1;
962 /* First get the final access size from just the outermost expression. */
963 op = &ops[0];
964 if (op->opcode == COMPONENT_REF)
965 size_tree = DECL_SIZE (op->op0);
966 else if (op->opcode == BIT_FIELD_REF)
967 size_tree = op->op0;
968 else
970 machine_mode mode = TYPE_MODE (type);
971 if (mode == BLKmode)
972 size_tree = TYPE_SIZE (type);
973 else
974 size = GET_MODE_BITSIZE (mode);
976 if (size_tree != NULL_TREE)
978 if (!tree_fits_uhwi_p (size_tree))
979 size = -1;
980 else
981 size = tree_to_uhwi (size_tree);
984 /* Initially, maxsize is the same as the accessed element size.
985 In the following it will only grow (or become -1). */
986 max_size = size;
988 /* Compute cumulative bit-offset for nested component-refs and array-refs,
989 and find the ultimate containing object. */
990 FOR_EACH_VEC_ELT (ops, i, op)
992 switch (op->opcode)
994 /* These may be in the reference ops, but we cannot do anything
995 sensible with them here. */
996 case ADDR_EXPR:
997 /* Apart from ADDR_EXPR arguments to MEM_REF. */
998 if (base != NULL_TREE
999 && TREE_CODE (base) == MEM_REF
1000 && op->op0
1001 && DECL_P (TREE_OPERAND (op->op0, 0)))
1003 vn_reference_op_t pop = &ops[i-1];
1004 base = TREE_OPERAND (op->op0, 0);
1005 if (pop->off == -1)
1007 max_size = -1;
1008 offset = 0;
1010 else
1011 offset += pop->off * BITS_PER_UNIT;
1012 op0_p = NULL;
1013 break;
1015 /* Fallthru. */
1016 case CALL_EXPR:
1017 return false;
1019 /* Record the base objects. */
1020 case MEM_REF:
1021 base_alias_set = get_deref_alias_set (op->op0);
1022 *op0_p = build2 (MEM_REF, op->type,
1023 NULL_TREE, op->op0);
1024 op0_p = &TREE_OPERAND (*op0_p, 0);
1025 break;
1027 case VAR_DECL:
1028 case PARM_DECL:
1029 case RESULT_DECL:
1030 case SSA_NAME:
1031 *op0_p = op->op0;
1032 op0_p = NULL;
1033 break;
1035 /* And now the usual component-reference style ops. */
1036 case BIT_FIELD_REF:
1037 offset += tree_to_shwi (op->op1);
1038 break;
1040 case COMPONENT_REF:
1042 tree field = op->op0;
1043 /* We do not have a complete COMPONENT_REF tree here so we
1044 cannot use component_ref_field_offset. Do the interesting
1045 parts manually. */
1047 if (op->op1
1048 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
1049 max_size = -1;
1050 else
1052 offset += (tree_to_uhwi (DECL_FIELD_OFFSET (field))
1053 * BITS_PER_UNIT);
1054 offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1056 break;
1059 case ARRAY_RANGE_REF:
1060 case ARRAY_REF:
1061 /* We recorded the lower bound and the element size. */
1062 if (!tree_fits_shwi_p (op->op0)
1063 || !tree_fits_shwi_p (op->op1)
1064 || !tree_fits_shwi_p (op->op2))
1065 max_size = -1;
1066 else
1068 HOST_WIDE_INT hindex = tree_to_shwi (op->op0);
1069 hindex -= tree_to_shwi (op->op1);
1070 hindex *= tree_to_shwi (op->op2);
1071 hindex *= BITS_PER_UNIT;
1072 offset += hindex;
1074 break;
1076 case REALPART_EXPR:
1077 break;
1079 case IMAGPART_EXPR:
1080 offset += size;
1081 break;
1083 case VIEW_CONVERT_EXPR:
1084 break;
1086 case STRING_CST:
1087 case INTEGER_CST:
1088 case COMPLEX_CST:
1089 case VECTOR_CST:
1090 case REAL_CST:
1091 case CONSTRUCTOR:
1092 case CONST_DECL:
1093 return false;
1095 default:
1096 return false;
1100 if (base == NULL_TREE)
1101 return false;
1103 ref->ref = NULL_TREE;
1104 ref->base = base;
1105 ref->offset = offset;
1106 ref->size = size;
1107 ref->max_size = max_size;
1108 ref->ref_alias_set = set;
1109 if (base_alias_set != -1)
1110 ref->base_alias_set = base_alias_set;
1111 else
1112 ref->base_alias_set = get_alias_set (base);
1113 /* We discount volatiles from value-numbering elsewhere. */
1114 ref->volatile_p = false;
1116 return true;
1119 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1120 vn_reference_op_s's. */
1122 static void
1123 copy_reference_ops_from_call (gcall *call,
1124 vec<vn_reference_op_s> *result)
1126 vn_reference_op_s temp;
1127 unsigned i;
1128 tree lhs = gimple_call_lhs (call);
1129 int lr;
1131 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1132 different. By adding the lhs here in the vector, we ensure that the
1133 hashcode is different, guaranteeing a different value number. */
1134 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1136 memset (&temp, 0, sizeof (temp));
1137 temp.opcode = MODIFY_EXPR;
1138 temp.type = TREE_TYPE (lhs);
1139 temp.op0 = lhs;
1140 temp.off = -1;
1141 result->safe_push (temp);
1144 /* Copy the type, opcode, function, static chain and EH region, if any. */
1145 memset (&temp, 0, sizeof (temp));
1146 temp.type = gimple_call_return_type (call);
1147 temp.opcode = CALL_EXPR;
1148 temp.op0 = gimple_call_fn (call);
1149 temp.op1 = gimple_call_chain (call);
1150 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1151 temp.op2 = size_int (lr);
1152 temp.off = -1;
1153 if (gimple_call_with_bounds_p (call))
1154 temp.with_bounds = 1;
1155 result->safe_push (temp);
1157 /* Copy the call arguments. As they can be references as well,
1158 just chain them together. */
1159 for (i = 0; i < gimple_call_num_args (call); ++i)
1161 tree callarg = gimple_call_arg (call, i);
1162 copy_reference_ops_from_ref (callarg, result);
1166 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1167 *I_P to point to the last element of the replacement. */
1168 void
1169 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1170 unsigned int *i_p)
1172 unsigned int i = *i_p;
1173 vn_reference_op_t op = &(*ops)[i];
1174 vn_reference_op_t mem_op = &(*ops)[i - 1];
1175 tree addr_base;
1176 HOST_WIDE_INT addr_offset = 0;
1178 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1179 from .foo.bar to the preceding MEM_REF offset and replace the
1180 address with &OBJ. */
1181 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1182 &addr_offset);
1183 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1184 if (addr_base != TREE_OPERAND (op->op0, 0))
1186 offset_int off = offset_int::from (mem_op->op0, SIGNED);
1187 off += addr_offset;
1188 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1189 op->op0 = build_fold_addr_expr (addr_base);
1190 if (tree_fits_shwi_p (mem_op->op0))
1191 mem_op->off = tree_to_shwi (mem_op->op0);
1192 else
1193 mem_op->off = -1;
1197 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1198 *I_P to point to the last element of the replacement. */
1199 static void
1200 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1201 unsigned int *i_p)
1203 unsigned int i = *i_p;
1204 vn_reference_op_t op = &(*ops)[i];
1205 vn_reference_op_t mem_op = &(*ops)[i - 1];
1206 gimple def_stmt;
1207 enum tree_code code;
1208 offset_int off;
1210 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1211 if (!is_gimple_assign (def_stmt))
1212 return;
1214 code = gimple_assign_rhs_code (def_stmt);
1215 if (code != ADDR_EXPR
1216 && code != POINTER_PLUS_EXPR)
1217 return;
1219 off = offset_int::from (mem_op->op0, SIGNED);
1221 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1222 from .foo.bar to the preceding MEM_REF offset and replace the
1223 address with &OBJ. */
1224 if (code == ADDR_EXPR)
1226 tree addr, addr_base;
1227 HOST_WIDE_INT addr_offset;
1229 addr = gimple_assign_rhs1 (def_stmt);
1230 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1231 &addr_offset);
1232 /* If that didn't work because the address isn't invariant, propagate
1233 the reference tree from the address operation in case the current
1234 dereference has no offset. */
1235 if (!addr_base
1236 && *i_p == ops->length () - 1
1237 && off == 0
1238 /* This disables the transform for PRE, where the reference
1239 ops might also be used for code insertion, which
1240 would be invalid. */
1241 && default_vn_walk_kind == VN_WALKREWRITE)
1243 auto_vec<vn_reference_op_s, 32> tem;
1244 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1245 ops->pop ();
1246 ops->pop ();
1247 ops->safe_splice (tem);
1248 --*i_p;
1249 return;
1251 if (!addr_base
1252 || TREE_CODE (addr_base) != MEM_REF)
1253 return;
1255 off += addr_offset;
1256 off += mem_ref_offset (addr_base);
1257 op->op0 = TREE_OPERAND (addr_base, 0);
1259 else
1261 tree ptr, ptroff;
1262 ptr = gimple_assign_rhs1 (def_stmt);
1263 ptroff = gimple_assign_rhs2 (def_stmt);
1264 if (TREE_CODE (ptr) != SSA_NAME
1265 || TREE_CODE (ptroff) != INTEGER_CST)
1266 return;
1268 off += wi::to_offset (ptroff);
1269 op->op0 = ptr;
1272 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1273 if (tree_fits_shwi_p (mem_op->op0))
1274 mem_op->off = tree_to_shwi (mem_op->op0);
1275 else
1276 mem_op->off = -1;
1277 if (TREE_CODE (op->op0) == SSA_NAME)
1278 op->op0 = SSA_VAL (op->op0);
1279 if (TREE_CODE (op->op0) != SSA_NAME)
1280 op->opcode = TREE_CODE (op->op0);
1282 /* And recurse. */
1283 if (TREE_CODE (op->op0) == SSA_NAME)
1284 vn_reference_maybe_forwprop_address (ops, i_p);
1285 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1286 vn_reference_fold_indirect (ops, i_p);
1289 /* Optimize the reference REF to a constant if possible or return
1290 NULL_TREE if not. */
1292 tree
1293 fully_constant_vn_reference_p (vn_reference_t ref)
1295 vec<vn_reference_op_s> operands = ref->operands;
1296 vn_reference_op_t op;
1298 /* Try to simplify the translated expression if it is
1299 a call to a builtin function with at most two arguments. */
1300 op = &operands[0];
1301 if (op->opcode == CALL_EXPR
1302 && TREE_CODE (op->op0) == ADDR_EXPR
1303 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1304 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1305 && operands.length () >= 2
1306 && operands.length () <= 3)
1308 vn_reference_op_t arg0, arg1 = NULL;
1309 bool anyconst = false;
1310 arg0 = &operands[1];
1311 if (operands.length () > 2)
1312 arg1 = &operands[2];
1313 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1314 || (arg0->opcode == ADDR_EXPR
1315 && is_gimple_min_invariant (arg0->op0)))
1316 anyconst = true;
1317 if (arg1
1318 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1319 || (arg1->opcode == ADDR_EXPR
1320 && is_gimple_min_invariant (arg1->op0))))
1321 anyconst = true;
1322 if (anyconst)
1324 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1325 arg1 ? 2 : 1,
1326 arg0->op0,
1327 arg1 ? arg1->op0 : NULL);
1328 if (folded
1329 && TREE_CODE (folded) == NOP_EXPR)
1330 folded = TREE_OPERAND (folded, 0);
1331 if (folded
1332 && is_gimple_min_invariant (folded))
1333 return folded;
1337 /* Simplify reads from constants or constant initializers. */
1338 else if (BITS_PER_UNIT == 8
1339 && is_gimple_reg_type (ref->type)
1340 && (!INTEGRAL_TYPE_P (ref->type)
1341 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1343 HOST_WIDE_INT off = 0;
1344 HOST_WIDE_INT size;
1345 if (INTEGRAL_TYPE_P (ref->type))
1346 size = TYPE_PRECISION (ref->type);
1347 else
1348 size = tree_to_shwi (TYPE_SIZE (ref->type));
1349 if (size % BITS_PER_UNIT != 0
1350 || size > MAX_BITSIZE_MODE_ANY_MODE)
1351 return NULL_TREE;
1352 size /= BITS_PER_UNIT;
1353 unsigned i;
1354 for (i = 0; i < operands.length (); ++i)
1356 if (operands[i].off == -1)
1357 return NULL_TREE;
1358 off += operands[i].off;
1359 if (operands[i].opcode == MEM_REF)
1361 ++i;
1362 break;
1365 vn_reference_op_t base = &operands[--i];
1366 tree ctor = error_mark_node;
1367 tree decl = NULL_TREE;
1368 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1369 ctor = base->op0;
1370 else if (base->opcode == MEM_REF
1371 && base[1].opcode == ADDR_EXPR
1372 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1373 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
1375 decl = TREE_OPERAND (base[1].op0, 0);
1376 ctor = ctor_for_folding (decl);
1378 if (ctor == NULL_TREE)
1379 return build_zero_cst (ref->type);
1380 else if (ctor != error_mark_node)
1382 if (decl)
1384 tree res = fold_ctor_reference (ref->type, ctor,
1385 off * BITS_PER_UNIT,
1386 size * BITS_PER_UNIT, decl);
1387 if (res)
1389 STRIP_USELESS_TYPE_CONVERSION (res);
1390 if (is_gimple_min_invariant (res))
1391 return res;
1394 else
1396 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1397 if (native_encode_expr (ctor, buf, size, off) > 0)
1398 return native_interpret_expr (ref->type, buf, size);
1403 return NULL_TREE;
1406 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1407 structures into their value numbers. This is done in-place, and
1408 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1409 whether any operands were valueized. */
1411 static vec<vn_reference_op_s>
1412 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1414 vn_reference_op_t vro;
1415 unsigned int i;
1417 *valueized_anything = false;
1419 FOR_EACH_VEC_ELT (orig, i, vro)
1421 if (vro->opcode == SSA_NAME
1422 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1424 tree tem = SSA_VAL (vro->op0);
1425 if (tem != vro->op0)
1427 *valueized_anything = true;
1428 vro->op0 = tem;
1430 /* If it transforms from an SSA_NAME to a constant, update
1431 the opcode. */
1432 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1433 vro->opcode = TREE_CODE (vro->op0);
1435 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1437 tree tem = SSA_VAL (vro->op1);
1438 if (tem != vro->op1)
1440 *valueized_anything = true;
1441 vro->op1 = tem;
1444 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1446 tree tem = SSA_VAL (vro->op2);
1447 if (tem != vro->op2)
1449 *valueized_anything = true;
1450 vro->op2 = tem;
1453 /* If it transforms from an SSA_NAME to an address, fold with
1454 a preceding indirect reference. */
1455 if (i > 0
1456 && vro->op0
1457 && TREE_CODE (vro->op0) == ADDR_EXPR
1458 && orig[i - 1].opcode == MEM_REF)
1459 vn_reference_fold_indirect (&orig, &i);
1460 else if (i > 0
1461 && vro->opcode == SSA_NAME
1462 && orig[i - 1].opcode == MEM_REF)
1463 vn_reference_maybe_forwprop_address (&orig, &i);
1464 /* If it transforms a non-constant ARRAY_REF into a constant
1465 one, adjust the constant offset. */
1466 else if (vro->opcode == ARRAY_REF
1467 && vro->off == -1
1468 && TREE_CODE (vro->op0) == INTEGER_CST
1469 && TREE_CODE (vro->op1) == INTEGER_CST
1470 && TREE_CODE (vro->op2) == INTEGER_CST)
1472 offset_int off = ((wi::to_offset (vro->op0)
1473 - wi::to_offset (vro->op1))
1474 * wi::to_offset (vro->op2));
1475 if (wi::fits_shwi_p (off))
1476 vro->off = off.to_shwi ();
1480 return orig;
1483 static vec<vn_reference_op_s>
1484 valueize_refs (vec<vn_reference_op_s> orig)
1486 bool tem;
1487 return valueize_refs_1 (orig, &tem);
1490 static vec<vn_reference_op_s> shared_lookup_references;
1492 /* Create a vector of vn_reference_op_s structures from REF, a
1493 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1494 this function. *VALUEIZED_ANYTHING will specify whether any
1495 operands were valueized. */
1497 static vec<vn_reference_op_s>
1498 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1500 if (!ref)
1501 return vNULL;
1502 shared_lookup_references.truncate (0);
1503 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1504 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1505 valueized_anything);
1506 return shared_lookup_references;
1509 /* Create a vector of vn_reference_op_s structures from CALL, a
1510 call statement. The vector is shared among all callers of
1511 this function. */
1513 static vec<vn_reference_op_s>
1514 valueize_shared_reference_ops_from_call (gcall *call)
1516 if (!call)
1517 return vNULL;
1518 shared_lookup_references.truncate (0);
1519 copy_reference_ops_from_call (call, &shared_lookup_references);
1520 shared_lookup_references = valueize_refs (shared_lookup_references);
1521 return shared_lookup_references;
1524 /* Lookup a SCCVN reference operation VR in the current hash table.
1525 Returns the resulting value number if it exists in the hash table,
1526 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1527 vn_reference_t stored in the hashtable if something is found. */
1529 static tree
1530 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1532 vn_reference_s **slot;
1533 hashval_t hash;
1535 hash = vr->hashcode;
1536 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1537 if (!slot && current_info == optimistic_info)
1538 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1539 if (slot)
1541 if (vnresult)
1542 *vnresult = (vn_reference_t)*slot;
1543 return ((vn_reference_t)*slot)->result;
1546 return NULL_TREE;
1549 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1550 with the current VUSE and performs the expression lookup. */
1552 static void *
1553 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1554 unsigned int cnt, void *vr_)
1556 vn_reference_t vr = (vn_reference_t)vr_;
1557 vn_reference_s **slot;
1558 hashval_t hash;
1560 /* This bounds the stmt walks we perform on reference lookups
1561 to O(1) instead of O(N) where N is the number of dominating
1562 stores. */
1563 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1564 return (void *)-1;
1566 if (last_vuse_ptr)
1567 *last_vuse_ptr = vuse;
1569 /* Fixup vuse and hash. */
1570 if (vr->vuse)
1571 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1572 vr->vuse = vuse_ssa_val (vuse);
1573 if (vr->vuse)
1574 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1576 hash = vr->hashcode;
1577 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1578 if (!slot && current_info == optimistic_info)
1579 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1580 if (slot)
1581 return *slot;
1583 return NULL;
1586 /* Lookup an existing or insert a new vn_reference entry into the
1587 value table for the VUSE, SET, TYPE, OPERANDS reference which
1588 has the value VALUE which is either a constant or an SSA name. */
1590 static vn_reference_t
1591 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1592 alias_set_type set,
1593 tree type,
1594 vec<vn_reference_op_s,
1595 va_heap> operands,
1596 tree value)
1598 vn_reference_s vr1;
1599 vn_reference_t result;
1600 unsigned value_id;
1601 vr1.vuse = vuse;
1602 vr1.operands = operands;
1603 vr1.type = type;
1604 vr1.set = set;
1605 vr1.hashcode = vn_reference_compute_hash (&vr1);
1606 if (vn_reference_lookup_1 (&vr1, &result))
1607 return result;
1608 if (TREE_CODE (value) == SSA_NAME)
1609 value_id = VN_INFO (value)->value_id;
1610 else
1611 value_id = get_or_alloc_constant_value_id (value);
1612 return vn_reference_insert_pieces (vuse, set, type,
1613 operands.copy (), value, value_id);
1616 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1617 from the statement defining VUSE and if not successful tries to
1618 translate *REFP and VR_ through an aggregate copy at the definition
1619 of VUSE. */
1621 static void *
1622 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1623 bool disambiguate_only)
1625 vn_reference_t vr = (vn_reference_t)vr_;
1626 gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
1627 tree base;
1628 HOST_WIDE_INT offset, maxsize;
1629 static vec<vn_reference_op_s>
1630 lhs_ops = vNULL;
1631 ao_ref lhs_ref;
1632 bool lhs_ref_ok = false;
1634 /* First try to disambiguate after value-replacing in the definitions LHS. */
1635 if (is_gimple_assign (def_stmt))
1637 tree lhs = gimple_assign_lhs (def_stmt);
1638 bool valueized_anything = false;
1639 /* Avoid re-allocation overhead. */
1640 lhs_ops.truncate (0);
1641 copy_reference_ops_from_ref (lhs, &lhs_ops);
1642 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1643 if (valueized_anything)
1645 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1646 get_alias_set (lhs),
1647 TREE_TYPE (lhs), lhs_ops);
1648 if (lhs_ref_ok
1649 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1650 return NULL;
1652 else
1654 ao_ref_init (&lhs_ref, lhs);
1655 lhs_ref_ok = true;
1658 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1659 && gimple_call_num_args (def_stmt) <= 4)
1661 /* For builtin calls, valueize their arguments and call the
1662 alias oracle again. Valueization may improve points-to
1663 info of pointers and constify size and position arguments.
1664 Originally this was motivated by PR61034 which has
1665 conditional calls to free falsely clobbering ref because
1666 of imprecise points-to info of the argument. */
1667 tree oldargs[4];
1668 bool valueized_anything = false;
1669 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1671 oldargs[i] = gimple_call_arg (def_stmt, i);
1672 if (TREE_CODE (oldargs[i]) == SSA_NAME
1673 && VN_INFO (oldargs[i])->valnum != oldargs[i])
1675 gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
1676 valueized_anything = true;
1679 if (valueized_anything)
1681 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1682 ref);
1683 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1684 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1685 if (!res)
1686 return NULL;
1690 if (disambiguate_only)
1691 return (void *)-1;
1693 base = ao_ref_base (ref);
1694 offset = ref->offset;
1695 maxsize = ref->max_size;
1697 /* If we cannot constrain the size of the reference we cannot
1698 test if anything kills it. */
1699 if (maxsize == -1)
1700 return (void *)-1;
1702 /* We can't deduce anything useful from clobbers. */
1703 if (gimple_clobber_p (def_stmt))
1704 return (void *)-1;
1706 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1707 from that definition.
1708 1) Memset. */
1709 if (is_gimple_reg_type (vr->type)
1710 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1711 && integer_zerop (gimple_call_arg (def_stmt, 1))
1712 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1713 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1715 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1716 tree base2;
1717 HOST_WIDE_INT offset2, size2, maxsize2;
1718 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
1719 size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
1720 if ((unsigned HOST_WIDE_INT)size2 / 8
1721 == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
1722 && maxsize2 != -1
1723 && operand_equal_p (base, base2, 0)
1724 && offset2 <= offset
1725 && offset2 + size2 >= offset + maxsize)
1727 tree val = build_zero_cst (vr->type);
1728 return vn_reference_lookup_or_insert_for_pieces
1729 (vuse, vr->set, vr->type, vr->operands, val);
1733 /* 2) Assignment from an empty CONSTRUCTOR. */
1734 else if (is_gimple_reg_type (vr->type)
1735 && gimple_assign_single_p (def_stmt)
1736 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1737 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1739 tree base2;
1740 HOST_WIDE_INT offset2, size2, maxsize2;
1741 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1742 &offset2, &size2, &maxsize2);
1743 if (maxsize2 != -1
1744 && operand_equal_p (base, base2, 0)
1745 && offset2 <= offset
1746 && offset2 + size2 >= offset + maxsize)
1748 tree val = build_zero_cst (vr->type);
1749 return vn_reference_lookup_or_insert_for_pieces
1750 (vuse, vr->set, vr->type, vr->operands, val);
1754 /* 3) Assignment from a constant. We can use fold's native encode/interpret
1755 routines to extract the assigned bits. */
1756 else if (vn_walk_kind == VN_WALKREWRITE
1757 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1758 && ref->size == maxsize
1759 && maxsize % BITS_PER_UNIT == 0
1760 && offset % BITS_PER_UNIT == 0
1761 && is_gimple_reg_type (vr->type)
1762 && gimple_assign_single_p (def_stmt)
1763 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
1765 tree base2;
1766 HOST_WIDE_INT offset2, size2, maxsize2;
1767 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1768 &offset2, &size2, &maxsize2);
1769 if (maxsize2 != -1
1770 && maxsize2 == size2
1771 && size2 % BITS_PER_UNIT == 0
1772 && offset2 % BITS_PER_UNIT == 0
1773 && operand_equal_p (base, base2, 0)
1774 && offset2 <= offset
1775 && offset2 + size2 >= offset + maxsize)
1777 /* We support up to 512-bit values (for V8DFmode). */
1778 unsigned char buffer[64];
1779 int len;
1781 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1782 buffer, sizeof (buffer));
1783 if (len > 0)
1785 tree val = native_interpret_expr (vr->type,
1786 buffer
1787 + ((offset - offset2)
1788 / BITS_PER_UNIT),
1789 ref->size / BITS_PER_UNIT);
1790 if (val)
1791 return vn_reference_lookup_or_insert_for_pieces
1792 (vuse, vr->set, vr->type, vr->operands, val);
1797 /* 4) Assignment from an SSA name which definition we may be able
1798 to access pieces from. */
1799 else if (ref->size == maxsize
1800 && is_gimple_reg_type (vr->type)
1801 && gimple_assign_single_p (def_stmt)
1802 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1804 tree rhs1 = gimple_assign_rhs1 (def_stmt);
1805 gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
1806 if (is_gimple_assign (def_stmt2)
1807 && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1808 || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1809 && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1811 tree base2;
1812 HOST_WIDE_INT offset2, size2, maxsize2, off;
1813 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1814 &offset2, &size2, &maxsize2);
1815 off = offset - offset2;
1816 if (maxsize2 != -1
1817 && maxsize2 == size2
1818 && operand_equal_p (base, base2, 0)
1819 && offset2 <= offset
1820 && offset2 + size2 >= offset + maxsize)
1822 tree val = NULL_TREE;
1823 HOST_WIDE_INT elsz
1824 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
1825 if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
1827 if (off == 0)
1828 val = gimple_assign_rhs1 (def_stmt2);
1829 else if (off == elsz)
1830 val = gimple_assign_rhs2 (def_stmt2);
1832 else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1833 && off % elsz == 0)
1835 tree ctor = gimple_assign_rhs1 (def_stmt2);
1836 unsigned i = off / elsz;
1837 if (i < CONSTRUCTOR_NELTS (ctor))
1839 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
1840 if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
1842 if (TREE_CODE (TREE_TYPE (elt->value))
1843 != VECTOR_TYPE)
1844 val = elt->value;
1848 if (val)
1849 return vn_reference_lookup_or_insert_for_pieces
1850 (vuse, vr->set, vr->type, vr->operands, val);
1855 /* 5) For aggregate copies translate the reference through them if
1856 the copy kills ref. */
1857 else if (vn_walk_kind == VN_WALKREWRITE
1858 && gimple_assign_single_p (def_stmt)
1859 && (DECL_P (gimple_assign_rhs1 (def_stmt))
1860 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
1861 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1863 tree base2;
1864 HOST_WIDE_INT offset2, size2, maxsize2;
1865 int i, j;
1866 auto_vec<vn_reference_op_s> rhs;
1867 vn_reference_op_t vro;
1868 ao_ref r;
1870 if (!lhs_ref_ok)
1871 return (void *)-1;
1873 /* See if the assignment kills REF. */
1874 base2 = ao_ref_base (&lhs_ref);
1875 offset2 = lhs_ref.offset;
1876 size2 = lhs_ref.size;
1877 maxsize2 = lhs_ref.max_size;
1878 if (maxsize2 == -1
1879 || (base != base2
1880 && (TREE_CODE (base) != MEM_REF
1881 || TREE_CODE (base2) != MEM_REF
1882 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
1883 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
1884 TREE_OPERAND (base2, 1))))
1885 || offset2 > offset
1886 || offset2 + size2 < offset + maxsize)
1887 return (void *)-1;
1889 /* Find the common base of ref and the lhs. lhs_ops already
1890 contains valueized operands for the lhs. */
1891 i = vr->operands.length () - 1;
1892 j = lhs_ops.length () - 1;
1893 while (j >= 0 && i >= 0
1894 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
1896 i--;
1897 j--;
1900 /* ??? The innermost op should always be a MEM_REF and we already
1901 checked that the assignment to the lhs kills vr. Thus for
1902 aggregate copies using char[] types the vn_reference_op_eq
1903 may fail when comparing types for compatibility. But we really
1904 don't care here - further lookups with the rewritten operands
1905 will simply fail if we messed up types too badly. */
1906 HOST_WIDE_INT extra_off = 0;
1907 if (j == 0 && i >= 0
1908 && lhs_ops[0].opcode == MEM_REF
1909 && lhs_ops[0].off != -1)
1911 if (lhs_ops[0].off == vr->operands[i].off)
1912 i--, j--;
1913 else if (vr->operands[i].opcode == MEM_REF
1914 && vr->operands[i].off != -1)
1916 extra_off = vr->operands[i].off - lhs_ops[0].off;
1917 i--, j--;
1921 /* i now points to the first additional op.
1922 ??? LHS may not be completely contained in VR, one or more
1923 VIEW_CONVERT_EXPRs could be in its way. We could at least
1924 try handling outermost VIEW_CONVERT_EXPRs. */
1925 if (j != -1)
1926 return (void *)-1;
1928 /* Now re-write REF to be based on the rhs of the assignment. */
1929 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1931 /* Apply an extra offset to the inner MEM_REF of the RHS. */
1932 if (extra_off != 0)
1934 if (rhs.length () < 2
1935 || rhs[0].opcode != MEM_REF
1936 || rhs[0].off == -1)
1937 return (void *)-1;
1938 rhs[0].off += extra_off;
1939 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
1940 build_int_cst (TREE_TYPE (rhs[0].op0),
1941 extra_off));
1944 /* We need to pre-pend vr->operands[0..i] to rhs. */
1945 vec<vn_reference_op_s> old = vr->operands;
1946 if (i + 1 + rhs.length () > vr->operands.length ())
1948 vr->operands.safe_grow (i + 1 + rhs.length ());
1949 if (old == shared_lookup_references)
1950 shared_lookup_references = vr->operands;
1952 else
1953 vr->operands.truncate (i + 1 + rhs.length ());
1954 FOR_EACH_VEC_ELT (rhs, j, vro)
1955 vr->operands[i + 1 + j] = *vro;
1956 vr->operands = valueize_refs (vr->operands);
1957 if (old == shared_lookup_references)
1958 shared_lookup_references = vr->operands;
1959 vr->hashcode = vn_reference_compute_hash (vr);
1961 /* Try folding the new reference to a constant. */
1962 tree val = fully_constant_vn_reference_p (vr);
1963 if (val)
1964 return vn_reference_lookup_or_insert_for_pieces
1965 (vuse, vr->set, vr->type, vr->operands, val);
1967 /* Adjust *ref from the new operands. */
1968 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1969 return (void *)-1;
1970 /* This can happen with bitfields. */
1971 if (ref->size != r.size)
1972 return (void *)-1;
1973 *ref = r;
1975 /* Do not update last seen VUSE after translating. */
1976 last_vuse_ptr = NULL;
1978 /* Keep looking for the adjusted *REF / VR pair. */
1979 return NULL;
1982 /* 6) For memcpy copies translate the reference through them if
1983 the copy kills ref. */
1984 else if (vn_walk_kind == VN_WALKREWRITE
1985 && is_gimple_reg_type (vr->type)
1986 /* ??? Handle BCOPY as well. */
1987 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
1988 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
1989 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
1990 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
1991 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
1992 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
1993 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
1994 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
1996 tree lhs, rhs;
1997 ao_ref r;
1998 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
1999 vn_reference_op_s op;
2000 HOST_WIDE_INT at;
2003 /* Only handle non-variable, addressable refs. */
2004 if (ref->size != maxsize
2005 || offset % BITS_PER_UNIT != 0
2006 || ref->size % BITS_PER_UNIT != 0)
2007 return (void *)-1;
2009 /* Extract a pointer base and an offset for the destination. */
2010 lhs = gimple_call_arg (def_stmt, 0);
2011 lhs_offset = 0;
2012 if (TREE_CODE (lhs) == SSA_NAME)
2014 lhs = SSA_VAL (lhs);
2015 if (TREE_CODE (lhs) == SSA_NAME)
2017 gimple def_stmt = SSA_NAME_DEF_STMT (lhs);
2018 if (gimple_assign_single_p (def_stmt)
2019 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2020 lhs = gimple_assign_rhs1 (def_stmt);
2023 if (TREE_CODE (lhs) == ADDR_EXPR)
2025 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2026 &lhs_offset);
2027 if (!tem)
2028 return (void *)-1;
2029 if (TREE_CODE (tem) == MEM_REF
2030 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2032 lhs = TREE_OPERAND (tem, 0);
2033 if (TREE_CODE (lhs) == SSA_NAME)
2034 lhs = SSA_VAL (lhs);
2035 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2037 else if (DECL_P (tem))
2038 lhs = build_fold_addr_expr (tem);
2039 else
2040 return (void *)-1;
2042 if (TREE_CODE (lhs) != SSA_NAME
2043 && TREE_CODE (lhs) != ADDR_EXPR)
2044 return (void *)-1;
2046 /* Extract a pointer base and an offset for the source. */
2047 rhs = gimple_call_arg (def_stmt, 1);
2048 rhs_offset = 0;
2049 if (TREE_CODE (rhs) == SSA_NAME)
2050 rhs = SSA_VAL (rhs);
2051 if (TREE_CODE (rhs) == ADDR_EXPR)
2053 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2054 &rhs_offset);
2055 if (!tem)
2056 return (void *)-1;
2057 if (TREE_CODE (tem) == MEM_REF
2058 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2060 rhs = TREE_OPERAND (tem, 0);
2061 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2063 else if (DECL_P (tem))
2064 rhs = build_fold_addr_expr (tem);
2065 else
2066 return (void *)-1;
2068 if (TREE_CODE (rhs) != SSA_NAME
2069 && TREE_CODE (rhs) != ADDR_EXPR)
2070 return (void *)-1;
2072 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2074 /* The bases of the destination and the references have to agree. */
2075 if ((TREE_CODE (base) != MEM_REF
2076 && !DECL_P (base))
2077 || (TREE_CODE (base) == MEM_REF
2078 && (TREE_OPERAND (base, 0) != lhs
2079 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
2080 || (DECL_P (base)
2081 && (TREE_CODE (lhs) != ADDR_EXPR
2082 || TREE_OPERAND (lhs, 0) != base)))
2083 return (void *)-1;
2085 at = offset / BITS_PER_UNIT;
2086 if (TREE_CODE (base) == MEM_REF)
2087 at += tree_to_uhwi (TREE_OPERAND (base, 1));
2088 /* If the access is completely outside of the memcpy destination
2089 	 area, there is no aliasing.  */
2090 if (lhs_offset >= at + maxsize / BITS_PER_UNIT
2091 || lhs_offset + copy_size <= at)
2092 return NULL;
2093 /* And the access has to be contained within the memcpy destination. */
2094 if (lhs_offset > at
2095 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2096 return (void *)-1;
2098 /* Make room for 2 operands in the new reference. */
2099 if (vr->operands.length () < 2)
2101 vec<vn_reference_op_s> old = vr->operands;
2102 vr->operands.safe_grow_cleared (2);
2103 if (old == shared_lookup_references
2104 && vr->operands != old)
2105 shared_lookup_references = vr->operands;
2107 else
2108 vr->operands.truncate (2);
2110 /* The looked-through reference is a simple MEM_REF. */
2111 memset (&op, 0, sizeof (op));
2112 op.type = vr->type;
2113 op.opcode = MEM_REF;
2114 op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
2115 op.off = at - lhs_offset + rhs_offset;
2116 vr->operands[0] = op;
2117 op.type = TREE_TYPE (rhs);
2118 op.opcode = TREE_CODE (rhs);
2119 op.op0 = rhs;
2120 op.off = -1;
2121 vr->operands[1] = op;
2122 vr->hashcode = vn_reference_compute_hash (vr);
2124 /* Adjust *ref from the new operands. */
2125 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2126 return (void *)-1;
2127 /* This can happen with bitfields. */
2128 if (ref->size != r.size)
2129 return (void *)-1;
2130 *ref = r;
2132 /* Do not update last seen VUSE after translating. */
2133 last_vuse_ptr = NULL;
2135 /* Keep looking for the adjusted *REF / VR pair. */
2136 return NULL;
2139 /* Bail out and stop walking. */
2140 return (void *)-1;
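
/* Editorial sketch, not part of SCCVN and not used by this file: the
   byte-range reasoning of the memcpy case above, restated with plain
   integers.  The function name and the three-way result encoding are
   hypothetical.  Given an access covering [at, at + size) and a copy
   destination covering [dst_off, dst_off + copy_size), return 0 when they
   are disjoint (no aliasing, keep walking), 1 when the access is fully
   covered by the copy (translation through the copy is possible), and -1
   otherwise (give up).  */

static int
sketch_classify_overlap (long long at, long long size,
			 long long dst_off, long long copy_size)
{
  if (dst_off >= at + size || dst_off + copy_size <= at)
    return 0;	/* Completely outside the destination area.  */
  if (dst_off <= at && at + size <= dst_off + copy_size)
    return 1;	/* Contained within the destination area.  */
  return -1;	/* Partial overlap; the copy cannot be looked through.  */
}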
2143 /* Lookup a reference operation by its parts in the current hash table.
2144 Returns the resulting value number if it exists in the hash table,
2145 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2146 vn_reference_t stored in the hashtable if something is found. */
2148 tree
2149 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2150 vec<vn_reference_op_s> operands,
2151 vn_reference_t *vnresult, vn_lookup_kind kind)
2153 struct vn_reference_s vr1;
2154 vn_reference_t tmp;
2155 tree cst;
2157 if (!vnresult)
2158 vnresult = &tmp;
2159 *vnresult = NULL;
2161 vr1.vuse = vuse_ssa_val (vuse);
2162 shared_lookup_references.truncate (0);
2163 shared_lookup_references.safe_grow (operands.length ());
2164 memcpy (shared_lookup_references.address (),
2165 operands.address (),
2166 sizeof (vn_reference_op_s)
2167 * operands.length ());
2168 vr1.operands = operands = shared_lookup_references
2169 = valueize_refs (shared_lookup_references);
2170 vr1.type = type;
2171 vr1.set = set;
2172 vr1.hashcode = vn_reference_compute_hash (&vr1);
2173 if ((cst = fully_constant_vn_reference_p (&vr1)))
2174 return cst;
2176 vn_reference_lookup_1 (&vr1, vnresult);
2177 if (!*vnresult
2178 && kind != VN_NOWALK
2179 && vr1.vuse)
2181 ao_ref r;
2182 vn_walk_kind = kind;
2183 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2184 *vnresult =
2185 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2186 vn_reference_lookup_2,
2187 vn_reference_lookup_3,
2188 vuse_ssa_val, &vr1);
2189 gcc_checking_assert (vr1.operands == shared_lookup_references);
2192 if (*vnresult)
2193 return (*vnresult)->result;
2195 return NULL_TREE;
2198 /* Lookup OP in the current hash table, and return the resulting value
2199 number if it exists in the hash table. Return NULL_TREE if it does
2200 not exist in the hash table or if the result field of the structure
2201    was NULL.  VNRESULT will be filled in with the vn_reference_t
2202 stored in the hashtable if one exists. */
2204 tree
2205 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2206 vn_reference_t *vnresult)
2208 vec<vn_reference_op_s> operands;
2209 struct vn_reference_s vr1;
2210 tree cst;
2211   bool valueized_anything;
2213 if (vnresult)
2214 *vnresult = NULL;
2216 vr1.vuse = vuse_ssa_val (vuse);
2217 vr1.operands = operands
2218     = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2219 vr1.type = TREE_TYPE (op);
2220 vr1.set = get_alias_set (op);
2221 vr1.hashcode = vn_reference_compute_hash (&vr1);
2222 if ((cst = fully_constant_vn_reference_p (&vr1)))
2223 return cst;
2225 if (kind != VN_NOWALK
2226 && vr1.vuse)
2228 vn_reference_t wvnresult;
2229 ao_ref r;
2230 /* Make sure to use a valueized reference if we valueized anything.
2231 Otherwise preserve the full reference for advanced TBAA. */
2232       if (!valueized_anything
2233 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2234 vr1.operands))
2235 ao_ref_init (&r, op);
2236 vn_walk_kind = kind;
2237 wvnresult =
2238 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2239 vn_reference_lookup_2,
2240 vn_reference_lookup_3,
2241 vuse_ssa_val, &vr1);
2242 gcc_checking_assert (vr1.operands == shared_lookup_references);
2243 if (wvnresult)
2245 if (vnresult)
2246 *vnresult = wvnresult;
2247 return wvnresult->result;
2250 return NULL_TREE;
2253 return vn_reference_lookup_1 (&vr1, vnresult);
2256 /* Lookup CALL in the current hash table and return the entry in
2257 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2259 void
2260 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2261 vn_reference_t vr)
2263 if (vnresult)
2264 *vnresult = NULL;
2266 tree vuse = gimple_vuse (call);
2268 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2269 vr->operands = valueize_shared_reference_ops_from_call (call);
2270 vr->type = gimple_expr_type (call);
2271 vr->set = 0;
2272 vr->hashcode = vn_reference_compute_hash (vr);
2273 vn_reference_lookup_1 (vr, vnresult);
2276 /* Insert OP into the current hash table with a value number of
2277 RESULT, and return the resulting reference structure we created. */
2279 static vn_reference_t
2280 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2282 vn_reference_s **slot;
2283 vn_reference_t vr1;
2284 bool tem;
2286 vr1 = current_info->references_pool->allocate ();
2287 if (TREE_CODE (result) == SSA_NAME)
2288 vr1->value_id = VN_INFO (result)->value_id;
2289 else
2290 vr1->value_id = get_or_alloc_constant_value_id (result);
2291 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2292 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2293 vr1->type = TREE_TYPE (op);
2294 vr1->set = get_alias_set (op);
2295 vr1->hashcode = vn_reference_compute_hash (vr1);
2296 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2297 vr1->result_vdef = vdef;
2299 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2300 INSERT);
2302 /* Because we lookup stores using vuses, and value number failures
2303 using the vdefs (see visit_reference_op_store for how and why),
2304 it's possible that on failure we may try to insert an already
2305      inserted store.  This is not wrong; there is no SSA name for a
2306 store that we could use as a differentiator anyway. Thus, unlike
2307 the other lookup functions, you cannot gcc_assert (!*slot)
2308 here. */
2310 /* But free the old slot in case of a collision. */
2311 if (*slot)
2312 free_reference (*slot);
2314 *slot = vr1;
2315 return vr1;
2318 /* Insert a reference by its pieces into the current hash table with
2319 a value number of RESULT. Return the resulting reference
2320 structure we created. */
2322 vn_reference_t
2323 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2324 vec<vn_reference_op_s> operands,
2325 tree result, unsigned int value_id)
2328 vn_reference_s **slot;
2329 vn_reference_t vr1;
2331 vr1 = current_info->references_pool->allocate ();
2332 vr1->value_id = value_id;
2333 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2334 vr1->operands = valueize_refs (operands);
2335 vr1->type = type;
2336 vr1->set = set;
2337 vr1->hashcode = vn_reference_compute_hash (vr1);
2338 if (result && TREE_CODE (result) == SSA_NAME)
2339 result = SSA_VAL (result);
2340 vr1->result = result;
2342 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2343 INSERT);
2345 /* At this point we should have all the things inserted that we have
2346 seen before, and we should never try inserting something that
2347 already exists. */
2348 gcc_assert (!*slot);
2349 if (*slot)
2350 free_reference (*slot);
2352 *slot = vr1;
2353 return vr1;
2356 /* Compute and return the hash value for nary operation VNO1.  */
2358 static hashval_t
2359 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2361 inchash::hash hstate;
2362 unsigned i;
2364 for (i = 0; i < vno1->length; ++i)
2365 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2366 vno1->op[i] = SSA_VAL (vno1->op[i]);
2368 if (((vno1->length == 2
2369 && commutative_tree_code (vno1->opcode))
2370 || (vno1->length == 3
2371 && commutative_ternary_tree_code (vno1->opcode)))
2372 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2373 std::swap (vno1->op[0], vno1->op[1]);
2374 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2375 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2377 std::swap (vno1->op[0], vno1->op[1]);
2378 vno1->opcode = swap_tree_comparison (vno1->opcode);
2381 hstate.add_int (vno1->opcode);
2382 for (i = 0; i < vno1->length; ++i)
2383 inchash::add_expr (vno1->op[i], hstate);
2385 return hstate.end ();
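
/* Editorial sketch, not used by this file: why the hash above swaps
   commutative operands into a canonical order.  With hypothetical integer
   "operand ids" standing in for trees, a + b and b + a hash (and later
   compare) identically once the smaller id always comes first.  The mixing
   constants are arbitrary; the real hashing goes through inchash.  */

static unsigned int
sketch_hash_commutative_pair (unsigned int opcode,
			      unsigned int op0, unsigned int op1)
{
  if (op1 < op0)
    {
      unsigned int tmp = op0;	/* Canonicalize: smaller operand first.  */
      op0 = op1;
      op1 = tmp;
    }
  unsigned int h = opcode * 0x9e3779b9u;
  h = (h ^ op0) * 0x85ebca6bu;
  h = (h ^ op1) * 0xc2b2ae35u;
  return h;
}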
2388 /* Compare nary operations VNO1 and VNO2 and return true if they are
2389 equivalent. */
2391 bool
2392 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2394 unsigned i;
2396 if (vno1->hashcode != vno2->hashcode)
2397 return false;
2399 if (vno1->length != vno2->length)
2400 return false;
2402 if (vno1->opcode != vno2->opcode
2403 || !types_compatible_p (vno1->type, vno2->type))
2404 return false;
2406 for (i = 0; i < vno1->length; ++i)
2407 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2408 return false;
2410 return true;
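
/* Editorial sketch, not used by this file: the cheap-to-expensive ordering
   of the equality test above, on a hypothetical flattened operation.  The
   precomputed hash is checked first so that almost all unequal pairs are
   rejected before any operand is inspected.  */

struct sketch_nary_op
{
  unsigned int hashcode;
  unsigned int opcode;
  unsigned int length;
  int op[4];
};

static int
sketch_nary_op_equal (const struct sketch_nary_op *a,
		      const struct sketch_nary_op *b)
{
  unsigned int i;
  if (a->hashcode != b->hashcode)
    return 0;
  if (a->length != b->length || a->opcode != b->opcode)
    return 0;
  for (i = 0; i < a->length; ++i)
    if (a->op[i] != b->op[i])
      return 0;
  return 1;
}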
2413 /* Initialize VNO from the pieces provided. */
2415 static void
2416 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2417 enum tree_code code, tree type, tree *ops)
2419 vno->opcode = code;
2420 vno->length = length;
2421 vno->type = type;
2422 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2425 /* Initialize VNO from OP. */
2427 static void
2428 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2430 unsigned i;
2432 vno->opcode = TREE_CODE (op);
2433 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2434 vno->type = TREE_TYPE (op);
2435 for (i = 0; i < vno->length; ++i)
2436 vno->op[i] = TREE_OPERAND (op, i);
2439 /* Return the number of operands for a vn_nary ops structure from STMT. */
2441 static unsigned int
2442 vn_nary_length_from_stmt (gimple stmt)
2444 switch (gimple_assign_rhs_code (stmt))
2446 case REALPART_EXPR:
2447 case IMAGPART_EXPR:
2448 case VIEW_CONVERT_EXPR:
2449 return 1;
2451 case BIT_FIELD_REF:
2452 return 3;
2454 case CONSTRUCTOR:
2455 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2457 default:
2458 return gimple_num_ops (stmt) - 1;
2462 /* Initialize VNO from STMT. */
2464 static void
2465 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2467 unsigned i;
2469 vno->opcode = gimple_assign_rhs_code (stmt);
2470 vno->type = gimple_expr_type (stmt);
2471 switch (vno->opcode)
2473 case REALPART_EXPR:
2474 case IMAGPART_EXPR:
2475 case VIEW_CONVERT_EXPR:
2476 vno->length = 1;
2477 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2478 break;
2480 case BIT_FIELD_REF:
2481 vno->length = 3;
2482 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2483 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2484 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2485 break;
2487 case CONSTRUCTOR:
2488 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2489 for (i = 0; i < vno->length; ++i)
2490 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2491 break;
2493 default:
2494 gcc_checking_assert (!gimple_assign_single_p (stmt));
2495 vno->length = gimple_num_ops (stmt) - 1;
2496 for (i = 0; i < vno->length; ++i)
2497 vno->op[i] = gimple_op (stmt, i + 1);
2501 /* Compute the hashcode for VNO and look for it in the hash table;
2502 return the resulting value number if it exists in the hash table.
2503 Return NULL_TREE if it does not exist in the hash table or if the
2504 result field of the operation is NULL. VNRESULT will contain the
2505 vn_nary_op_t from the hashtable if it exists. */
2507 static tree
2508 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2510 vn_nary_op_s **slot;
2512 if (vnresult)
2513 *vnresult = NULL;
2515 vno->hashcode = vn_nary_op_compute_hash (vno);
2516 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2517 NO_INSERT);
2518 if (!slot && current_info == optimistic_info)
2519 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2520 NO_INSERT);
2521 if (!slot)
2522 return NULL_TREE;
2523 if (vnresult)
2524 *vnresult = *slot;
2525 return (*slot)->result;
2528 /* Lookup an n-ary operation by its pieces and return the resulting value
2529 number if it exists in the hash table. Return NULL_TREE if it does
2530 not exist in the hash table or if the result field of the operation
2531 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2532 if it exists. */
2534 tree
2535 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2536 tree type, tree *ops, vn_nary_op_t *vnresult)
2538 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2539 sizeof_vn_nary_op (length));
2540 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2541 return vn_nary_op_lookup_1 (vno1, vnresult);
2544 /* Lookup OP in the current hash table, and return the resulting value
2545 number if it exists in the hash table. Return NULL_TREE if it does
2546 not exist in the hash table or if the result field of the operation
2547 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2548 if it exists. */
2550 tree
2551 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2553 vn_nary_op_t vno1
2554 = XALLOCAVAR (struct vn_nary_op_s,
2555 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2556 init_vn_nary_op_from_op (vno1, op);
2557 return vn_nary_op_lookup_1 (vno1, vnresult);
2560 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2561 value number if it exists in the hash table. Return NULL_TREE if
2562 it does not exist in the hash table. VNRESULT will contain the
2563 vn_nary_op_t from the hashtable if it exists. */
2565 tree
2566 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2568 vn_nary_op_t vno1
2569 = XALLOCAVAR (struct vn_nary_op_s,
2570 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2571 init_vn_nary_op_from_stmt (vno1, stmt);
2572 return vn_nary_op_lookup_1 (vno1, vnresult);
2575 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2577 static vn_nary_op_t
2578 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2580 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2583 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2584 obstack. */
2586 static vn_nary_op_t
2587 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2589 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2590 &current_info->nary_obstack);
2592 vno1->value_id = value_id;
2593 vno1->length = length;
2594 vno1->result = result;
2596 return vno1;
2599 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2600 VNO->HASHCODE first. */
2602 static vn_nary_op_t
2603 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2604 bool compute_hash)
2606 vn_nary_op_s **slot;
2608 if (compute_hash)
2609 vno->hashcode = vn_nary_op_compute_hash (vno);
2611 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2612 gcc_assert (!*slot);
2614 *slot = vno;
2615 return vno;
2618 /* Insert an n-ary operation into the current hash table using its
2619 pieces. Return the vn_nary_op_t structure we created and put in
2620 the hashtable. */
2622 vn_nary_op_t
2623 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2624 tree type, tree *ops,
2625 tree result, unsigned int value_id)
2627 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2628 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2629 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2632 /* Insert OP into the current hash table with a value number of
2633 RESULT. Return the vn_nary_op_t structure we created and put in
2634 the hashtable. */
2636 vn_nary_op_t
2637 vn_nary_op_insert (tree op, tree result)
2639 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2640 vn_nary_op_t vno1;
2642 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2643 init_vn_nary_op_from_op (vno1, op);
2644 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2647 /* Insert the rhs of STMT into the current hash table with a value number of
2648 RESULT. */
2650 vn_nary_op_t
2651 vn_nary_op_insert_stmt (gimple stmt, tree result)
2653 vn_nary_op_t vno1
2654 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2655 result, VN_INFO (result)->value_id);
2656 init_vn_nary_op_from_stmt (vno1, stmt);
2657 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2660 /* Compute a hashcode for PHI operation VP1 and return it. */
2662 static inline hashval_t
2663 vn_phi_compute_hash (vn_phi_t vp1)
2665 inchash::hash hstate (vp1->block->index);
2666 tree phi1op;
2667 tree type;
2668 edge e;
2669 edge_iterator ei;
2671 /* If all PHI arguments are constants we need to distinguish
2672 the PHI node via its type. */
2673 type = vp1->type;
2674 hstate.merge_hash (vn_hash_type (type));
2676 FOR_EACH_EDGE (e, ei, vp1->block->preds)
2678       /* Don't hash backedge values; they need to be handled as VN_TOP
2679 for optimistic value-numbering. */
2680 if (e->flags & EDGE_DFS_BACK)
2681 continue;
2683 phi1op = vp1->phiargs[e->dest_idx];
2684 if (phi1op == VN_TOP)
2685 continue;
2686 inchash::add_expr (phi1op, hstate);
2689 return hstate.end ();
2692 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2694 static int
2695 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2697 if (vp1->hashcode != vp2->hashcode)
2698 return false;
2700 if (vp1->block == vp2->block)
2702 int i;
2703 tree phi1op;
2705 /* If the PHI nodes do not have compatible types
2706 they are not the same. */
2707 if (!types_compatible_p (vp1->type, vp2->type))
2708 return false;
2710       /* Any phi in the same block will have its arguments in the
2711 same edge order, because of how we store phi nodes. */
2712 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2714 tree phi2op = vp2->phiargs[i];
2715 if (phi1op == VN_TOP || phi2op == VN_TOP)
2716 continue;
2717 if (!expressions_equal_p (phi1op, phi2op))
2718 return false;
2720 return true;
2722 return false;
2725 static vec<tree> shared_lookup_phiargs;
2727 /* Lookup PHI in the current hash table, and return the resulting
2728 value number if it exists in the hash table. Return NULL_TREE if
2729 it does not exist in the hash table. */
2731 static tree
2732 vn_phi_lookup (gimple phi)
2734 vn_phi_s **slot;
2735 struct vn_phi_s vp1;
2736 edge e;
2737 edge_iterator ei;
2739 shared_lookup_phiargs.truncate (0);
2740 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
2742   /* Canonicalize the SSA_NAMEs to their value numbers.  */
2743 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
2745 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
2746 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2747 shared_lookup_phiargs[e->dest_idx] = def;
2749 vp1.type = TREE_TYPE (gimple_phi_result (phi));
2750 vp1.phiargs = shared_lookup_phiargs;
2751 vp1.block = gimple_bb (phi);
2752 vp1.hashcode = vn_phi_compute_hash (&vp1);
2753 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2754 NO_INSERT);
2755 if (!slot && current_info == optimistic_info)
2756 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2757 NO_INSERT);
2758 if (!slot)
2759 return NULL_TREE;
2760 return (*slot)->result;
2763 /* Insert PHI into the current hash table with a value number of
2764 RESULT. */
2766 static vn_phi_t
2767 vn_phi_insert (gimple phi, tree result)
2769 vn_phi_s **slot;
2770 vn_phi_t vp1 = current_info->phis_pool->allocate ();
2771 vec<tree> args = vNULL;
2772 edge e;
2773 edge_iterator ei;
2775 args.safe_grow (gimple_phi_num_args (phi));
2777   /* Canonicalize the SSA_NAMEs to their value numbers.  */
2778 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
2780 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
2781 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2782 args[e->dest_idx] = def;
2784 vp1->value_id = VN_INFO (result)->value_id;
2785 vp1->type = TREE_TYPE (gimple_phi_result (phi));
2786 vp1->phiargs = args;
2787 vp1->block = gimple_bb (phi);
2788 vp1->result = result;
2789 vp1->hashcode = vn_phi_compute_hash (vp1);
2791 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
2793 /* Because we iterate over phi operations more than once, it's
2794      possible the slot might already exist here, hence no assert.  */
2795 *slot = vp1;
2796 return vp1;
2800 /* Print set of components in strongly connected component SCC to OUT. */
2802 static void
2803 print_scc (FILE *out, vec<tree> scc)
2805 tree var;
2806 unsigned int i;
2808 fprintf (out, "SCC consists of:");
2809 FOR_EACH_VEC_ELT (scc, i, var)
2811 fprintf (out, " ");
2812 print_generic_expr (out, var, 0);
2814 fprintf (out, "\n");
2817 /* Set the value number of FROM to TO, return true if it has changed
2818 as a result. */
2820 static inline bool
2821 set_ssa_val_to (tree from, tree to)
2823 tree currval = SSA_VAL (from);
2824 HOST_WIDE_INT toff, coff;
2826   /* The only things we allow as value numbers are ssa_names
2827 and invariants. So assert that here. We don't allow VN_TOP
2828 as visiting a stmt should produce a value-number other than
2829 that.
2830 ??? Still VN_TOP can happen for unreachable code, so force
2831 it to varying in that case. Not all code is prepared to
2832 get VN_TOP on valueization. */
2833 if (to == VN_TOP)
2835 if (dump_file && (dump_flags & TDF_DETAILS))
2836 fprintf (dump_file, "Forcing value number to varying on "
2837 "receiving VN_TOP\n");
2838 to = from;
2841 gcc_assert (to != NULL_TREE
2842 && ((TREE_CODE (to) == SSA_NAME
2843 && (to == from || SSA_VAL (to) == to))
2844 || is_gimple_min_invariant (to)));
2846 if (from != to)
2848 if (currval == from)
2850 if (dump_file && (dump_flags & TDF_DETAILS))
2852 fprintf (dump_file, "Not changing value number of ");
2853 print_generic_expr (dump_file, from, 0);
2854 fprintf (dump_file, " from VARYING to ");
2855 print_generic_expr (dump_file, to, 0);
2856 fprintf (dump_file, "\n");
2858 return false;
2860 else if (TREE_CODE (to) == SSA_NAME
2861 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2862 to = from;
2865 if (dump_file && (dump_flags & TDF_DETAILS))
2867 fprintf (dump_file, "Setting value number of ");
2868 print_generic_expr (dump_file, from, 0);
2869 fprintf (dump_file, " to ");
2870 print_generic_expr (dump_file, to, 0);
2873 if (currval != to
2874 && !operand_equal_p (currval, to, 0)
2875 /* ??? For addresses involving volatile objects or types operand_equal_p
2876 does not reliably detect ADDR_EXPRs as equal. We know we are only
2877 getting invariant gimple addresses here, so can use
2878 get_addr_base_and_unit_offset to do this comparison. */
2879 && !(TREE_CODE (currval) == ADDR_EXPR
2880 && TREE_CODE (to) == ADDR_EXPR
2881 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
2882 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
2883 && coff == toff))
2885 VN_INFO (from)->valnum = to;
2886 if (dump_file && (dump_flags & TDF_DETAILS))
2887 fprintf (dump_file, " (changed)\n");
2888 return true;
2890 if (dump_file && (dump_flags & TDF_DETAILS))
2891 fprintf (dump_file, "\n");
2892 return false;
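
/* Editorial sketch, not used by this file: the contract set_ssa_val_to
   provides to its callers, restated on a hypothetical integer lattice
   cell.  The caller only learns "changed" when the stored value really
   differs, which is what lets the SCC iteration below reach a fixed
   point and stop.  */

static int
sketch_update_lattice_cell (int *cell, int new_value)
{
  if (*cell == new_value)
    return 0;	/* Unchanged; does not force another iteration.  */
  *cell = new_value;
  return 1;	/* Changed; the enclosing SCC walk must iterate again.  */
}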
2895 /* Mark as processed all the definitions in the defining stmt of USE, or
2896 the USE itself. */
2898 static void
2899 mark_use_processed (tree use)
2901 ssa_op_iter iter;
2902 def_operand_p defp;
2903 gimple stmt = SSA_NAME_DEF_STMT (use);
2905 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
2907 VN_INFO (use)->use_processed = true;
2908 return;
2911 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2913 tree def = DEF_FROM_PTR (defp);
2915 VN_INFO (def)->use_processed = true;
2919 /* Value number all definitions in STMT to themselves.
2920 Return true if a value number changed. */
2922 static bool
2923 defs_to_varying (gimple stmt)
2925 bool changed = false;
2926 ssa_op_iter iter;
2927 def_operand_p defp;
2929 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2931 tree def = DEF_FROM_PTR (defp);
2932 changed |= set_ssa_val_to (def, def);
2934 return changed;
2937 static bool expr_has_constants (tree expr);
2939 /* Visit a copy between LHS and RHS, return true if the value number
2940 changed. */
2942 static bool
2943 visit_copy (tree lhs, tree rhs)
2945 /* The copy may have a more interesting constant filled expression
2946 (we don't, since we know our RHS is just an SSA name). */
2947 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2948 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2950 /* And finally valueize. */
2951 rhs = SSA_VAL (rhs);
2953 return set_ssa_val_to (lhs, rhs);
2956 /* Visit a nary operator RHS, value number it, and return true if the
2957 value number of LHS has changed as a result. */
2959 static bool
2960 visit_nary_op (tree lhs, gimple stmt)
2962 bool changed = false;
2963 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2965 if (result)
2966 changed = set_ssa_val_to (lhs, result);
2967 else
2969 changed = set_ssa_val_to (lhs, lhs);
2970 vn_nary_op_insert_stmt (stmt, lhs);
2973 return changed;
2976 /* Visit a call STMT storing into LHS. Return true if the value number
2977 of the LHS has changed as a result. */
2979 static bool
2980 visit_reference_op_call (tree lhs, gcall *stmt)
2982 bool changed = false;
2983 struct vn_reference_s vr1;
2984 vn_reference_t vnresult = NULL;
2985 tree vdef = gimple_vdef (stmt);
2987 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
2988 if (lhs && TREE_CODE (lhs) != SSA_NAME)
2989 lhs = NULL_TREE;
2991 vn_reference_lookup_call (stmt, &vnresult, &vr1);
2992 if (vnresult)
2994 if (vnresult->result_vdef && vdef)
2995 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
2997 if (!vnresult->result && lhs)
2998 vnresult->result = lhs;
3000 if (vnresult->result && lhs)
3002 changed |= set_ssa_val_to (lhs, vnresult->result);
3004 if (VN_INFO (vnresult->result)->has_constants)
3005 VN_INFO (lhs)->has_constants = true;
3008 else
3010 vn_reference_t vr2;
3011 vn_reference_s **slot;
3012 if (vdef)
3013 changed |= set_ssa_val_to (vdef, vdef);
3014 if (lhs)
3015 changed |= set_ssa_val_to (lhs, lhs);
3016 vr2 = current_info->references_pool->allocate ();
3017 vr2->vuse = vr1.vuse;
3018 /* As we are not walking the virtual operand chain we know the
3019 shared_lookup_references are still original so we can re-use
3020 them here. */
3021 vr2->operands = vr1.operands.copy ();
3022 vr2->type = vr1.type;
3023 vr2->set = vr1.set;
3024 vr2->hashcode = vr1.hashcode;
3025 vr2->result = lhs;
3026 vr2->result_vdef = vdef;
3027 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3028 INSERT);
3029 gcc_assert (!*slot);
3030 *slot = vr2;
3033 return changed;
3036 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3037 and return true if the value number of the LHS has changed as a result. */
3039 static bool
3040 visit_reference_op_load (tree lhs, tree op, gimple stmt)
3042 bool changed = false;
3043 tree last_vuse;
3044 tree result;
3046 last_vuse = gimple_vuse (stmt);
3047 last_vuse_ptr = &last_vuse;
3048 result = vn_reference_lookup (op, gimple_vuse (stmt),
3049 default_vn_walk_kind, NULL);
3050 last_vuse_ptr = NULL;
3052 /* We handle type-punning through unions by value-numbering based
3053 on offset and size of the access. Be prepared to handle a
3054      type mismatch here by creating a VIEW_CONVERT_EXPR.  */
3055 if (result
3056 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3058 /* We will be setting the value number of lhs to the value number
3059 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3060 So first simplify and lookup this expression to see if it
3061 is already available. */
3062 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
3063 if ((CONVERT_EXPR_P (val)
3064 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
3065 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
3067 tree tem = vn_get_expr_for (TREE_OPERAND (val, 0));
3068 if ((CONVERT_EXPR_P (tem)
3069 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
3070 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
3071 TREE_TYPE (val), tem)))
3072 val = tem;
3074 result = val;
3075 if (!is_gimple_min_invariant (val)
3076 && TREE_CODE (val) != SSA_NAME)
3077 result = vn_nary_op_lookup (val, NULL);
3078 /* If the expression is not yet available, value-number lhs to
3079 a new SSA_NAME we create. */
3080 if (!result)
3082 result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (),
3083 "vntemp");
3084 /* Initialize value-number information properly. */
3085 VN_INFO_GET (result)->valnum = result;
3086 VN_INFO (result)->value_id = get_next_value_id ();
3087 VN_INFO (result)->expr = val;
3088 VN_INFO (result)->has_constants = expr_has_constants (val);
3089 VN_INFO (result)->needs_insertion = true;
3090 /* As all "inserted" statements are singleton SCCs, insert
3091 	     into the valid table.  This is strictly needed to
3092 avoid re-generating new value SSA_NAMEs for the same
3093 expression during SCC iteration over and over (the
3094 optimistic table gets cleared after each iteration).
3095 We do not need to insert into the optimistic table, as
3096 lookups there will fall back to the valid table. */
3097 if (current_info == optimistic_info)
3099 current_info = valid_info;
3100 vn_nary_op_insert (val, result);
3101 current_info = optimistic_info;
3103 else
3104 vn_nary_op_insert (val, result);
3105 if (dump_file && (dump_flags & TDF_DETAILS))
3107 fprintf (dump_file, "Inserting name ");
3108 print_generic_expr (dump_file, result, 0);
3109 fprintf (dump_file, " for expression ");
3110 print_generic_expr (dump_file, val, 0);
3111 fprintf (dump_file, "\n");
3116 if (result)
3118 changed = set_ssa_val_to (lhs, result);
3119 if (TREE_CODE (result) == SSA_NAME
3120 && VN_INFO (result)->has_constants)
3122 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
3123 VN_INFO (lhs)->has_constants = true;
3126 else
3128 changed = set_ssa_val_to (lhs, lhs);
3129 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3132 return changed;
3136 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3137 and return true if the value number of the LHS has changed as a result. */
3139 static bool
3140 visit_reference_op_store (tree lhs, tree op, gimple stmt)
3142 bool changed = false;
3143 vn_reference_t vnresult = NULL;
3144 tree result, assign;
3145 bool resultsame = false;
3146 tree vuse = gimple_vuse (stmt);
3147 tree vdef = gimple_vdef (stmt);
3149 if (TREE_CODE (op) == SSA_NAME)
3150 op = SSA_VAL (op);
3152   /* First we want to lookup using the *vuses* from the store and see
3153      if the last store to this location with the same address
3154      had the same value.
3156      The vuses represent the memory state before the store.  If the
3157      memory state, address, and value of the store are the same as those
3158      of the last store to this location, then this store will produce the
3159      same memory state as that store.
3161 In this case the vdef versions for this store are value numbered to those
3162 vuse versions, since they represent the same memory state after
3163 this store.
3165 Otherwise, the vdefs for the store are used when inserting into
3166 the table, since the store generates a new memory state. */
3168 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);
3170 if (result)
3172 if (TREE_CODE (result) == SSA_NAME)
3173 result = SSA_VAL (result);
3174 resultsame = expressions_equal_p (result, op);
3177 if ((!result || !resultsame)
3178 /* Only perform the following when being called from PRE
3179 which embeds tail merging. */
3180 && default_vn_walk_kind == VN_WALK)
3182 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3183 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
3184 if (vnresult)
3186 VN_INFO (vdef)->use_processed = true;
3187 return set_ssa_val_to (vdef, vnresult->result_vdef);
3191 if (!result || !resultsame)
3193 if (dump_file && (dump_flags & TDF_DETAILS))
3195 fprintf (dump_file, "No store match\n");
3196 fprintf (dump_file, "Value numbering store ");
3197 print_generic_expr (dump_file, lhs, 0);
3198 fprintf (dump_file, " to ");
3199 print_generic_expr (dump_file, op, 0);
3200 fprintf (dump_file, "\n");
3202 /* Have to set value numbers before insert, since insert is
3203 going to valueize the references in-place. */
3204 if (vdef)
3206 changed |= set_ssa_val_to (vdef, vdef);
3209 /* Do not insert structure copies into the tables. */
3210 if (is_gimple_min_invariant (op)
3211 || is_gimple_reg (op))
3212 vn_reference_insert (lhs, op, vdef, NULL);
3214 /* Only perform the following when being called from PRE
3215 which embeds tail merging. */
3216 if (default_vn_walk_kind == VN_WALK)
3218 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3219 vn_reference_insert (assign, lhs, vuse, vdef);
3222 else
3224 /* We had a match, so value number the vdef to have the value
3225 number of the vuse it came from. */
3227 if (dump_file && (dump_flags & TDF_DETAILS))
3228 fprintf (dump_file, "Store matched earlier value,"
3229 "value numbering store vdefs to matching vuses.\n");
3231 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3234 return changed;
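
/* Editorial sketch, not used by this file: the redundant-store idea the
   function above implements, with hypothetical integer ids.  A store whose
   (incoming memory state, address, value) triple matches an earlier store
   does not create a new memory state, so its vdef can be value numbered to
   the incoming vuse; otherwise a fresh state id is used.  */

struct sketch_store
{
  int state;	/* Memory state the store executes in.  */
  int addr;	/* Value number of the address.  */
  int value;	/* Value number of the stored value.  */
};

static int
sketch_store_memory_state (const struct sketch_store *seen, int nseen,
			   int state, int addr, int value, int fresh_state)
{
  int i;
  for (i = 0; i < nseen; ++i)
    if (seen[i].state == state
	&& seen[i].addr == addr
	&& seen[i].value == value)
      return state;	/* Redundant store: memory is unchanged.  */
  return fresh_state;	/* New value: the store defines a new state.  */
}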
3237 /* Visit and value number PHI, return true if the value number
3238 changed. */
3240 static bool
3241 visit_phi (gimple phi)
3243 bool changed = false;
3244 tree result;
3245 tree sameval = VN_TOP;
3246 bool allsame = true;
3248 /* TODO: We could check for this in init_sccvn, and replace this
3249 with a gcc_assert. */
3250 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3251 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3253 /* See if all non-TOP arguments have the same value. TOP is
3254 equivalent to everything, so we can ignore it. */
3255 edge_iterator ei;
3256 edge e;
3257 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3258 if (e->flags & EDGE_EXECUTABLE)
3260 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3262 if (TREE_CODE (def) == SSA_NAME)
3263 def = SSA_VAL (def);
3264 if (def == VN_TOP)
3265 continue;
3266 if (sameval == VN_TOP)
3267 sameval = def;
3268 else if (!expressions_equal_p (def, sameval))
3270 allsame = false;
3271 break;
3275 /* If none of the edges was executable or all incoming values are
3276      undefined, keep the value-number at VN_TOP.  */
3277 if (sameval == VN_TOP)
3278 return set_ssa_val_to (PHI_RESULT (phi), VN_TOP);
3280 /* First see if it is equivalent to a phi node in this block. We prefer
3281 this as it allows IV elimination - see PRs 66502 and 67167. */
3282 result = vn_phi_lookup (phi);
3283 if (result)
3284 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3285 /* Otherwise all value numbered to the same value, the phi node has that
3286 value. */
3287 else if (allsame)
3288 changed = set_ssa_val_to (PHI_RESULT (phi), sameval);
3289 else
3291 vn_phi_insert (phi, PHI_RESULT (phi));
3292 VN_INFO (PHI_RESULT (phi))->has_constants = false;
3293 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
3294 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3297 return changed;
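
/* Editorial sketch, not used by this file: the argument meet visit_phi
   performs, with hypothetical integer value ids.  TOP stands for an
   undefined (optimistically "anything") argument and is skipped; SELF
   stands for the PHI result value numbering to itself when the remaining
   arguments disagree.  The real code additionally ignores arguments on
   non-executable edges and prefers an equivalent PHI found by lookup.  */

#define SKETCH_TOP  (-1)
#define SKETCH_SELF (-2)

static int
sketch_phi_meet (const int *args, int nargs)
{
  int sameval = SKETCH_TOP;
  int i;
  for (i = 0; i < nargs; ++i)
    {
      if (args[i] == SKETCH_TOP)
	continue;			/* TOP is equivalent to everything.  */
      if (sameval == SKETCH_TOP)
	sameval = args[i];
      else if (args[i] != sameval)
	return SKETCH_SELF;		/* Arguments disagree.  */
    }
  return sameval;	/* All arguments agreed, or everything was TOP.  */
}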
3300 /* Return true if EXPR contains constants. */
3302 static bool
3303 expr_has_constants (tree expr)
3305 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3307 case tcc_unary:
3308 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
3310 case tcc_binary:
3311 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
3312 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
3313 /* Constants inside reference ops are rarely interesting, but
3314 it can take a lot of looking to find them. */
3315 case tcc_reference:
3316 case tcc_declaration:
3317 return false;
3318 default:
3319 return is_gimple_min_invariant (expr);
3321 return false;
3324 /* Return true if STMT contains constants. */
3326 static bool
3327 stmt_has_constants (gimple stmt)
3329 tree tem;
3331 if (gimple_code (stmt) != GIMPLE_ASSIGN)
3332 return false;
3334 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
3336 case GIMPLE_TERNARY_RHS:
3337 tem = gimple_assign_rhs3 (stmt);
3338 if (TREE_CODE (tem) == SSA_NAME)
3339 tem = SSA_VAL (tem);
3340 if (is_gimple_min_invariant (tem))
3341 return true;
3342 /* Fallthru. */
3344 case GIMPLE_BINARY_RHS:
3345 tem = gimple_assign_rhs2 (stmt);
3346 if (TREE_CODE (tem) == SSA_NAME)
3347 tem = SSA_VAL (tem);
3348 if (is_gimple_min_invariant (tem))
3349 return true;
3350 /* Fallthru. */
3352 case GIMPLE_SINGLE_RHS:
3353 /* Constants inside reference ops are rarely interesting, but
3354 it can take a lot of looking to find them. */
3355 case GIMPLE_UNARY_RHS:
3356 tem = gimple_assign_rhs1 (stmt);
3357 if (TREE_CODE (tem) == SSA_NAME)
3358 tem = SSA_VAL (tem);
3359 return is_gimple_min_invariant (tem);
3361 default:
3362 gcc_unreachable ();
3364 return false;
3367 /* Simplify the binary expression RHS, and return the result if
3368 simplified. */
3370 static tree
3371 simplify_binary_expression (gimple stmt)
3373 tree result = NULL_TREE;
3374 tree op0 = gimple_assign_rhs1 (stmt);
3375 tree op1 = gimple_assign_rhs2 (stmt);
3376 enum tree_code code = gimple_assign_rhs_code (stmt);
3378 /* This will not catch every single case we could combine, but will
3379 catch those with constants. The goal here is to simultaneously
3380 combine constants between expressions, but avoid infinite
3381 expansion of expressions during simplification. */
3382 op0 = vn_valueize (op0);
3383 if (TREE_CODE (op0) == SSA_NAME
3384 && (VN_INFO (op0)->has_constants
3385 || TREE_CODE_CLASS (code) == tcc_comparison
3386 || code == COMPLEX_EXPR))
3387 op0 = vn_get_expr_for (op0);
3389 op1 = vn_valueize (op1);
3390 if (TREE_CODE (op1) == SSA_NAME
3391 && (VN_INFO (op1)->has_constants
3392 || code == COMPLEX_EXPR))
3393 op1 = vn_get_expr_for (op1);
3395 /* Pointer plus constant can be represented as invariant address.
3396      Do so to allow further propagation, see also tree forwprop.  */
3397 if (code == POINTER_PLUS_EXPR
3398 && tree_fits_uhwi_p (op1)
3399 && TREE_CODE (op0) == ADDR_EXPR
3400 && is_gimple_min_invariant (op0))
3401 return build_invariant_address (TREE_TYPE (op0),
3402 TREE_OPERAND (op0, 0),
3403 tree_to_uhwi (op1));
3405 /* Avoid folding if nothing changed. */
3406 if (op0 == gimple_assign_rhs1 (stmt)
3407 && op1 == gimple_assign_rhs2 (stmt))
3408 return NULL_TREE;
3410 fold_defer_overflow_warnings ();
3412 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3413 if (result)
3414 STRIP_USELESS_TYPE_CONVERSION (result);
3416 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3417 stmt, 0);
3419 /* Make sure result is not a complex expression consisting
3420      of operations on operations (i.e. (a + b) + (a + c)).
3421      Otherwise, we will end up with unbounded expressions if
3422 fold does anything at all. */
3423 if (result && valid_gimple_rhs_p (result))
3424 return result;
3426 return NULL_TREE;
3429 /* Simplify the unary expression RHS, and return the result if
3430 simplified. */
3432 static tree
3433 simplify_unary_expression (gassign *stmt)
3435 tree result = NULL_TREE;
3436 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3437 enum tree_code code = gimple_assign_rhs_code (stmt);
3439 /* We handle some tcc_reference codes here that are all
3440 GIMPLE_ASSIGN_SINGLE codes. */
3441 if (code == REALPART_EXPR
3442 || code == IMAGPART_EXPR
3443 || code == VIEW_CONVERT_EXPR
3444 || code == BIT_FIELD_REF)
3445 op0 = TREE_OPERAND (op0, 0);
3447 orig_op0 = op0;
3448 op0 = vn_valueize (op0);
3449 if (TREE_CODE (op0) == SSA_NAME)
3451 if (VN_INFO (op0)->has_constants)
3452 op0 = vn_get_expr_for (op0);
3453 else if (CONVERT_EXPR_CODE_P (code)
3454 || code == REALPART_EXPR
3455 || code == IMAGPART_EXPR
3456 || code == VIEW_CONVERT_EXPR
3457 || code == BIT_FIELD_REF)
3459 /* We want to do tree-combining on conversion-like expressions.
3460 Make sure we feed only SSA_NAMEs or constants to fold though. */
3461 tree tem = vn_get_expr_for (op0);
3462 if (UNARY_CLASS_P (tem)
3463 || BINARY_CLASS_P (tem)
3464 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3465 || TREE_CODE (tem) == SSA_NAME
3466 || TREE_CODE (tem) == CONSTRUCTOR
3467 || is_gimple_min_invariant (tem))
3468 op0 = tem;
3472 /* Avoid folding if nothing changed, but remember the expression. */
3473 if (op0 == orig_op0)
3474 return NULL_TREE;
3476 if (code == BIT_FIELD_REF)
3478 tree rhs = gimple_assign_rhs1 (stmt);
3479 result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3480 op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3482 else
3483 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3484 if (result)
3486 STRIP_USELESS_TYPE_CONVERSION (result);
3487 if (valid_gimple_rhs_p (result))
3488 return result;
3491 return NULL_TREE;
3494 /* Try to simplify RHS using equivalences and constant folding. */
3496 static tree
3497 try_to_simplify (gassign *stmt)
3499 enum tree_code code = gimple_assign_rhs_code (stmt);
3500 tree tem;
3502 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
3503 in this case, there is no point in doing extra work. */
3504 if (code == SSA_NAME)
3505 return NULL_TREE;
3507 /* First try constant folding based on our current lattice. */
3508 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3509 if (tem
3510 && (TREE_CODE (tem) == SSA_NAME
3511 || is_gimple_min_invariant (tem)))
3512 return tem;
3514 /* If that didn't work try combining multiple statements. */
3515 switch (TREE_CODE_CLASS (code))
3517 case tcc_reference:
3518 /* Fallthrough for some unary codes that can operate on registers. */
3519 if (!(code == REALPART_EXPR
3520 || code == IMAGPART_EXPR
3521 || code == VIEW_CONVERT_EXPR
3522 || code == BIT_FIELD_REF))
3523 break;
3524 /* We could do a little more with unary ops, if they expand
3525 into binary ops, but it's debatable whether it is worth it. */
3526 case tcc_unary:
3527 return simplify_unary_expression (stmt);
3529 case tcc_comparison:
3530 case tcc_binary:
3531 return simplify_binary_expression (stmt);
3533 default:
3534 break;
3537 return NULL_TREE;
3540 /* Visit and value number USE, return true if the value number
3541 changed. */
3543 static bool
3544 visit_use (tree use)
3546 bool changed = false;
3547 gimple stmt = SSA_NAME_DEF_STMT (use);
3549 mark_use_processed (use);
3551 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3552 if (dump_file && (dump_flags & TDF_DETAILS)
3553 && !SSA_NAME_IS_DEFAULT_DEF (use))
3555 fprintf (dump_file, "Value numbering ");
3556 print_generic_expr (dump_file, use, 0);
3557 fprintf (dump_file, " stmt = ");
3558 print_gimple_stmt (dump_file, stmt, 0, 0);
3561 /* Handle uninitialized uses. */
3562 if (SSA_NAME_IS_DEFAULT_DEF (use))
3563 changed = set_ssa_val_to (use, use);
3564 else
3566 if (gimple_code (stmt) == GIMPLE_PHI)
3567 changed = visit_phi (stmt);
3568 else if (gimple_has_volatile_ops (stmt))
3569 changed = defs_to_varying (stmt);
3570 else if (is_gimple_assign (stmt))
3572 enum tree_code code = gimple_assign_rhs_code (stmt);
3573 tree lhs = gimple_assign_lhs (stmt);
3574 tree rhs1 = gimple_assign_rhs1 (stmt);
3575 tree simplified;
3577 /* Shortcut for copies. Simplifying copies is pointless,
3578 since we copy the expression and value they represent. */
3579 if (code == SSA_NAME
3580 && TREE_CODE (lhs) == SSA_NAME)
3582 changed = visit_copy (lhs, rhs1);
3583 goto done;
3585 simplified = try_to_simplify (as_a <gassign *> (stmt));
3586 if (simplified)
3588 if (dump_file && (dump_flags & TDF_DETAILS))
3590 fprintf (dump_file, "RHS ");
3591 print_gimple_expr (dump_file, stmt, 0, 0);
3592 fprintf (dump_file, " simplified to ");
3593 print_generic_expr (dump_file, simplified, 0);
3594 if (TREE_CODE (lhs) == SSA_NAME)
3595 fprintf (dump_file, " has constants %d\n",
3596 expr_has_constants (simplified));
3597 else
3598 fprintf (dump_file, "\n");
3601 /* Setting value numbers to constants will occasionally
3602 screw up phi congruence because constants are not
3603 uniquely associated with a single ssa name that can be
3604 looked up. */
3605 if (simplified
3606 && is_gimple_min_invariant (simplified)
3607 && TREE_CODE (lhs) == SSA_NAME)
3609 VN_INFO (lhs)->expr = simplified;
3610 VN_INFO (lhs)->has_constants = true;
3611 changed = set_ssa_val_to (lhs, simplified);
3612 goto done;
3614 else if (simplified
3615 && TREE_CODE (simplified) == SSA_NAME
3616 && TREE_CODE (lhs) == SSA_NAME)
3618 changed = visit_copy (lhs, simplified);
3619 goto done;
3621 else if (simplified)
3623 if (TREE_CODE (lhs) == SSA_NAME)
3625 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3626 /* We have to unshare the expression or else
3627 		 valueizing may change the IL stream.  */
3628 VN_INFO (lhs)->expr = unshare_expr (simplified);
3631 else if (stmt_has_constants (stmt)
3632 && TREE_CODE (lhs) == SSA_NAME)
3633 VN_INFO (lhs)->has_constants = true;
3634 else if (TREE_CODE (lhs) == SSA_NAME)
3636 /* We reset expr and constantness here because we may
3637 have been value numbering optimistically, and
3638 iterating. They may become non-constant in this case,
3639 even if they were optimistically constant. */
3641 VN_INFO (lhs)->has_constants = false;
3642 VN_INFO (lhs)->expr = NULL_TREE;
3645 if ((TREE_CODE (lhs) == SSA_NAME
3646 /* We can substitute SSA_NAMEs that are live over
3647 abnormal edges with their constant value. */
3648 && !(gimple_assign_copy_p (stmt)
3649 && is_gimple_min_invariant (rhs1))
3650 && !(simplified
3651 && is_gimple_min_invariant (simplified))
3652 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3653 /* Stores or copies from SSA_NAMEs that are live over
3654 abnormal edges are a problem. */
3655 || (code == SSA_NAME
3656 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3657 changed = defs_to_varying (stmt);
3658 else if (REFERENCE_CLASS_P (lhs)
3659 || DECL_P (lhs))
3660 changed = visit_reference_op_store (lhs, rhs1, stmt);
3661 else if (TREE_CODE (lhs) == SSA_NAME)
3663 if ((gimple_assign_copy_p (stmt)
3664 && is_gimple_min_invariant (rhs1))
3665 || (simplified
3666 && is_gimple_min_invariant (simplified)))
3668 VN_INFO (lhs)->has_constants = true;
3669 if (simplified)
3670 changed = set_ssa_val_to (lhs, simplified);
3671 else
3672 changed = set_ssa_val_to (lhs, rhs1);
3674 else
3676 /* First try to lookup the simplified expression. */
3677 if (simplified)
3679 enum gimple_rhs_class rhs_class;
3682 rhs_class = get_gimple_rhs_class (TREE_CODE (simplified));
3683 if ((rhs_class == GIMPLE_UNARY_RHS
3684 || rhs_class == GIMPLE_BINARY_RHS
3685 || rhs_class == GIMPLE_TERNARY_RHS)
3686 && valid_gimple_rhs_p (simplified))
3688 tree result = vn_nary_op_lookup (simplified, NULL);
3689 if (result)
3691 changed = set_ssa_val_to (lhs, result);
3692 goto done;
3697 /* Otherwise visit the original statement. */
3698 switch (vn_get_stmt_kind (stmt))
3700 case VN_NARY:
3701 changed = visit_nary_op (lhs, stmt);
3702 break;
3703 case VN_REFERENCE:
3704 changed = visit_reference_op_load (lhs, rhs1, stmt);
3705 break;
3706 default:
3707 changed = defs_to_varying (stmt);
3708 break;
3712 else
3713 changed = defs_to_varying (stmt);
3715 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
3717 tree lhs = gimple_call_lhs (stmt);
3718 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3720 /* Try constant folding based on our current lattice. */
3721 tree simplified = gimple_fold_stmt_to_constant_1 (stmt,
3722 vn_valueize);
3723 if (simplified)
3725 if (dump_file && (dump_flags & TDF_DETAILS))
3727 fprintf (dump_file, "call ");
3728 print_gimple_expr (dump_file, stmt, 0, 0);
3729 fprintf (dump_file, " simplified to ");
3730 print_generic_expr (dump_file, simplified, 0);
3731 if (TREE_CODE (lhs) == SSA_NAME)
3732 fprintf (dump_file, " has constants %d\n",
3733 expr_has_constants (simplified));
3734 else
3735 fprintf (dump_file, "\n");
3738 /* Setting value numbers to constants will occasionally
3739 screw up phi congruence because constants are not
3740 uniquely associated with a single ssa name that can be
3741 looked up. */
3742 if (simplified
3743 && is_gimple_min_invariant (simplified))
3745 VN_INFO (lhs)->expr = simplified;
3746 VN_INFO (lhs)->has_constants = true;
3747 changed = set_ssa_val_to (lhs, simplified);
3748 if (gimple_vdef (stmt))
3749 changed |= set_ssa_val_to (gimple_vdef (stmt),
3750 SSA_VAL (gimple_vuse (stmt)));
3751 goto done;
3753 else if (simplified
3754 && TREE_CODE (simplified) == SSA_NAME)
3756 changed = visit_copy (lhs, simplified);
3757 if (gimple_vdef (stmt))
3758 changed |= set_ssa_val_to (gimple_vdef (stmt),
3759 SSA_VAL (gimple_vuse (stmt)));
3760 goto done;
3762 else
3764 if (stmt_has_constants (stmt))
3765 VN_INFO (lhs)->has_constants = true;
3766 else
3768 /* We reset expr and constantness here because we may
3769 have been value numbering optimistically, and
3770 iterating. They may become non-constant in this case,
3771 even if they were optimistically constant. */
3772 VN_INFO (lhs)->has_constants = false;
3773 VN_INFO (lhs)->expr = NULL_TREE;
3776 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3778 changed = defs_to_varying (stmt);
3779 goto done;
3784 if (!gimple_call_internal_p (stmt)
3785 && (/* Calls to the same function with the same vuse
3786 and the same operands do not necessarily return the same
3787 value, unless they're pure or const. */
3788 gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
3789 /* If calls have a vdef, subsequent calls won't have
3790 		     the same incoming vuse.  So, if two calls with a vdef have the
3791 		     same vuse, we know they're not subsequent.
3792 		     We can value number two non-subsequent calls to the same
3793 		     function with the same vuse and the same operands as equal,
3794 		     because there is no code in the program that could
3795 		     compare the two values...  */
3796 || (gimple_vdef (stmt)
3797 		      /* ... unless the call returns a pointer which does
3798 			 not alias with anything else, in which case the
3799 			 information that the values are distinct is encoded
3800 			 in the IL.  */
3801 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
3802 /* Only perform the following when being called from PRE
3803 which embeds tail merging. */
3804 && default_vn_walk_kind == VN_WALK)))
3805 changed = visit_reference_op_call (lhs, call_stmt);
3806 else
3807 changed = defs_to_varying (stmt);
3809 else
3810 changed = defs_to_varying (stmt);
3812 done:
3813 return changed;
3816 /* Compare two operands by reverse postorder index.  */
3818 static int
3819 compare_ops (const void *pa, const void *pb)
3821 const tree opa = *((const tree *)pa);
3822 const tree opb = *((const tree *)pb);
3823 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3824 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3825 basic_block bba;
3826 basic_block bbb;
3828 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3829 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3830 else if (gimple_nop_p (opstmta))
3831 return -1;
3832 else if (gimple_nop_p (opstmtb))
3833 return 1;
3835 bba = gimple_bb (opstmta);
3836 bbb = gimple_bb (opstmtb);
3838 if (!bba && !bbb)
3839 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3840 else if (!bba)
3841 return -1;
3842 else if (!bbb)
3843 return 1;
3845 if (bba == bbb)
3847 if (gimple_code (opstmta) == GIMPLE_PHI
3848 && gimple_code (opstmtb) == GIMPLE_PHI)
3849 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3850 else if (gimple_code (opstmta) == GIMPLE_PHI)
3851 return -1;
3852 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3853 return 1;
3854 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3855 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3856 else
3857 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3859 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3862 /* Sort an array containing members of a strongly connected component
3863 SCC so that the members are ordered by RPO number.
3864 This means that when the sort is complete, iterating through the
3865 array will give you the members in RPO order. */
3867 static void
3868 sort_scc (vec<tree> scc)
3870 scc.qsort (compare_ops);
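
/* Editorial sketch, not used by this file: a condensed version of the
   tie-breaking scheme of compare_ops above, on a hypothetical record.
   Definitions without a basic block sort first, then blocks are ordered by
   RPO number, and the SSA version gives a final stable tie-break so the
   comparison is a total order, as qsort requires.  */

struct sketch_op
{
  int has_bb;	/* 0 for nop/default definitions without a block.  */
  int rpo;	/* RPO number of the defining block, if any.  */
  int version;	/* SSA version number, the final tie-break.  */
};

static int
sketch_compare_ops (const struct sketch_op *a, const struct sketch_op *b)
{
  if (a->has_bb != b->has_bb)
    return a->has_bb - b->has_bb;
  if (a->has_bb && a->rpo != b->rpo)
    return a->rpo - b->rpo;
  return a->version - b->version;
}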
3873 /* Insert the no longer used nary ONARY into the hash INFO.  */
3875 static void
3876 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3878 size_t size = sizeof_vn_nary_op (onary->length);
3879 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3880 &info->nary_obstack);
3881 memcpy (nary, onary, size);
3882 vn_nary_op_insert_into (nary, info->nary, false);
3885 /* Insert the no longer used phi OPHI into the hash INFO.  */
3887 static void
3888 copy_phi (vn_phi_t ophi, vn_tables_t info)
3890 vn_phi_t phi = info->phis_pool->allocate ();
3891 vn_phi_s **slot;
3892 memcpy (phi, ophi, sizeof (*phi));
3893 ophi->phiargs.create (0);
3894 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
3895 gcc_assert (!*slot);
3896 *slot = phi;
3899 /* Insert the no longer used reference OREF into the hash INFO.  */
3901 static void
3902 copy_reference (vn_reference_t oref, vn_tables_t info)
3904 vn_reference_t ref;
3905 vn_reference_s **slot;
3906 ref = info->references_pool->allocate ();
3907 memcpy (ref, oref, sizeof (*ref));
3908 oref->operands.create (0);
3909 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
3910 if (*slot)
3911 free_reference (*slot);
3912 *slot = ref;
3915 /* Process a strongly connected component in the SSA graph. */
3917 static void
3918 process_scc (vec<tree> scc)
3920 tree var;
3921 unsigned int i;
3922 unsigned int iterations = 0;
3923 bool changed = true;
3924 vn_nary_op_iterator_type hin;
3925 vn_phi_iterator_type hip;
3926 vn_reference_iterator_type hir;
3927 vn_nary_op_t nary;
3928 vn_phi_t phi;
3929 vn_reference_t ref;
3931 /* If the SCC has a single member, just visit it. */
3932 if (scc.length () == 1)
3934 tree use = scc[0];
3935 if (VN_INFO (use)->use_processed)
3936 return;
3937 /* We need to make sure it doesn't form a cycle itself, which can
3938 happen for self-referential PHI nodes. In that case we would
3939 end up inserting an expression with VN_TOP operands into the
3940 valid table which makes us derive bogus equivalences later.
3941 The cheapest way to check this is to assume it for all PHI nodes. */
3942 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3943 /* Fallthru to iteration. */ ;
3944 else
3946 visit_use (use);
3947 return;
3951 if (dump_file && (dump_flags & TDF_DETAILS))
3952 print_scc (dump_file, scc);
3954 /* Iterate over the SCC with the optimistic table until it stops
3955 changing. */
3956 current_info = optimistic_info;
3957 while (changed)
3959 changed = false;
3960 iterations++;
3961 if (dump_file && (dump_flags & TDF_DETAILS))
3962 fprintf (dump_file, "Starting iteration %d\n", iterations);
3963 /* As we are value-numbering optimistically, we have to
3964 clear the expression tables and the simplified expressions
3965 in each iteration until we converge. */
3966 optimistic_info->nary->empty ();
3967 optimistic_info->phis->empty ();
3968 optimistic_info->references->empty ();
3969 obstack_free (&optimistic_info->nary_obstack, NULL);
3970 gcc_obstack_init (&optimistic_info->nary_obstack);
3971 optimistic_info->phis_pool->release ();
3972 optimistic_info->references_pool->release ();
3973 FOR_EACH_VEC_ELT (scc, i, var)
3974 VN_INFO (var)->expr = NULL_TREE;
3975 FOR_EACH_VEC_ELT (scc, i, var)
3976 changed |= visit_use (var);
3979 if (dump_file && (dump_flags & TDF_DETAILS))
3980 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
3981 statistics_histogram_event (cfun, "SCC iterations", iterations);
3983 /* Finally, copy the contents of the no longer used optimistic
3984 table to the valid table. */
3985 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
3986 copy_nary (nary, valid_info);
3987 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
3988 copy_phi (phi, valid_info);
3989 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
3990 ref, vn_reference_t, hir)
3991 copy_reference (ref, valid_info);
3993 current_info = valid_info;
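/* For illustration, consider a loop header with the mutually referencing
   PHIs
       x_1 = PHI <0(entry), y_2(latch)>
       y_2 = PHI <0(entry), x_1(latch)>
   On the first optimistic iteration the back-edge arguments are still
   VN_TOP, so both names are optimistically valued 0; the second iteration
   re-checks that guess with the tables cleared and changes nothing, so the
   loop above converges with x_1 == y_2 == 0.  */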
3997 /* Pop the components of the found SCC for NAME off the SCC stack
3998 and process them. Returns true if all went well, false if
3999 we run into resource limits. */
4001 static bool
4002 extract_and_process_scc_for_name (tree name)
4004 auto_vec<tree> scc;
4005 tree x;
4007 /* Found an SCC, pop the components off the SCC stack and
4008 process them. */
4011 x = sccstack.pop ();
4013 VN_INFO (x)->on_sccstack = false;
4014 scc.safe_push (x);
4015 } while (x != name);
4017 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
4018 if (scc.length ()
4019 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4021 if (dump_file)
4022 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
4023 "SCC size %u exceeding %u\n", scc.length (),
4024 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4026 return false;
4029 if (scc.length () > 1)
4030 sort_scc (scc);
4032 process_scc (scc);
4034 return true;
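/* The size limit above corresponds to the --param sccvn-max-scc-size knob;
   when it is exceeded the whole SCCVN run is abandoned (run_scc_vn returns
   false) rather than value-numbering a partial result.  */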
4037 /* Depth-first search on NAME to discover and process SCCs in the SSA
4038 graph.
4039 Execution of this algorithm relies on the fact that the SCCs are
4040 popped off the stack in topological order.
4041 Returns true if successful, false if we stopped processing SCCs due
4042 to resource constraints. */
4044 static bool
4045 DFS (tree name)
4047 vec<ssa_op_iter> itervec = vNULL;
4048 vec<tree> namevec = vNULL;
4049 use_operand_p usep = NULL;
4050 gimple defstmt;
4051 tree use;
4052 ssa_op_iter iter;
4054 start_over:
4055 /* SCC info */
4056 VN_INFO (name)->dfsnum = next_dfs_num++;
4057 VN_INFO (name)->visited = true;
4058 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4060 sccstack.safe_push (name);
4061 VN_INFO (name)->on_sccstack = true;
4062 defstmt = SSA_NAME_DEF_STMT (name);
4064 /* Recursively DFS on our operands, looking for SCCs. */
4065 if (!gimple_nop_p (defstmt))
4067 /* Push a new iterator. */
4068 if (gphi *phi = dyn_cast <gphi *> (defstmt))
4069 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4070 else
4071 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4073 else
4074 clear_and_done_ssa_iter (&iter);
4076 while (1)
4078 /* If we are done processing uses of a name, go up the stack
4079 of iterators and process SCCs as we found them. */
4080 if (op_iter_done (&iter))
4082 /* See if we found an SCC. */
4083 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4084 if (!extract_and_process_scc_for_name (name))
4086 namevec.release ();
4087 itervec.release ();
4088 return false;
4091 /* Check if we are done. */
4092 if (namevec.is_empty ())
4094 namevec.release ();
4095 itervec.release ();
4096 return true;
4099 /* Restore the last use walker and continue walking there. */
4100 use = name;
4101 name = namevec.pop ();
4102 memcpy (&iter, &itervec.last (),
4103 sizeof (ssa_op_iter));
4104 itervec.pop ();
4105 goto continue_walking;
4108 use = USE_FROM_PTR (usep);
4110 /* Since we handle phi nodes, we will sometimes get
4111 invariants in the use expression. */
4112 if (TREE_CODE (use) == SSA_NAME)
4114 if (! (VN_INFO (use)->visited))
4116 /* Recurse by pushing the current use walking state on
4117 the stack and starting over. */
4118 itervec.safe_push (iter);
4119 namevec.safe_push (name);
4120 name = use;
4121 goto start_over;
4123 continue_walking:
4124 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4125 VN_INFO (use)->low);
4127 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4128 && VN_INFO (use)->on_sccstack)
4130 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4131 VN_INFO (name)->low);
4135 usep = op_iter_next_use (&iter);
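/* The walk above is an explicitly-stacked version of Tarjan's SCC
   algorithm; namevec/itervec replace the recursion so deep SSA use-def
   chains cannot overflow the call stack.  A minimal recursive sketch of
   the same idea over a toy adjacency-matrix graph follows (illustrative
   only, not compiled; all identifiers in it are made up for the example).  */
#if 0
#define N_NODES 4

static int edge[N_NODES][N_NODES];	/* edge[v][w]: v -> w in the graph.  */
static int dfsnum[N_NODES], low[N_NODES];
static int visited[N_NODES], onstack[N_NODES];
static int stack[N_NODES], sp, next_dfs;

static void
tarjan_visit (int v)
{
  int w;

  dfsnum[v] = low[v] = next_dfs++;	/* Like VN_INFO (name)->dfsnum/low.  */
  visited[v] = 1;
  stack[sp++] = v;			/* Like sccstack.safe_push (name).  */
  onstack[v] = 1;

  for (w = 0; w < N_NODES; w++)
    {
      if (!edge[v][w])
	continue;
      if (!visited[w])
	{
	  tarjan_visit (w);		/* DFS () does this via goto start_over.  */
	  if (low[w] < low[v])
	    low[v] = low[w];
	}
      else if (onstack[w] && dfsnum[w] < low[v])
	low[v] = dfsnum[w];
    }

  if (low[v] == dfsnum[v])		/* V roots an SCC; pop its members.  */
    do
      {
	w = stack[--sp];		/* Like extract_and_process_scc_for_name.  */
	onstack[w] = 0;
      }
    while (w != v);
}
#endif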
4139 /* Allocate a value number table. */
4141 static void
4142 allocate_vn_table (vn_tables_t table)
4144 table->phis = new vn_phi_table_type (23);
4145 table->nary = new vn_nary_op_table_type (23);
4146 table->references = new vn_reference_table_type (23);
4148 gcc_obstack_init (&table->nary_obstack);
4149 table->phis_pool = new object_allocator<vn_phi_s> ("VN phis", 30);
4150 table->references_pool = new object_allocator<vn_reference_s>
4151 ("VN references", 30);
4154 /* Free a value number table. */
4156 static void
4157 free_vn_table (vn_tables_t table)
4159 delete table->phis;
4160 table->phis = NULL;
4161 delete table->nary;
4162 table->nary = NULL;
4163 delete table->references;
4164 table->references = NULL;
4165 obstack_free (&table->nary_obstack, NULL);
4166 delete table->phis_pool;
4167 delete table->references_pool;
4170 static void
4171 init_scc_vn (void)
4173 size_t i;
4174 int j;
4175 int *rpo_numbers_temp;
4177 calculate_dominance_info (CDI_DOMINATORS);
4178 mark_dfs_back_edges ();
4180 sccstack.create (0);
4181 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4183 constant_value_ids = BITMAP_ALLOC (NULL);
4185 next_dfs_num = 1;
4186 next_value_id = 1;
4188 vn_ssa_aux_table.create (num_ssa_names + 1);
4189 /* The create above doesn't actually grow the table to the right size;
4190 it just preallocates the space, so grow it to full length explicitly. */
4191 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4192 gcc_obstack_init (&vn_ssa_aux_obstack);
4194 shared_lookup_phiargs.create (0);
4195 shared_lookup_references.create (0);
4196 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4197 rpo_numbers_temp =
4198 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4199 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4201 /* rpo_numbers_temp is in RPO order: rpo_numbers_temp[i] = bb means that
4202 the i'th block in RPO order is bb. We want to map block indices to RPO
4203 numbers instead, so we need to invert this array. */
4204 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4205 rpo_numbers[rpo_numbers_temp[j]] = j;
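/* For example (illustration only), if the first four real blocks come out
   of pre_and_rev_post_order_compute as rpo_numbers_temp[] = { 2, 3, 5, 4 },
   the loop above sets rpo_numbers[2] = 0, rpo_numbers[3] = 1,
   rpo_numbers[5] = 2 and rpo_numbers[4] = 3, i.e. block index -> RPO
   position.  */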
4207 XDELETE (rpo_numbers_temp);
4209 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
4211 renumber_gimple_stmt_uids ();
4213 /* Create the valid and optimistic value numbering tables. */
4214 valid_info = XCNEW (struct vn_tables_s);
4215 allocate_vn_table (valid_info);
4216 optimistic_info = XCNEW (struct vn_tables_s);
4217 allocate_vn_table (optimistic_info);
4218 current_info = valid_info;
4220 /* Create the VN_INFO structures, and initialize value numbers to
4221 TOP or VARYING for parameters. */
4222 for (i = 1; i < num_ssa_names; i++)
4224 tree name = ssa_name (i);
4225 if (!name)
4226 continue;
4228 VN_INFO_GET (name)->valnum = VN_TOP;
4229 VN_INFO (name)->expr = NULL_TREE;
4230 VN_INFO (name)->value_id = 0;
4232 if (!SSA_NAME_IS_DEFAULT_DEF (name))
4233 continue;
4235 switch (TREE_CODE (SSA_NAME_VAR (name)))
4237 case VAR_DECL:
4238 /* Undefined vars keep TOP. */
4239 break;
4241 case PARM_DECL:
4242 /* Parameters are VARYING but we can record a condition
4243 if we know it is a non-NULL pointer. */
4244 VN_INFO (name)->visited = true;
4245 VN_INFO (name)->valnum = name;
4246 if (POINTER_TYPE_P (TREE_TYPE (name))
4247 && nonnull_arg_p (SSA_NAME_VAR (name)))
4249 tree ops[2];
4250 ops[0] = name;
4251 ops[1] = build_int_cst (TREE_TYPE (name), 0);
4252 vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4253 boolean_true_node, 0);
4254 if (dump_file && (dump_flags & TDF_DETAILS))
4256 fprintf (dump_file, "Recording ");
4257 print_generic_expr (dump_file, name, TDF_SLIM);
4258 fprintf (dump_file, " != 0\n");
4261 break;
4263 case RESULT_DECL:
4264 /* If the result is passed by invisible reference, the default
4265 def is initialized; otherwise it's uninitialized. */
4266 if (DECL_BY_REFERENCE (SSA_NAME_VAR (name)))
4268 VN_INFO (name)->visited = true;
4269 VN_INFO (name)->valnum = name;
4271 break;
4273 default:
4274 gcc_unreachable ();
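/* As an illustration of the PARM_DECL case above: for a parameter declared
   like

     void foo (int *p) __attribute__ ((nonnull (1)));

   the default definition of p gets "p != 0 == true" entered into the nary
   hash table, so later comparisons of p against zero can be folded by the
   value numbering (foo and p are hypothetical names here).  */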
4279 void
4280 free_scc_vn (void)
4282 size_t i;
4284 delete constant_to_value_id;
4285 constant_to_value_id = NULL;
4286 BITMAP_FREE (constant_value_ids);
4287 shared_lookup_phiargs.release ();
4288 shared_lookup_references.release ();
4289 XDELETEVEC (rpo_numbers);
4291 for (i = 0; i < num_ssa_names; i++)
4293 tree name = ssa_name (i);
4294 if (name
4295 && SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ()
4296 && vn_ssa_aux_table[SSA_NAME_VERSION (name)]
4297 && VN_INFO (name)->needs_insertion)
4298 release_ssa_name (name);
4300 obstack_free (&vn_ssa_aux_obstack, NULL);
4301 vn_ssa_aux_table.release ();
4303 sccstack.release ();
4304 free_vn_table (valid_info);
4305 XDELETE (valid_info);
4306 free_vn_table (optimistic_info);
4307 XDELETE (optimistic_info);
4310 /* Set *ID according to RESULT. */
4312 static void
4313 set_value_id_for_result (tree result, unsigned int *id)
4315 if (result && TREE_CODE (result) == SSA_NAME)
4316 *id = VN_INFO (result)->value_id;
4317 else if (result && is_gimple_min_invariant (result))
4318 *id = get_or_alloc_constant_value_id (result);
4319 else
4320 *id = get_next_value_id ();
4323 /* Set the value ids in the valid hash tables. */
4325 static void
4326 set_hashtable_value_ids (void)
4328 vn_nary_op_iterator_type hin;
4329 vn_phi_iterator_type hip;
4330 vn_reference_iterator_type hir;
4331 vn_nary_op_t vno;
4332 vn_reference_t vr;
4333 vn_phi_t vp;
4335 /* Now set the value ids of the things we had put in the hash
4336 table. */
4338 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4339 set_value_id_for_result (vno->result, &vno->value_id);
4341 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4342 set_value_id_for_result (vp->result, &vp->value_id);
4344 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4345 hir)
4346 set_value_id_for_result (vr->result, &vr->value_id);
4349 class sccvn_dom_walker : public dom_walker
4351 public:
4352 sccvn_dom_walker ()
4353 : dom_walker (CDI_DOMINATORS), fail (false), cond_stack (vNULL) {}
4355 virtual void before_dom_children (basic_block);
4356 virtual void after_dom_children (basic_block);
4358 void record_cond (basic_block,
4359 enum tree_code code, tree lhs, tree rhs, bool value);
4360 void record_conds (basic_block,
4361 enum tree_code code, tree lhs, tree rhs, bool value);
4363 bool fail;
4364 vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4365 cond_stack;
4368 /* Record a temporary condition for the BB and its dominated blocks. */
4370 void
4371 sccvn_dom_walker::record_cond (basic_block bb,
4372 enum tree_code code, tree lhs, tree rhs,
4373 bool value)
4375 tree ops[2] = { lhs, rhs };
4376 vn_nary_op_t old = NULL;
4377 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4378 current_info->nary->remove_elt_with_hash (old, old->hashcode);
4379 vn_nary_op_t cond
4380 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4381 value
4382 ? boolean_true_node
4383 : boolean_false_node, 0);
4384 if (dump_file && (dump_flags & TDF_DETAILS))
4386 fprintf (dump_file, "Recording temporarily ");
4387 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4388 fprintf (dump_file, " %s ", get_tree_code_name (code));
4389 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4390 fprintf (dump_file, " == %s%s\n",
4391 value ? "true" : "false",
4392 old ? " (old entry saved)" : "");
4394 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4397 /* Record temporary conditions for BB and its dominated blocks
4398 according to LHS CODE RHS == VALUE, together with conditions implied by it. */
4400 void
4401 sccvn_dom_walker::record_conds (basic_block bb,
4402 enum tree_code code, tree lhs, tree rhs,
4403 bool value)
4405 /* Record the original condition. */
4406 record_cond (bb, code, lhs, rhs, value);
4408 if (!value)
4409 return;
4411 /* Record dominated conditions if the condition is true. Note that
4412 the inversion is already recorded. */
4413 switch (code)
4415 case LT_EXPR:
4416 case GT_EXPR:
4417 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4418 record_cond (bb, NE_EXPR, lhs, rhs, true);
4419 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4420 break;
4422 case EQ_EXPR:
4423 record_cond (bb, LE_EXPR, lhs, rhs, true);
4424 record_cond (bb, GE_EXPR, lhs, rhs, true);
4425 record_cond (bb, LT_EXPR, lhs, rhs, false);
4426 record_cond (bb, GT_EXPR, lhs, rhs, false);
4427 break;
4429 default:
4430 break;
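/* For example (values shown for illustration only),
       record_conds (bb, LT_EXPR, a, b, true)
   records a < b == true, a <= b == true, a != b == true and
   a == b == false for BB, while a false VALUE records just the original
   condition, since nothing further is implied here.  */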
4434 /* Restore expressions and values derived from conditionals. */
4436 void
4437 sccvn_dom_walker::after_dom_children (basic_block bb)
4439 while (!cond_stack.is_empty ()
4440 && cond_stack.last ().first == bb)
4442 vn_nary_op_t cond = cond_stack.last ().second.first;
4443 vn_nary_op_t old = cond_stack.last ().second.second;
4444 current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4445 if (old)
4446 vn_nary_op_insert_into (old, current_info->nary, false);
4447 cond_stack.pop ();
4451 /* Value number all statements in BB. */
4453 void
4454 sccvn_dom_walker::before_dom_children (basic_block bb)
4456 edge e;
4457 edge_iterator ei;
4459 if (fail)
4460 return;
4462 /* If any of the predecessor edges that do not come from blocks dominated
4463 by us are still marked as possibly executable, consider this block
4464 reachable. */
4465 bool reachable = bb == ENTRY_BLOCK_PTR_FOR_FN (cfun);
4466 FOR_EACH_EDGE (e, ei, bb->preds)
4467 if (!dominated_by_p (CDI_DOMINATORS, e->src, bb))
4468 reachable |= (e->flags & EDGE_EXECUTABLE);
4470 /* If the block is not reachable, none of its outgoing edges are
4471 executable, and neither are incoming edges with src dominated by us. */
4472 if (!reachable)
4474 if (dump_file && (dump_flags & TDF_DETAILS))
4475 fprintf (dump_file, "Marking all outgoing edges of unreachable "
4476 "BB %d as not executable\n", bb->index);
4478 FOR_EACH_EDGE (e, ei, bb->succs)
4479 e->flags &= ~EDGE_EXECUTABLE;
4481 FOR_EACH_EDGE (e, ei, bb->preds)
4483 if (dominated_by_p (CDI_DOMINATORS, e->src, bb))
4485 if (dump_file && (dump_flags & TDF_DETAILS))
4486 fprintf (dump_file, "Marking backedge from BB %d into "
4487 "unreachable BB %d as not executable\n",
4488 e->src->index, bb->index);
4489 e->flags &= ~EDGE_EXECUTABLE;
4492 return;
4495 if (dump_file && (dump_flags & TDF_DETAILS))
4496 fprintf (dump_file, "Visiting BB %d\n", bb->index);
4498 /* If we have a single predecessor record the equivalence from a
4499 possible condition on the predecessor edge. */
4500 if (single_pred_p (bb))
4502 edge e = single_pred_edge (bb);
4503 /* Check if there are multiple executable successor edges in
4504 the source block. Otherwise there is no additional info
4505 to be recorded. */
4506 edge e2;
4507 FOR_EACH_EDGE (e2, ei, e->src->succs)
4508 if (e2 != e
4509 && e2->flags & EDGE_EXECUTABLE)
4510 break;
4511 if (e2 && (e2->flags & EDGE_EXECUTABLE))
4513 gimple stmt = last_stmt (e->src);
4514 if (stmt
4515 && gimple_code (stmt) == GIMPLE_COND)
4517 enum tree_code code = gimple_cond_code (stmt);
4518 tree lhs = gimple_cond_lhs (stmt);
4519 tree rhs = gimple_cond_rhs (stmt);
4520 record_conds (bb, code, lhs, rhs,
4521 (e->flags & EDGE_TRUE_VALUE) != 0);
4522 code = invert_tree_comparison (code, HONOR_NANS (lhs));
4523 if (code != ERROR_MARK)
4524 record_conds (bb, code, lhs, rhs,
4525 (e->flags & EDGE_TRUE_VALUE) == 0);
4530 /* Value-number all defs in the basic-block. */
4531 for (gphi_iterator gsi = gsi_start_phis (bb);
4532 !gsi_end_p (gsi); gsi_next (&gsi))
4534 gphi *phi = gsi.phi ();
4535 tree res = PHI_RESULT (phi);
4536 if (!VN_INFO (res)->visited
4537 && !DFS (res))
4539 fail = true;
4540 return;
4543 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4544 !gsi_end_p (gsi); gsi_next (&gsi))
4546 ssa_op_iter i;
4547 tree op;
4548 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4549 if (!VN_INFO (op)->visited
4550 && !DFS (op))
4552 fail = true;
4553 return;
4557 /* Finally look at the last stmt. */
4558 gimple stmt = last_stmt (bb);
4559 if (!stmt)
4560 return;
4562 enum gimple_code code = gimple_code (stmt);
4563 if (code != GIMPLE_COND
4564 && code != GIMPLE_SWITCH
4565 && code != GIMPLE_GOTO)
4566 return;
4568 if (dump_file && (dump_flags & TDF_DETAILS))
4570 fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
4571 print_gimple_stmt (dump_file, stmt, 0, 0);
4574 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4575 if value-numbering can prove they are not reachable. Handling
4576 computed gotos is also possible. */
4577 tree val;
4578 switch (code)
4580 case GIMPLE_COND:
4582 tree lhs = gimple_cond_lhs (stmt);
4583 tree rhs = gimple_cond_rhs (stmt);
4584 /* Work hard at computing the condition, taking into account
4585 the valueization of the defining stmt. */
4586 if (TREE_CODE (lhs) == SSA_NAME)
4587 lhs = vn_get_expr_for (lhs);
4588 if (TREE_CODE (rhs) == SSA_NAME)
4589 rhs = vn_get_expr_for (rhs);
4590 val = fold_binary (gimple_cond_code (stmt),
4591 boolean_type_node, lhs, rhs);
4592 /* If that didn't simplify to a constant, see if we have recorded
4593 temporary expressions from taken edges. */
4594 if (!val || TREE_CODE (val) != INTEGER_CST)
4596 tree ops[2];
4597 ops[0] = gimple_cond_lhs (stmt);
4598 ops[1] = gimple_cond_rhs (stmt);
4599 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
4600 boolean_type_node, ops, NULL);
4602 break;
4604 case GIMPLE_SWITCH:
4605 val = gimple_switch_index (as_a <gswitch *> (stmt));
4606 break;
4607 case GIMPLE_GOTO:
4608 val = gimple_goto_dest (stmt);
4609 break;
4610 default:
4611 gcc_unreachable ();
4613 if (!val)
4614 return;
4616 edge taken = find_taken_edge (bb, vn_valueize (val));
4617 if (!taken)
4618 return;
4620 if (dump_file && (dump_flags & TDF_DETAILS))
4621 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4622 "not executable\n", bb->index, bb->index, taken->dest->index);
4624 FOR_EACH_EDGE (e, ei, bb->succs)
4625 if (e != taken)
4626 e->flags &= ~EDGE_EXECUTABLE;
4629 /* Do SCCVN. Returns true if it finished, false if we bailed out
4630 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
4631 how we use the alias-oracle walker during the VN process. */
4633 bool
4634 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4636 basic_block bb;
4637 size_t i;
4639 default_vn_walk_kind = default_vn_walk_kind_;
4641 init_scc_vn ();
4643 /* Mark all edges as possibly executable. */
4644 FOR_ALL_BB_FN (bb, cfun)
4646 edge_iterator ei;
4647 edge e;
4648 FOR_EACH_EDGE (e, ei, bb->succs)
4649 e->flags |= EDGE_EXECUTABLE;
4652 /* Walk all blocks in dominator order, value-numbering the SSA defs
4653 of their stmts and deciding which outgoing edges are not executable. */
4654 sccvn_dom_walker walker;
4655 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4656 if (walker.fail)
4658 free_scc_vn ();
4659 return false;
4662 /* Initialize the value ids and prune out remaining VN_TOPs
4663 from dead code. */
4664 for (i = 1; i < num_ssa_names; ++i)
4666 tree name = ssa_name (i);
4667 vn_ssa_aux_t info;
4668 if (!name)
4669 continue;
4670 info = VN_INFO (name);
4671 if (!info->visited)
4672 info->valnum = name;
4673 if (info->valnum == name
4674 || info->valnum == VN_TOP)
4675 info->value_id = get_next_value_id ();
4676 else if (is_gimple_min_invariant (info->valnum))
4677 info->value_id = get_or_alloc_constant_value_id (info->valnum);
4680 /* Propagate. */
4681 for (i = 1; i < num_ssa_names; ++i)
4683 tree name = ssa_name (i);
4684 vn_ssa_aux_t info;
4685 if (!name)
4686 continue;
4687 info = VN_INFO (name);
4688 if (TREE_CODE (info->valnum) == SSA_NAME
4689 && info->valnum != name
4690 && info->value_id != VN_INFO (info->valnum)->value_id)
4691 info->value_id = VN_INFO (info->valnum)->value_id;
4694 set_hashtable_value_ids ();
4696 if (dump_file && (dump_flags & TDF_DETAILS))
4698 fprintf (dump_file, "Value numbers:\n");
4699 for (i = 0; i < num_ssa_names; i++)
4701 tree name = ssa_name (i);
4702 if (name
4703 && VN_INFO (name)->visited
4704 && SSA_VAL (name) != name)
4706 print_generic_expr (dump_file, name, 0);
4707 fprintf (dump_file, " = ");
4708 print_generic_expr (dump_file, SSA_VAL (name), 0);
4709 fprintf (dump_file, "\n");
4714 return true;
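/* Minimal usage sketch (hypothetical caller, illustrative only, not
   compiled): a pass that wants value numbers runs the machinery, consumes
   the results via SSA_VAL ()/VN_INFO ()/vn_valueize (), and tears it down
   again.  */
#if 0
static unsigned int
my_pass_execute (void)			/* Hypothetical pass body.  */
{
  /* Bail out if SCCVN ran into its resource limits.  */
  if (!run_scc_vn (VN_WALK))
    return 0;

  /* ... replace uses via SSA_VAL (name) / vn_valueize (name) here ...  */

  free_scc_vn ();
  return 0;
}
#endif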
4717 /* Return the maximum value id we have ever seen. */
4719 unsigned int
4720 get_max_value_id (void)
4722 return next_value_id;
4725 /* Return the next unique value id. */
4727 unsigned int
4728 get_next_value_id (void)
4730 return next_value_id++;
4734 /* Compare two expressions E1 and E2 and return true if they are equal. */
4736 bool
4737 expressions_equal_p (tree e1, tree e2)
4739 /* The obvious case. */
4740 if (e1 == e2)
4741 return true;
4743 /* If only one of them is null, they cannot be equal. */
4744 if (!e1 || !e2)
4745 return false;
4747 /* Now perform the actual comparison. */
4748 if (TREE_CODE (e1) == TREE_CODE (e2)
4749 && operand_equal_p (e1, e2, OEP_PURE_SAME))
4750 return true;
4752 return false;
4756 /* Return true if the nary operation NARY may trap. This is a copy
4757 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
4759 bool
4760 vn_nary_may_trap (vn_nary_op_t nary)
4762 tree type;
4763 tree rhs2 = NULL_TREE;
4764 bool honor_nans = false;
4765 bool honor_snans = false;
4766 bool fp_operation = false;
4767 bool honor_trapv = false;
4768 bool handled, ret;
4769 unsigned i;
4771 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4772 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4773 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4775 type = nary->type;
4776 fp_operation = FLOAT_TYPE_P (type);
4777 if (fp_operation)
4779 honor_nans = flag_trapping_math && !flag_finite_math_only;
4780 honor_snans = flag_signaling_nans != 0;
4782 else if (INTEGRAL_TYPE_P (type)
4783 && TYPE_OVERFLOW_TRAPS (type))
4784 honor_trapv = true;
4786 if (nary->length >= 2)
4787 rhs2 = nary->op[1];
4788 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4789 honor_trapv,
4790 honor_nans, honor_snans, rhs2,
4791 &handled);
4792 if (handled
4793 && ret)
4794 return true;
4796 for (i = 0; i < nary->length; ++i)
4797 if (tree_could_trap_p (nary->op[i]))
4798 return true;
4800 return false;
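/* For instance (illustration only), a TRUNC_DIV_EXPR nary whose divisor is
   not a nonzero constant may trap (division by zero), and so may a
   PLUS_EXPR on a type with TYPE_OVERFLOW_TRAPS, while the same addition on
   an ordinary wrapping integer type does not.  */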