gcc/tree-ssa-sccvn.c
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2017 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimplify.h"
45 #include "flags.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "dumpfile.h"
55 #include "cfgloop.h"
56 #include "params.h"
57 #include "tree-ssa-propagate.h"
58 #include "tree-ssa-sccvn.h"
59 #include "tree-cfg.h"
60 #include "domwalk.h"
61 #include "gimple-iterator.h"
62 #include "gimple-match.h"
64 /* This algorithm is based on the SCC algorithm presented by Keith
65 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
66 (http://citeseer.ist.psu.edu/41805.html). In
67 straight-line code, it is equivalent to a regular hash-based value
68 numbering that is performed in reverse postorder.
70 For code with cycles, there are two alternatives, both of which
71 require keeping the hashtables separate from the actual list of
72 value numbers for SSA names.
74 1. Iterate value numbering in an RPO walk of the blocks, removing
75 all the entries from the hashtable after each iteration (but
76 keeping the SSA name->value number mapping between iterations).
77 Iterate until it does not change.
79 2. Perform value numbering as part of an SCC walk on the SSA graph,
80 iterating only the cycles in the SSA graph until they do not change
81 (using a separate, optimistic hashtable for value numbering the SCC
82 operands).
84 The second is not just faster in practice (because most SSA graph
85 cycles do not involve all the variables in the graph), it also has
86 some nice properties.
88 One of these nice properties is that when we pop an SCC off the
89 stack, we are guaranteed to have processed all the operands coming from
90 *outside of that SCC*, so we do not need to do anything special to
91 ensure they have value numbers.
93 Another nice property is that the SCC walk is done as part of a DFS
94 of the SSA graph, which makes it easy to perform combining and
95 simplifying operations at the same time.
97 The code below is deliberately written in a way that makes it easy
98 to separate the SCC walk from the other work it does.
100 In order to propagate constants through the code, we track which
101 expressions contain constants, and use those while folding. In
102 theory, we could also track expressions whose value numbers are
103 replaced, in case we end up folding based on expression
104 identities.
106 In order to value number memory, we assign value numbers to vuses.
107 This enables us to note that, for example, stores to the same
108 address of the same value from the same starting memory states are
109 equivalent.
110 TODO:
112 1. We can iterate only the changing portions of the SCCs, but
113 I have not seen an SCC big enough for this to be a win.
114 2. If you differentiate between phi nodes for loops and phi nodes
115 for if-then-else, you can properly consider phi nodes in different
116 blocks for equivalence.
117 3. We could value number vuses in more cases, particularly, whole
118 structure copies.
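/* Illustrative sketch, not part of this file: the straight-line case
   described above, i.e. hash-based value numbering performed in reverse
   postorder.  The stmt record and string-based names are hypothetical
   stand-ins for GIMPLE statements, SSA names and the hashtables defined
   below; the point is only the per-statement lookup-or-insert discipline.

     #include <map>
     #include <string>
     #include <vector>

     struct stmt { std::string lhs, op, rhs1, rhs2; };

     // Map each SSA name to its value number (represented by a name).
     static std::map<std::string, std::string> value_of;

     static std::string
     valueize (const std::string &name)
     {
       auto it = value_of.find (name);
       return it == value_of.end () ? name : it->second;
     }

     static void
     value_number_straight_line (const std::vector<stmt> &rpo_stmts)
     {
       std::map<std::string, std::string> expr_to_value;
       for (const stmt &s : rpo_stmts)
         {
           // Canonicalize by valueizing the operands, then look the
           // expression up in the hashtable.
           std::string expr = s.op + " " + valueize (s.rhs1)
                              + " " + valueize (s.rhs2);
           auto it = expr_to_value.find (expr);
           if (it != expr_to_value.end ())
             value_of[s.lhs] = it->second;      // redundant computation
           else
             {
               expr_to_value[expr] = s.lhs;     // first occurrence
               value_of[s.lhs] = s.lhs;
             }
         }
     }

   For cycles, alternative 2 above runs this same discipline one SCC at a
   time against an optimistic table that is cleared between iterations.  */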
122 static tree *last_vuse_ptr;
123 static vn_lookup_kind vn_walk_kind;
124 static vn_lookup_kind default_vn_walk_kind;
125 bitmap const_parms;
127 /* vn_nary_op hashtable helpers. */
129 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
131 typedef vn_nary_op_s *compare_type;
132 static inline hashval_t hash (const vn_nary_op_s *);
133 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
136 /* Return the computed hashcode for nary operation P1. */
138 inline hashval_t
139 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
141 return vno1->hashcode;
144 /* Compare nary operations P1 and P2 and return true if they are
145 equivalent. */
147 inline bool
148 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
150 return vn_nary_op_eq (vno1, vno2);
153 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
154 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
157 /* vn_phi hashtable helpers. */
159 static int
160 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
162 struct vn_phi_hasher : pointer_hash <vn_phi_s>
164 static inline hashval_t hash (const vn_phi_s *);
165 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
166 static inline void remove (vn_phi_s *);
169 /* Return the computed hashcode for phi operation P1. */
171 inline hashval_t
172 vn_phi_hasher::hash (const vn_phi_s *vp1)
174 return vp1->hashcode;
177 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
179 inline bool
180 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
182 return vn_phi_eq (vp1, vp2);
185 /* Free a phi operation structure VP. */
187 inline void
188 vn_phi_hasher::remove (vn_phi_s *phi)
190 phi->phiargs.release ();
193 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
194 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
197 /* Compare two reference operands P1 and P2 for equality. Return true if
198 they are equal, and false otherwise. */
200 static int
201 vn_reference_op_eq (const void *p1, const void *p2)
203 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
204 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
206 return (vro1->opcode == vro2->opcode
207 /* We do not care for differences in type qualification. */
208 && (vro1->type == vro2->type
209 || (vro1->type && vro2->type
210 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
211 TYPE_MAIN_VARIANT (vro2->type))))
212 && expressions_equal_p (vro1->op0, vro2->op0)
213 && expressions_equal_p (vro1->op1, vro2->op1)
214 && expressions_equal_p (vro1->op2, vro2->op2));
217 /* Free a reference operation structure VP. */
219 static inline void
220 free_reference (vn_reference_s *vr)
222 vr->operands.release ();
226 /* vn_reference hashtable helpers. */
228 struct vn_reference_hasher : pointer_hash <vn_reference_s>
230 static inline hashval_t hash (const vn_reference_s *);
231 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
232 static inline void remove (vn_reference_s *);
235 /* Return the hashcode for a given reference operation P1. */
237 inline hashval_t
238 vn_reference_hasher::hash (const vn_reference_s *vr1)
240 return vr1->hashcode;
243 inline bool
244 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
246 return vn_reference_eq (v, c);
249 inline void
250 vn_reference_hasher::remove (vn_reference_s *v)
252 free_reference (v);
255 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
256 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
259 /* The set of hashtables and alloc_pool's for their items. */
261 typedef struct vn_tables_s
263 vn_nary_op_table_type *nary;
264 vn_phi_table_type *phis;
265 vn_reference_table_type *references;
266 struct obstack nary_obstack;
267 object_allocator<vn_phi_s> *phis_pool;
268 object_allocator<vn_reference_s> *references_pool;
269 } *vn_tables_t;
272 /* vn_constant hashtable helpers. */
274 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
276 static inline hashval_t hash (const vn_constant_s *);
277 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
280 /* Hash table hash function for vn_constant_t. */
282 inline hashval_t
283 vn_constant_hasher::hash (const vn_constant_s *vc1)
285 return vc1->hashcode;
288 /* Hash table equality function for vn_constant_t. */
290 inline bool
291 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
293 if (vc1->hashcode != vc2->hashcode)
294 return false;
296 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
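/* Illustrative sketch, not part of this file: the hasher pattern used by
   the hash-table helpers above.  Every entry caches its hash code when it
   is built, so hash () is a plain field read and equal () can reject most
   candidates by comparing the cached codes before doing the structural
   comparison.  The entry type and its payload are made up for the example.

     struct entry
     {
       unsigned hashcode;   // computed once, when the entry is created
       int op0, op1;        // stand-in payload
     };

     static inline unsigned
     entry_hash (const entry *e)
     {
       return e->hashcode;
     }

     static inline bool
     entry_equal (const entry *a, const entry *b)
     {
       if (a->hashcode != b->hashcode)
         return false;      // cheap early out on a hash mismatch
       return a->op0 == b->op0 && a->op1 == b->op1;
     }  */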
299 static hash_table<vn_constant_hasher> *constant_to_value_id;
300 static bitmap constant_value_ids;
303 /* Valid hashtables storing information we have proven to be
304 correct. */
306 static vn_tables_t valid_info;
308 /* Optimistic hashtables storing information we are making assumptions about
309 during iterations. */
311 static vn_tables_t optimistic_info;
313 /* Pointer to the set of hashtables that is currently being used.
314 Should always point to either the optimistic_info, or the
315 valid_info. */
317 static vn_tables_t current_info;
320 /* Reverse post order index for each basic block. */
322 static int *rpo_numbers;
324 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
326 /* Return the SSA value of the VUSE x, supporting released VDEFs
327 during elimination which will value-number the VDEF to the
328 associated VUSE (but not substitute in the whole lattice). */
330 static inline tree
331 vuse_ssa_val (tree x)
333 if (!x)
334 return NULL_TREE;
336 do
338 x = SSA_VAL (x);
340 while (SSA_NAME_IN_FREE_LIST (x));
342 return x;
345 /* This represents the top of the VN lattice, which is the universal
346 value. */
348 tree VN_TOP;
350 /* Unique counter for our value ids. */
352 static unsigned int next_value_id;
354 /* Next DFS number and the stack for strongly connected component
355 detection. */
357 static unsigned int next_dfs_num;
358 static vec<tree> sccstack;
362 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
363 are allocated on an obstack for locality reasons, and to free them
364 without looping over the vec. */
366 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
367 static struct obstack vn_ssa_aux_obstack;
369 /* Return whether there is value numbering information for a given SSA name. */
371 bool
372 has_VN_INFO (tree name)
374 if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
375 return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
376 return false;
379 /* Return the value numbering information for a given SSA name. */
381 vn_ssa_aux_t
382 VN_INFO (tree name)
384 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
385 gcc_checking_assert (res);
386 return res;
389 /* Set the value numbering info for a given SSA name to a given
390 value. */
392 static inline void
393 VN_INFO_SET (tree name, vn_ssa_aux_t value)
395 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
398 /* Initialize the value numbering info for a given SSA name.
399 This should be called just once for every SSA name. */
401 vn_ssa_aux_t
402 VN_INFO_GET (tree name)
404 vn_ssa_aux_t newinfo;
406 gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
407 || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
408 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
409 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
410 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
411 vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
412 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
413 return newinfo;
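/* Illustrative sketch, not part of this file: the side-table idiom used by
   VN_INFO/VN_INFO_GET above.  Per-SSA-name records live in a vector
   indexed by the name's version number, grown and cleared on demand, while
   the records themselves are carved from an arena (the obstack above) so
   they can all be freed at once.  std::deque stands in for the obstack
   here, and aux_info for struct vn_ssa_aux.

     #include <deque>
     #include <vector>

     struct aux_info { int value_id; };

     static std::deque<aux_info> arena;          // elements never move
     static std::vector<aux_info *> aux_table;   // indexed by SSA version

     static aux_info *
     get_or_create_info (unsigned version)
     {
       if (version >= aux_table.size ())
         aux_table.resize (version + 1, nullptr);  // like safe_grow_cleared
       if (!aux_table[version])
         {
           arena.push_back (aux_info ());
           aux_table[version] = &arena.back ();
         }
       return aux_table[version];
     }  */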
417 /* Return the vn_kind the expression computed by the stmt should be
418 associated with. */
420 enum vn_kind
421 vn_get_stmt_kind (gimple *stmt)
423 switch (gimple_code (stmt))
425 case GIMPLE_CALL:
426 return VN_REFERENCE;
427 case GIMPLE_PHI:
428 return VN_PHI;
429 case GIMPLE_ASSIGN:
431 enum tree_code code = gimple_assign_rhs_code (stmt);
432 tree rhs1 = gimple_assign_rhs1 (stmt);
433 switch (get_gimple_rhs_class (code))
435 case GIMPLE_UNARY_RHS:
436 case GIMPLE_BINARY_RHS:
437 case GIMPLE_TERNARY_RHS:
438 return VN_NARY;
439 case GIMPLE_SINGLE_RHS:
440 switch (TREE_CODE_CLASS (code))
442 case tcc_reference:
443 /* VOP-less references can go through unary case. */
444 if ((code == REALPART_EXPR
445 || code == IMAGPART_EXPR
446 || code == VIEW_CONVERT_EXPR
447 || code == BIT_FIELD_REF)
448 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
449 return VN_NARY;
451 /* Fallthrough. */
452 case tcc_declaration:
453 return VN_REFERENCE;
455 case tcc_constant:
456 return VN_CONSTANT;
458 default:
459 if (code == ADDR_EXPR)
460 return (is_gimple_min_invariant (rhs1)
461 ? VN_CONSTANT : VN_REFERENCE);
462 else if (code == CONSTRUCTOR)
463 return VN_NARY;
464 return VN_NONE;
466 default:
467 return VN_NONE;
470 default:
471 return VN_NONE;
475 /* Lookup a value id for CONSTANT and return it. If it does not
476 exist, return 0. */
478 unsigned int
479 get_constant_value_id (tree constant)
481 vn_constant_s **slot;
482 struct vn_constant_s vc;
484 vc.hashcode = vn_hash_constant_with_type (constant);
485 vc.constant = constant;
486 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
487 if (slot)
488 return (*slot)->value_id;
489 return 0;
492 /* Lookup a value id for CONSTANT; if it does not exist, create a
493 new one. Either way, return the value id. */
495 unsigned int
496 get_or_alloc_constant_value_id (tree constant)
498 vn_constant_s **slot;
499 struct vn_constant_s vc;
500 vn_constant_t vcp;
502 vc.hashcode = vn_hash_constant_with_type (constant);
503 vc.constant = constant;
504 slot = constant_to_value_id->find_slot (&vc, INSERT);
505 if (*slot)
506 return (*slot)->value_id;
508 vcp = XNEW (struct vn_constant_s);
509 vcp->hashcode = vc.hashcode;
510 vcp->constant = constant;
511 vcp->value_id = get_next_value_id ();
512 *slot = vcp;
513 bitmap_set_bit (constant_value_ids, vcp->value_id);
514 return vcp->value_id;
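/* Illustrative sketch, not part of this file: the lookup-or-insert shape
   of get_or_alloc_constant_value_id above, expressed with the standard
   library instead of find_slot (..., INSERT).  The key type and the id
   counter are stand-ins.

     #include <string>
     #include <unordered_map>

     static unsigned next_id = 1;
     static std::unordered_map<std::string, unsigned> constant_ids;

     static unsigned
     get_or_alloc_id (const std::string &constant)
     {
       // emplace hands back the existing slot when the key is already
       // present, mirroring find_slot with INSERT plus the *slot check.
       auto res = constant_ids.emplace (constant, 0u);
       if (!res.second)
         return res.first->second;      // already had an id
       res.first->second = next_id++;   // fresh id for a new constant
       return res.first->second;
     }  */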
517 /* Return true if V is a value id for a constant. */
519 bool
520 value_id_constant_p (unsigned int v)
522 return bitmap_bit_p (constant_value_ids, v);
525 /* Compute the hash for a reference operand VRO1. */
527 static void
528 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
530 hstate.add_int (vro1->opcode);
531 if (vro1->op0)
532 inchash::add_expr (vro1->op0, hstate);
533 if (vro1->op1)
534 inchash::add_expr (vro1->op1, hstate);
535 if (vro1->op2)
536 inchash::add_expr (vro1->op2, hstate);
539 /* Compute a hash for the reference operation VR1 and return it. */
541 static hashval_t
542 vn_reference_compute_hash (const vn_reference_t vr1)
544 inchash::hash hstate;
545 hashval_t result;
546 int i;
547 vn_reference_op_t vro;
548 HOST_WIDE_INT off = -1;
549 bool deref = false;
551 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
553 if (vro->opcode == MEM_REF)
554 deref = true;
555 else if (vro->opcode != ADDR_EXPR)
556 deref = false;
557 if (vro->off != -1)
559 if (off == -1)
560 off = 0;
561 off += vro->off;
563 else
565 if (off != -1
566 && off != 0)
567 hstate.add_int (off);
568 off = -1;
569 if (deref
570 && vro->opcode == ADDR_EXPR)
572 if (vro->op0)
574 tree op = TREE_OPERAND (vro->op0, 0);
575 hstate.add_int (TREE_CODE (op));
576 inchash::add_expr (op, hstate);
579 else
580 vn_reference_op_compute_hash (vro, hstate);
583 result = hstate.end ();
584 /* ??? We would ICE later if we hash instead of adding that in. */
585 if (vr1->vuse)
586 result += SSA_NAME_VERSION (vr1->vuse);
588 return result;
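/* Illustrative sketch, not part of this file: the offset-folding idea used
   by vn_reference_compute_hash above.  Runs of operands with known constant
   offsets are summed and only the sum is mixed into the hash, so two
   decompositions of the same address that merely split a constant offset
   differently still hash identically.  The op record and the mix ()
   combiner are made up for the example.

     #include <vector>

     struct op { bool has_const_off; long off; unsigned code; };

     static unsigned
     mix (unsigned h, unsigned v)
     {
       return h * 31 + v;   // any reasonable hash combiner
     }

     static unsigned
     hash_ops (const std::vector<op> &ops)
     {
       unsigned h = 0;
       long acc = 0;
       bool have_acc = false;
       for (const op &o : ops)
         if (o.has_const_off)
           {
             acc += o.off;              // fold into the running offset
             have_acc = true;
           }
         else
           {
             if (have_acc && acc != 0)
               h = mix (h, (unsigned) acc);
             acc = 0;
             have_acc = false;
             h = mix (h, o.code);       // hash the operand itself
           }
       if (have_acc && acc != 0)
         h = mix (h, (unsigned) acc);
       return h;
     }  */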
591 /* Return true if reference operations VR1 and VR2 are equivalent. This
592 means they have the same set of operands and vuses. */
594 bool
595 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
597 unsigned i, j;
599 /* Early out if this is not a hash collision. */
600 if (vr1->hashcode != vr2->hashcode)
601 return false;
603 /* The VOP needs to be the same. */
604 if (vr1->vuse != vr2->vuse)
605 return false;
607 /* If the operands are the same we are done. */
608 if (vr1->operands == vr2->operands)
609 return true;
611 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
612 return false;
614 if (INTEGRAL_TYPE_P (vr1->type)
615 && INTEGRAL_TYPE_P (vr2->type))
617 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
618 return false;
620 else if (INTEGRAL_TYPE_P (vr1->type)
621 && (TYPE_PRECISION (vr1->type)
622 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
623 return false;
624 else if (INTEGRAL_TYPE_P (vr2->type)
625 && (TYPE_PRECISION (vr2->type)
626 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
627 return false;
629 i = 0;
630 j = 0;
631 do
633 HOST_WIDE_INT off1 = 0, off2 = 0;
634 vn_reference_op_t vro1, vro2;
635 vn_reference_op_s tem1, tem2;
636 bool deref1 = false, deref2 = false;
637 for (; vr1->operands.iterate (i, &vro1); i++)
639 if (vro1->opcode == MEM_REF)
640 deref1 = true;
641 /* Do not look through a storage order barrier. */
642 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
643 return false;
644 if (vro1->off == -1)
645 break;
646 off1 += vro1->off;
648 for (; vr2->operands.iterate (j, &vro2); j++)
650 if (vro2->opcode == MEM_REF)
651 deref2 = true;
652 /* Do not look through a storage order barrier. */
653 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
654 return false;
655 if (vro2->off == -1)
656 break;
657 off2 += vro2->off;
659 if (off1 != off2)
660 return false;
661 if (deref1 && vro1->opcode == ADDR_EXPR)
663 memset (&tem1, 0, sizeof (tem1));
664 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
665 tem1.type = TREE_TYPE (tem1.op0);
666 tem1.opcode = TREE_CODE (tem1.op0);
667 vro1 = &tem1;
668 deref1 = false;
670 if (deref2 && vro2->opcode == ADDR_EXPR)
672 memset (&tem2, 0, sizeof (tem2));
673 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
674 tem2.type = TREE_TYPE (tem2.op0);
675 tem2.opcode = TREE_CODE (tem2.op0);
676 vro2 = &tem2;
677 deref2 = false;
679 if (deref1 != deref2)
680 return false;
681 if (!vn_reference_op_eq (vro1, vro2))
682 return false;
683 ++j;
684 ++i;
686 while (vr1->operands.length () != i
687 || vr2->operands.length () != j);
689 return true;
692 /* Copy the operations present in load/store REF into RESULT, a vector of
693 vn_reference_op_s's. */
695 static void
696 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
698 if (TREE_CODE (ref) == TARGET_MEM_REF)
700 vn_reference_op_s temp;
702 result->reserve (3);
704 memset (&temp, 0, sizeof (temp));
705 temp.type = TREE_TYPE (ref);
706 temp.opcode = TREE_CODE (ref);
707 temp.op0 = TMR_INDEX (ref);
708 temp.op1 = TMR_STEP (ref);
709 temp.op2 = TMR_OFFSET (ref);
710 temp.off = -1;
711 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
712 temp.base = MR_DEPENDENCE_BASE (ref);
713 result->quick_push (temp);
715 memset (&temp, 0, sizeof (temp));
716 temp.type = NULL_TREE;
717 temp.opcode = ERROR_MARK;
718 temp.op0 = TMR_INDEX2 (ref);
719 temp.off = -1;
720 result->quick_push (temp);
722 memset (&temp, 0, sizeof (temp));
723 temp.type = NULL_TREE;
724 temp.opcode = TREE_CODE (TMR_BASE (ref));
725 temp.op0 = TMR_BASE (ref);
726 temp.off = -1;
727 result->quick_push (temp);
728 return;
731 /* For non-calls, store the information that makes up the address. */
732 tree orig = ref;
733 while (ref)
735 vn_reference_op_s temp;
737 memset (&temp, 0, sizeof (temp));
738 temp.type = TREE_TYPE (ref);
739 temp.opcode = TREE_CODE (ref);
740 temp.off = -1;
742 switch (temp.opcode)
744 case MODIFY_EXPR:
745 temp.op0 = TREE_OPERAND (ref, 1);
746 break;
747 case WITH_SIZE_EXPR:
748 temp.op0 = TREE_OPERAND (ref, 1);
749 temp.off = 0;
750 break;
751 case MEM_REF:
752 /* The base address gets its own vn_reference_op_s structure. */
753 temp.op0 = TREE_OPERAND (ref, 1);
755 offset_int off = mem_ref_offset (ref);
756 if (wi::fits_shwi_p (off))
757 temp.off = off.to_shwi ();
759 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
760 temp.base = MR_DEPENDENCE_BASE (ref);
761 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
762 break;
763 case BIT_FIELD_REF:
764 /* Record bits, position and storage order. */
765 temp.op0 = TREE_OPERAND (ref, 1);
766 temp.op1 = TREE_OPERAND (ref, 2);
767 if (tree_fits_shwi_p (TREE_OPERAND (ref, 2)))
769 HOST_WIDE_INT off = tree_to_shwi (TREE_OPERAND (ref, 2));
770 if (off % BITS_PER_UNIT == 0)
771 temp.off = off / BITS_PER_UNIT;
773 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
774 break;
775 case COMPONENT_REF:
776 /* The field decl is enough to unambiguously specify the field;
777 a matching type is not necessary, and a mismatching type
778 is always a spurious difference. */
779 temp.type = NULL_TREE;
780 temp.op0 = TREE_OPERAND (ref, 1);
781 temp.op1 = TREE_OPERAND (ref, 2);
783 tree this_offset = component_ref_field_offset (ref);
784 if (this_offset
785 && TREE_CODE (this_offset) == INTEGER_CST)
787 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
788 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
790 offset_int off
791 = (wi::to_offset (this_offset)
792 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
793 if (wi::fits_shwi_p (off)
794 /* Prohibit value-numbering zero offset components
795 of addresses the same before the pass folding
796 __builtin_object_size had a chance to run
797 (checking cfun->after_inlining does the
798 trick here). */
799 && (TREE_CODE (orig) != ADDR_EXPR
800 || off != 0
801 || cfun->after_inlining))
802 temp.off = off.to_shwi ();
806 break;
807 case ARRAY_RANGE_REF:
808 case ARRAY_REF:
810 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
811 /* Record index as operand. */
812 temp.op0 = TREE_OPERAND (ref, 1);
813 /* Always record lower bounds and element size. */
814 temp.op1 = array_ref_low_bound (ref);
815 /* But record element size in units of the type alignment. */
816 temp.op2 = TREE_OPERAND (ref, 3);
817 temp.align = eltype->type_common.align;
818 if (! temp.op2)
819 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
820 size_int (TYPE_ALIGN_UNIT (eltype)));
821 if (TREE_CODE (temp.op0) == INTEGER_CST
822 && TREE_CODE (temp.op1) == INTEGER_CST
823 && TREE_CODE (temp.op2) == INTEGER_CST)
825 offset_int off = ((wi::to_offset (temp.op0)
826 - wi::to_offset (temp.op1))
827 * wi::to_offset (temp.op2)
828 * vn_ref_op_align_unit (&temp));
829 if (wi::fits_shwi_p (off))
830 temp.off = off.to_shwi();
833 break;
834 case VAR_DECL:
835 if (DECL_HARD_REGISTER (ref))
837 temp.op0 = ref;
838 break;
840 /* Fallthru. */
841 case PARM_DECL:
842 case CONST_DECL:
843 case RESULT_DECL:
844 /* Canonicalize decls to MEM[&decl] which is what we end up with
845 when valueizing MEM[ptr] with ptr = &decl. */
846 temp.opcode = MEM_REF;
847 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
848 temp.off = 0;
849 result->safe_push (temp);
850 temp.opcode = ADDR_EXPR;
851 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
852 temp.type = TREE_TYPE (temp.op0);
853 temp.off = -1;
854 break;
855 case STRING_CST:
856 case INTEGER_CST:
857 case COMPLEX_CST:
858 case VECTOR_CST:
859 case REAL_CST:
860 case FIXED_CST:
861 case CONSTRUCTOR:
862 case SSA_NAME:
863 temp.op0 = ref;
864 break;
865 case ADDR_EXPR:
866 if (is_gimple_min_invariant (ref))
868 temp.op0 = ref;
869 break;
871 break;
872 /* These are only interesting for their operands, their
873 existence, and their type. They will never be the last
874 ref in the chain of references (IE they require an
875 operand), so we don't have to put anything
876 for op* as it will be handled by the iteration */
877 case REALPART_EXPR:
878 temp.off = 0;
879 break;
880 case VIEW_CONVERT_EXPR:
881 temp.off = 0;
882 temp.reverse = storage_order_barrier_p (ref);
883 break;
884 case IMAGPART_EXPR:
885 /* This is only interesting for its constant offset. */
886 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
887 break;
888 default:
889 gcc_unreachable ();
891 result->safe_push (temp);
893 if (REFERENCE_CLASS_P (ref)
894 || TREE_CODE (ref) == MODIFY_EXPR
895 || TREE_CODE (ref) == WITH_SIZE_EXPR
896 || (TREE_CODE (ref) == ADDR_EXPR
897 && !is_gimple_min_invariant (ref)))
898 ref = TREE_OPERAND (ref, 0);
899 else
900 ref = NULL_TREE;
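/* Illustrative sketch, not part of this file: the shape of the walk above.
   A reference such as s.a[i].b is peeled from the outside in, pushing one
   operand record per level (COMPONENT_REF .b, ARRAY_REF [i],
   COMPONENT_REF .a) and finally the base object, each carrying a constant
   byte offset where one is known and -1 otherwise.  The types below are
   hypothetical stand-ins for tree and vn_reference_op_s.

     #include <vector>

     struct ref_node
     {
       int code;                 // stand-in for a tree code
       const ref_node *inner;    // next component, null at the base
       long const_off;           // byte offset, -1 if unknown
     };

     struct ref_op { int opcode; long off; };

     static void
     copy_ops (const ref_node *ref, std::vector<ref_op> *result)
     {
       // Outermost component first, base object last, as in the loop above.
       for (; ref; ref = ref->inner)
         result->push_back (ref_op { ref->code, ref->const_off });
     }  */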
904 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
905 operands in *OPS, the reference alias set SET and the reference type TYPE.
906 Return true if something useful was produced. */
908 bool
909 ao_ref_init_from_vn_reference (ao_ref *ref,
910 alias_set_type set, tree type,
911 vec<vn_reference_op_s> ops)
913 vn_reference_op_t op;
914 unsigned i;
915 tree base = NULL_TREE;
916 tree *op0_p = &base;
917 offset_int offset = 0;
918 offset_int max_size;
919 offset_int size = -1;
920 tree size_tree = NULL_TREE;
921 alias_set_type base_alias_set = -1;
923 /* First get the final access size from just the outermost expression. */
924 op = &ops[0];
925 if (op->opcode == COMPONENT_REF)
926 size_tree = DECL_SIZE (op->op0);
927 else if (op->opcode == BIT_FIELD_REF)
928 size_tree = op->op0;
929 else
931 machine_mode mode = TYPE_MODE (type);
932 if (mode == BLKmode)
933 size_tree = TYPE_SIZE (type);
934 else
935 size = int (GET_MODE_BITSIZE (mode));
937 if (size_tree != NULL_TREE
938 && TREE_CODE (size_tree) == INTEGER_CST)
939 size = wi::to_offset (size_tree);
941 /* Initially, maxsize is the same as the accessed element size.
942 In the following it will only grow (or become -1). */
943 max_size = size;
945 /* Compute cumulative bit-offset for nested component-refs and array-refs,
946 and find the ultimate containing object. */
947 FOR_EACH_VEC_ELT (ops, i, op)
949 switch (op->opcode)
951 /* These may be in the reference ops, but we cannot do anything
952 sensible with them here. */
953 case ADDR_EXPR:
954 /* Apart from ADDR_EXPR arguments to MEM_REF. */
955 if (base != NULL_TREE
956 && TREE_CODE (base) == MEM_REF
957 && op->op0
958 && DECL_P (TREE_OPERAND (op->op0, 0)))
960 vn_reference_op_t pop = &ops[i-1];
961 base = TREE_OPERAND (op->op0, 0);
962 if (pop->off == -1)
964 max_size = -1;
965 offset = 0;
967 else
968 offset += pop->off * BITS_PER_UNIT;
969 op0_p = NULL;
970 break;
972 /* Fallthru. */
973 case CALL_EXPR:
974 return false;
976 /* Record the base objects. */
977 case MEM_REF:
978 base_alias_set = get_deref_alias_set (op->op0);
979 *op0_p = build2 (MEM_REF, op->type,
980 NULL_TREE, op->op0);
981 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
982 MR_DEPENDENCE_BASE (*op0_p) = op->base;
983 op0_p = &TREE_OPERAND (*op0_p, 0);
984 break;
986 case VAR_DECL:
987 case PARM_DECL:
988 case RESULT_DECL:
989 case SSA_NAME:
990 *op0_p = op->op0;
991 op0_p = NULL;
992 break;
994 /* And now the usual component-reference style ops. */
995 case BIT_FIELD_REF:
996 offset += wi::to_offset (op->op1);
997 break;
999 case COMPONENT_REF:
1001 tree field = op->op0;
1002 /* We do not have a complete COMPONENT_REF tree here so we
1003 cannot use component_ref_field_offset. Do the interesting
1004 parts manually. */
1005 tree this_offset = DECL_FIELD_OFFSET (field);
1007 if (op->op1 || TREE_CODE (this_offset) != INTEGER_CST)
1008 max_size = -1;
1009 else
1011 offset_int woffset = (wi::to_offset (this_offset)
1012 << LOG2_BITS_PER_UNIT);
1013 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1014 offset += woffset;
1016 break;
1019 case ARRAY_RANGE_REF:
1020 case ARRAY_REF:
1021 /* We recorded the lower bound and the element size. */
1022 if (TREE_CODE (op->op0) != INTEGER_CST
1023 || TREE_CODE (op->op1) != INTEGER_CST
1024 || TREE_CODE (op->op2) != INTEGER_CST)
1025 max_size = -1;
1026 else
1028 offset_int woffset
1029 = wi::sext (wi::to_offset (op->op0) - wi::to_offset (op->op1),
1030 TYPE_PRECISION (TREE_TYPE (op->op0)));
1031 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1032 woffset <<= LOG2_BITS_PER_UNIT;
1033 offset += woffset;
1035 break;
1037 case REALPART_EXPR:
1038 break;
1040 case IMAGPART_EXPR:
1041 offset += size;
1042 break;
1044 case VIEW_CONVERT_EXPR:
1045 break;
1047 case STRING_CST:
1048 case INTEGER_CST:
1049 case COMPLEX_CST:
1050 case VECTOR_CST:
1051 case REAL_CST:
1052 case CONSTRUCTOR:
1053 case CONST_DECL:
1054 return false;
1056 default:
1057 return false;
1061 if (base == NULL_TREE)
1062 return false;
1064 ref->ref = NULL_TREE;
1065 ref->base = base;
1066 ref->ref_alias_set = set;
1067 if (base_alias_set != -1)
1068 ref->base_alias_set = base_alias_set;
1069 else
1070 ref->base_alias_set = get_alias_set (base);
1071 /* We discount volatiles from value-numbering elsewhere. */
1072 ref->volatile_p = false;
1074 if (!wi::fits_shwi_p (size) || wi::neg_p (size))
1076 ref->offset = 0;
1077 ref->size = -1;
1078 ref->max_size = -1;
1079 return true;
1082 ref->size = size.to_shwi ();
1084 if (!wi::fits_shwi_p (offset))
1086 ref->offset = 0;
1087 ref->max_size = -1;
1088 return true;
1091 ref->offset = offset.to_shwi ();
1093 if (!wi::fits_shwi_p (max_size) || wi::neg_p (max_size))
1094 ref->max_size = -1;
1095 else
1096 ref->max_size = max_size.to_shwi ();
1098 return true;
1101 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1102 vn_reference_op_s's. */
1104 static void
1105 copy_reference_ops_from_call (gcall *call,
1106 vec<vn_reference_op_s> *result)
1108 vn_reference_op_s temp;
1109 unsigned i;
1110 tree lhs = gimple_call_lhs (call);
1111 int lr;
1113 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1114 different. By adding the lhs to the vector here, we ensure that the
1115 hashcode is different, guaranteeing a different value number. */
1116 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1118 memset (&temp, 0, sizeof (temp));
1119 temp.opcode = MODIFY_EXPR;
1120 temp.type = TREE_TYPE (lhs);
1121 temp.op0 = lhs;
1122 temp.off = -1;
1123 result->safe_push (temp);
1126 /* Copy the type, opcode, function, static chain and EH region, if any. */
1127 memset (&temp, 0, sizeof (temp));
1128 temp.type = gimple_call_return_type (call);
1129 temp.opcode = CALL_EXPR;
1130 temp.op0 = gimple_call_fn (call);
1131 temp.op1 = gimple_call_chain (call);
1132 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1133 temp.op2 = size_int (lr);
1134 temp.off = -1;
1135 if (gimple_call_with_bounds_p (call))
1136 temp.with_bounds = 1;
1137 result->safe_push (temp);
1139 /* Copy the call arguments. As they can be references as well,
1140 just chain them together. */
1141 for (i = 0; i < gimple_call_num_args (call); ++i)
1143 tree callarg = gimple_call_arg (call, i);
1144 copy_reference_ops_from_ref (callarg, result);
1148 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1149 *I_P to point to the last element of the replacement. */
1150 static bool
1151 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1152 unsigned int *i_p)
1154 unsigned int i = *i_p;
1155 vn_reference_op_t op = &(*ops)[i];
1156 vn_reference_op_t mem_op = &(*ops)[i - 1];
1157 tree addr_base;
1158 HOST_WIDE_INT addr_offset = 0;
1160 /* The only thing we have to do is, for &OBJ.foo.bar, add the offset
1161 of .foo.bar to the preceding MEM_REF offset and replace the
1162 address with &OBJ. */
1163 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1164 &addr_offset);
1165 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1166 if (addr_base != TREE_OPERAND (op->op0, 0))
1168 offset_int off = offset_int::from (mem_op->op0, SIGNED);
1169 off += addr_offset;
1170 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1171 op->op0 = build_fold_addr_expr (addr_base);
1172 if (tree_fits_shwi_p (mem_op->op0))
1173 mem_op->off = tree_to_shwi (mem_op->op0);
1174 else
1175 mem_op->off = -1;
1176 return true;
1178 return false;
1181 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1182 *I_P to point to the last element of the replacement. */
1183 static bool
1184 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1185 unsigned int *i_p)
1187 unsigned int i = *i_p;
1188 vn_reference_op_t op = &(*ops)[i];
1189 vn_reference_op_t mem_op = &(*ops)[i - 1];
1190 gimple *def_stmt;
1191 enum tree_code code;
1192 offset_int off;
1194 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1195 if (!is_gimple_assign (def_stmt))
1196 return false;
1198 code = gimple_assign_rhs_code (def_stmt);
1199 if (code != ADDR_EXPR
1200 && code != POINTER_PLUS_EXPR)
1201 return false;
1203 off = offset_int::from (mem_op->op0, SIGNED);
1205 /* The only thing we have to do is, for &OBJ.foo.bar, add the offset
1206 of .foo.bar to the preceding MEM_REF offset and replace the
1207 address with &OBJ. */
1208 if (code == ADDR_EXPR)
1210 tree addr, addr_base;
1211 HOST_WIDE_INT addr_offset;
1213 addr = gimple_assign_rhs1 (def_stmt);
1214 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1215 &addr_offset);
1216 /* If that didn't work because the address isn't invariant, propagate
1217 the reference tree from the address operation in case the current
1218 dereference isn't offsetted. */
1219 if (!addr_base
1220 && *i_p == ops->length () - 1
1221 && off == 0
1222 /* This makes us disable this transform for PRE where the
1223 reference ops might be also used for code insertion which
1224 is invalid. */
1225 && default_vn_walk_kind == VN_WALKREWRITE)
1227 auto_vec<vn_reference_op_s, 32> tem;
1228 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1229 /* Make sure to preserve TBAA info. The only objects not
1230 wrapped in MEM_REFs that can have their address taken are
1231 STRING_CSTs. */
1232 if (tem.length () >= 2
1233 && tem[tem.length () - 2].opcode == MEM_REF)
1235 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1236 new_mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1237 new_mem_op->op0);
1239 else
1240 gcc_assert (tem.last ().opcode == STRING_CST);
1241 ops->pop ();
1242 ops->pop ();
1243 ops->safe_splice (tem);
1244 --*i_p;
1245 return true;
1247 if (!addr_base
1248 || TREE_CODE (addr_base) != MEM_REF)
1249 return false;
1251 off += addr_offset;
1252 off += mem_ref_offset (addr_base);
1253 op->op0 = TREE_OPERAND (addr_base, 0);
1255 else
1257 tree ptr, ptroff;
1258 ptr = gimple_assign_rhs1 (def_stmt);
1259 ptroff = gimple_assign_rhs2 (def_stmt);
1260 if (TREE_CODE (ptr) != SSA_NAME
1261 || TREE_CODE (ptroff) != INTEGER_CST)
1262 return false;
1264 off += wi::to_offset (ptroff);
1265 op->op0 = ptr;
1268 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1269 if (tree_fits_shwi_p (mem_op->op0))
1270 mem_op->off = tree_to_shwi (mem_op->op0);
1271 else
1272 mem_op->off = -1;
1273 if (TREE_CODE (op->op0) == SSA_NAME)
1274 op->op0 = SSA_VAL (op->op0);
1275 if (TREE_CODE (op->op0) != SSA_NAME)
1276 op->opcode = TREE_CODE (op->op0);
1278 /* And recurse. */
1279 if (TREE_CODE (op->op0) == SSA_NAME)
1280 vn_reference_maybe_forwprop_address (ops, i_p);
1281 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1282 vn_reference_fold_indirect (ops, i_p);
1283 return true;
1286 /* Optimize the reference REF to a constant if possible or return
1287 NULL_TREE if not. */
1289 tree
1290 fully_constant_vn_reference_p (vn_reference_t ref)
1292 vec<vn_reference_op_s> operands = ref->operands;
1293 vn_reference_op_t op;
1295 /* Try to simplify the translated expression if it is
1296 a call to a builtin function with at most two arguments. */
1297 op = &operands[0];
1298 if (op->opcode == CALL_EXPR
1299 && TREE_CODE (op->op0) == ADDR_EXPR
1300 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1301 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1302 && operands.length () >= 2
1303 && operands.length () <= 3)
1305 vn_reference_op_t arg0, arg1 = NULL;
1306 bool anyconst = false;
1307 arg0 = &operands[1];
1308 if (operands.length () > 2)
1309 arg1 = &operands[2];
1310 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1311 || (arg0->opcode == ADDR_EXPR
1312 && is_gimple_min_invariant (arg0->op0)))
1313 anyconst = true;
1314 if (arg1
1315 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1316 || (arg1->opcode == ADDR_EXPR
1317 && is_gimple_min_invariant (arg1->op0))))
1318 anyconst = true;
1319 if (anyconst)
1321 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1322 arg1 ? 2 : 1,
1323 arg0->op0,
1324 arg1 ? arg1->op0 : NULL);
1325 if (folded
1326 && TREE_CODE (folded) == NOP_EXPR)
1327 folded = TREE_OPERAND (folded, 0);
1328 if (folded
1329 && is_gimple_min_invariant (folded))
1330 return folded;
1334 /* Simplify reads from constants or constant initializers. */
1335 else if (BITS_PER_UNIT == 8
1336 && is_gimple_reg_type (ref->type)
1337 && (!INTEGRAL_TYPE_P (ref->type)
1338 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1340 HOST_WIDE_INT off = 0;
1341 HOST_WIDE_INT size;
1342 if (INTEGRAL_TYPE_P (ref->type))
1343 size = TYPE_PRECISION (ref->type);
1344 else
1345 size = tree_to_shwi (TYPE_SIZE (ref->type));
1346 if (size % BITS_PER_UNIT != 0
1347 || size > MAX_BITSIZE_MODE_ANY_MODE)
1348 return NULL_TREE;
1349 size /= BITS_PER_UNIT;
1350 unsigned i;
1351 for (i = 0; i < operands.length (); ++i)
1353 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1355 ++i;
1356 break;
1358 if (operands[i].off == -1)
1359 return NULL_TREE;
1360 off += operands[i].off;
1361 if (operands[i].opcode == MEM_REF)
1363 ++i;
1364 break;
1367 vn_reference_op_t base = &operands[--i];
1368 tree ctor = error_mark_node;
1369 tree decl = NULL_TREE;
1370 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1371 ctor = base->op0;
1372 else if (base->opcode == MEM_REF
1373 && base[1].opcode == ADDR_EXPR
1374 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1375 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
1377 decl = TREE_OPERAND (base[1].op0, 0);
1378 ctor = ctor_for_folding (decl);
1380 if (ctor == NULL_TREE)
1381 return build_zero_cst (ref->type);
1382 else if (ctor != error_mark_node)
1384 if (decl)
1386 tree res = fold_ctor_reference (ref->type, ctor,
1387 off * BITS_PER_UNIT,
1388 size * BITS_PER_UNIT, decl);
1389 if (res)
1391 STRIP_USELESS_TYPE_CONVERSION (res);
1392 if (is_gimple_min_invariant (res))
1393 return res;
1396 else
1398 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1399 int len = native_encode_expr (ctor, buf, size, off);
1400 if (len > 0)
1401 return native_interpret_expr (ref->type, buf, len);
1406 return NULL_TREE;
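/* Illustrative sketch, not part of this file: the native encode/interpret
   step used just above, reduced to plain C++.  The constant initializer is
   encoded into a raw byte buffer and the read is answered by reinterpreting
   SIZE bytes at OFFSET in that buffer, which is what native_encode_expr /
   native_interpret_expr do for trees (modulo endianness and padding, which
   the real routines handle and this sketch does not).

     #include <cassert>
     #include <cstdint>
     #include <cstring>

     // Read a uint16_t located OFFSET bytes into a constant uint64_t
     // initializer, assuming a little-endian host.
     static uint16_t
     read_piece_of_constant (uint64_t init, unsigned offset)
     {
       assert (offset + sizeof (uint16_t) <= sizeof init);
       unsigned char buf[sizeof init];
       std::memcpy (buf, &init, sizeof init);            // "native encode"
       uint16_t val;
       std::memcpy (&val, buf + offset, sizeof val);     // "native interpret"
       return val;
     }  */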
1409 /* Return true if OPS contain a storage order barrier. */
1411 static bool
1412 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1414 vn_reference_op_t op;
1415 unsigned i;
1417 FOR_EACH_VEC_ELT (ops, i, op)
1418 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1419 return true;
1421 return false;
1424 /* Transform any SSA_NAMEs in a vector of vn_reference_op_s
1425 structures into their value numbers. This is done in-place, and
1426 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1427 whether any operands were valueized. */
1429 static vec<vn_reference_op_s>
1430 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1432 vn_reference_op_t vro;
1433 unsigned int i;
1435 *valueized_anything = false;
1437 FOR_EACH_VEC_ELT (orig, i, vro)
1439 if (vro->opcode == SSA_NAME
1440 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1442 tree tem = SSA_VAL (vro->op0);
1443 if (tem != vro->op0)
1445 *valueized_anything = true;
1446 vro->op0 = tem;
1448 /* If it transforms from an SSA_NAME to a constant, update
1449 the opcode. */
1450 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1451 vro->opcode = TREE_CODE (vro->op0);
1453 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1455 tree tem = SSA_VAL (vro->op1);
1456 if (tem != vro->op1)
1458 *valueized_anything = true;
1459 vro->op1 = tem;
1462 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1464 tree tem = SSA_VAL (vro->op2);
1465 if (tem != vro->op2)
1467 *valueized_anything = true;
1468 vro->op2 = tem;
1471 /* If it transforms from an SSA_NAME to an address, fold with
1472 a preceding indirect reference. */
1473 if (i > 0
1474 && vro->op0
1475 && TREE_CODE (vro->op0) == ADDR_EXPR
1476 && orig[i - 1].opcode == MEM_REF)
1478 if (vn_reference_fold_indirect (&orig, &i))
1479 *valueized_anything = true;
1481 else if (i > 0
1482 && vro->opcode == SSA_NAME
1483 && orig[i - 1].opcode == MEM_REF)
1485 if (vn_reference_maybe_forwprop_address (&orig, &i))
1486 *valueized_anything = true;
1488 /* If it transforms a non-constant ARRAY_REF into a constant
1489 one, adjust the constant offset. */
1490 else if (vro->opcode == ARRAY_REF
1491 && vro->off == -1
1492 && TREE_CODE (vro->op0) == INTEGER_CST
1493 && TREE_CODE (vro->op1) == INTEGER_CST
1494 && TREE_CODE (vro->op2) == INTEGER_CST)
1496 offset_int off = ((wi::to_offset (vro->op0)
1497 - wi::to_offset (vro->op1))
1498 * wi::to_offset (vro->op2)
1499 * vn_ref_op_align_unit (vro));
1500 if (wi::fits_shwi_p (off))
1501 vro->off = off.to_shwi ();
1505 return orig;
1508 static vec<vn_reference_op_s>
1509 valueize_refs (vec<vn_reference_op_s> orig)
1511 bool tem;
1512 return valueize_refs_1 (orig, &tem);
1515 static vec<vn_reference_op_s> shared_lookup_references;
1517 /* Create a vector of vn_reference_op_s structures from REF, a
1518 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1519 this function. *VALUEIZED_ANYTHING will specify whether any
1520 operands were valueized. */
1522 static vec<vn_reference_op_s>
1523 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1525 if (!ref)
1526 return vNULL;
1527 shared_lookup_references.truncate (0);
1528 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1529 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1530 valueized_anything);
1531 return shared_lookup_references;
1534 /* Create a vector of vn_reference_op_s structures from CALL, a
1535 call statement. The vector is shared among all callers of
1536 this function. */
1538 static vec<vn_reference_op_s>
1539 valueize_shared_reference_ops_from_call (gcall *call)
1541 if (!call)
1542 return vNULL;
1543 shared_lookup_references.truncate (0);
1544 copy_reference_ops_from_call (call, &shared_lookup_references);
1545 shared_lookup_references = valueize_refs (shared_lookup_references);
1546 return shared_lookup_references;
1549 /* Lookup a SCCVN reference operation VR in the current hash table.
1550 Returns the resulting value number if it exists in the hash table,
1551 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1552 vn_reference_t stored in the hashtable if something is found. */
1554 static tree
1555 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1557 vn_reference_s **slot;
1558 hashval_t hash;
1560 hash = vr->hashcode;
1561 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1562 if (!slot && current_info == optimistic_info)
1563 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1564 if (slot)
1566 if (vnresult)
1567 *vnresult = (vn_reference_t)*slot;
1568 return ((vn_reference_t)*slot)->result;
1571 return NULL_TREE;
1574 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1575 with the current VUSE and performs the expression lookup. */
1577 static void *
1578 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1579 unsigned int cnt, void *vr_)
1581 vn_reference_t vr = (vn_reference_t)vr_;
1582 vn_reference_s **slot;
1583 hashval_t hash;
1585 /* This bounds the stmt walks we perform on reference lookups
1586 to O(1) instead of O(N) where N is the number of dominating
1587 stores. */
1588 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1589 return (void *)-1;
1591 if (last_vuse_ptr)
1592 *last_vuse_ptr = vuse;
1594 /* Fixup vuse and hash. */
1595 if (vr->vuse)
1596 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1597 vr->vuse = vuse_ssa_val (vuse);
1598 if (vr->vuse)
1599 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1601 hash = vr->hashcode;
1602 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1603 if (!slot && current_info == optimistic_info)
1604 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1605 if (slot)
1606 return *slot;
1608 return NULL;
1611 /* Lookup an existing or insert a new vn_reference entry into the
1612 value table for the VUSE, SET, TYPE, OPERANDS reference which
1613 has the value VALUE, which is either a constant or an SSA name. */
1615 static vn_reference_t
1616 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1617 alias_set_type set,
1618 tree type,
1619 vec<vn_reference_op_s,
1620 va_heap> operands,
1621 tree value)
1623 vn_reference_s vr1;
1624 vn_reference_t result;
1625 unsigned value_id;
1626 vr1.vuse = vuse;
1627 vr1.operands = operands;
1628 vr1.type = type;
1629 vr1.set = set;
1630 vr1.hashcode = vn_reference_compute_hash (&vr1);
1631 if (vn_reference_lookup_1 (&vr1, &result))
1632 return result;
1633 if (TREE_CODE (value) == SSA_NAME)
1634 value_id = VN_INFO (value)->value_id;
1635 else
1636 value_id = get_or_alloc_constant_value_id (value);
1637 return vn_reference_insert_pieces (vuse, set, type,
1638 operands.copy (), value, value_id);
1641 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *stmt, tree result);
1642 static unsigned mprts_hook_cnt;
1644 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
1646 static tree
1647 vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops)
1649 if (!rcode.is_tree_code ())
1650 return NULL_TREE;
1651 vn_nary_op_t vnresult = NULL;
1652 tree res = vn_nary_op_lookup_pieces (TREE_CODE_LENGTH ((tree_code) rcode),
1653 (tree_code) rcode, type, ops, &vnresult);
1654 /* We can end up endlessly recursing simplifications if the lookup above
1655 presents us with a def-use chain that mirrors the original simplification.
1656 See PR80887 for an example. Limit successful lookups artificially
1657 to 10 times if we are called as mprts_hook. */
1658 if (res
1659 && mprts_hook
1660 && --mprts_hook_cnt == 0)
1662 if (dump_file && (dump_flags & TDF_DETAILS))
1663 fprintf (dump_file, "Resetting mprts_hook after too many "
1664 "invocations.\n");
1665 mprts_hook = NULL;
1667 return res;
1670 /* Return a value-number for RCODE OPS... either by looking up an existing
1671 value-number for the simplified result or by inserting the operation if
1672 INSERT is true. */
1674 static tree
1675 vn_nary_build_or_lookup_1 (code_helper rcode, tree type, tree *ops,
1676 bool insert)
1678 tree result = NULL_TREE;
1679 /* We will be creating a value number for
1680 RCODE (OPS...).
1681 So first simplify and lookup this expression to see if it
1682 is already available. */
1683 mprts_hook = vn_lookup_simplify_result;
1684 mprts_hook_cnt = 9;
1685 bool res = false;
1686 switch (TREE_CODE_LENGTH ((tree_code) rcode))
1688 case 1:
1689 res = gimple_resimplify1 (NULL, &rcode, type, ops, vn_valueize);
1690 break;
1691 case 2:
1692 res = gimple_resimplify2 (NULL, &rcode, type, ops, vn_valueize);
1693 break;
1694 case 3:
1695 res = gimple_resimplify3 (NULL, &rcode, type, ops, vn_valueize);
1696 break;
1698 mprts_hook = NULL;
1699 gimple *new_stmt = NULL;
1700 if (res
1701 && gimple_simplified_result_is_gimple_val (rcode, ops))
1702 /* The expression is already available. */
1703 result = ops[0];
1704 else
1706 tree val = vn_lookup_simplify_result (rcode, type, ops);
1707 if (!val && insert)
1709 gimple_seq stmts = NULL;
1710 result = maybe_push_res_to_seq (rcode, type, ops, &stmts);
1711 if (result)
1713 gcc_assert (gimple_seq_singleton_p (stmts));
1714 new_stmt = gimple_seq_first_stmt (stmts);
1717 else
1718 /* The expression is already available. */
1719 result = val;
1721 if (new_stmt)
1723 /* The expression is not yet available; value-number lhs to
1724 the new SSA_NAME we created. */
1725 /* Initialize value-number information properly. */
1726 VN_INFO_GET (result)->valnum = result;
1727 VN_INFO (result)->value_id = get_next_value_id ();
1728 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1729 new_stmt);
1730 VN_INFO (result)->needs_insertion = true;
1731 /* ??? PRE phi-translation inserts NARYs without corresponding
1732 SSA name result. Re-use those but set their result according
1733 to the stmt we just built. */
1734 vn_nary_op_t nary = NULL;
1735 vn_nary_op_lookup_stmt (new_stmt, &nary);
1736 if (nary)
1738 gcc_assert (nary->result == NULL_TREE);
1739 nary->result = gimple_assign_lhs (new_stmt);
1741 /* As all "inserted" statements are singleton SCCs, insert
1742 to the valid table. This is strictly needed to
1743 avoid re-generating new value SSA_NAMEs for the same
1744 expression during SCC iteration over and over (the
1745 optimistic table gets cleared after each iteration).
1746 We do not need to insert into the optimistic table, as
1747 lookups there will fall back to the valid table. */
1748 else if (current_info == optimistic_info)
1750 current_info = valid_info;
1751 vn_nary_op_insert_stmt (new_stmt, result);
1752 current_info = optimistic_info;
1754 else
1755 vn_nary_op_insert_stmt (new_stmt, result);
1756 if (dump_file && (dump_flags & TDF_DETAILS))
1758 fprintf (dump_file, "Inserting name ");
1759 print_generic_expr (dump_file, result);
1760 fprintf (dump_file, " for expression ");
1761 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1762 fprintf (dump_file, "\n");
1765 return result;
1768 /* Return a value-number for RCODE OPS... either by looking up an existing
1769 value-number for the simplified result or by inserting the operation. */
1771 static tree
1772 vn_nary_build_or_lookup (code_helper rcode, tree type, tree *ops)
1774 return vn_nary_build_or_lookup_1 (rcode, type, ops, true);
1777 /* Try to simplify the nary expression NARY and return its value
1778 if present. */
1780 tree
1781 vn_nary_simplify (vn_nary_op_t nary)
1783 if (nary->length > 3)
1784 return NULL_TREE;
1785 tree ops[3];
1786 memcpy (ops, nary->op, sizeof (tree) * nary->length);
1787 return vn_nary_build_or_lookup_1 (nary->opcode, nary->type, ops, false);
1791 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1792 from the statement defining VUSE and if not successful tries to
1793 translate *REFP and VR_ through an aggregate copy at the definition
1794 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1795 of *REF and *VR. If only disambiguation was performed then
1796 *DISAMBIGUATE_ONLY is set to true. */
1798 static void *
1799 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1800 bool *disambiguate_only)
1802 vn_reference_t vr = (vn_reference_t)vr_;
1803 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1804 tree base = ao_ref_base (ref);
1805 HOST_WIDE_INT offset, maxsize;
1806 static vec<vn_reference_op_s> lhs_ops;
1807 ao_ref lhs_ref;
1808 bool lhs_ref_ok = false;
1810 /* If the reference is based on a parameter that was determined as
1811 pointing to readonly memory it doesn't change. */
1812 if (TREE_CODE (base) == MEM_REF
1813 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1814 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1815 && bitmap_bit_p (const_parms,
1816 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1818 *disambiguate_only = true;
1819 return NULL;
1822 /* First try to disambiguate after value-replacing in the definitions LHS. */
1823 if (is_gimple_assign (def_stmt))
1825 tree lhs = gimple_assign_lhs (def_stmt);
1826 bool valueized_anything = false;
1827 /* Avoid re-allocation overhead. */
1828 lhs_ops.truncate (0);
1829 copy_reference_ops_from_ref (lhs, &lhs_ops);
1830 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1831 if (valueized_anything)
1833 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1834 get_alias_set (lhs),
1835 TREE_TYPE (lhs), lhs_ops);
1836 if (lhs_ref_ok
1837 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1839 *disambiguate_only = true;
1840 return NULL;
1843 else
1845 ao_ref_init (&lhs_ref, lhs);
1846 lhs_ref_ok = true;
1849 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1850 && gimple_call_num_args (def_stmt) <= 4)
1852 /* For builtin calls, valueize their arguments and call the
1853 alias oracle again. Valueization may improve points-to
1854 info of pointers and constify size and position arguments.
1855 Originally this was motivated by PR61034 which has
1856 conditional calls to free falsely clobbering ref because
1857 of imprecise points-to info of the argument. */
1858 tree oldargs[4];
1859 bool valueized_anything = false;
1860 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1862 oldargs[i] = gimple_call_arg (def_stmt, i);
1863 if (TREE_CODE (oldargs[i]) == SSA_NAME
1864 && VN_INFO (oldargs[i])->valnum != oldargs[i])
1866 gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
1867 valueized_anything = true;
1870 if (valueized_anything)
1872 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1873 ref);
1874 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1875 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1876 if (!res)
1878 *disambiguate_only = true;
1879 return NULL;
1884 if (*disambiguate_only)
1885 return (void *)-1;
1887 offset = ref->offset;
1888 maxsize = ref->max_size;
1890 /* If we cannot constrain the size of the reference we cannot
1891 test if anything kills it. */
1892 if (maxsize == -1)
1893 return (void *)-1;
1895 /* We can't deduce anything useful from clobbers. */
1896 if (gimple_clobber_p (def_stmt))
1897 return (void *)-1;
1899 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1900 from that definition.
1901 1) Memset. */
1902 if (is_gimple_reg_type (vr->type)
1903 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1904 && integer_zerop (gimple_call_arg (def_stmt, 1))
1905 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1906 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1908 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1909 tree base2;
1910 HOST_WIDE_INT offset2, size2, maxsize2;
1911 bool reverse;
1912 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1913 &reverse);
1914 size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
1915 if ((unsigned HOST_WIDE_INT)size2 / 8
1916 == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
1917 && maxsize2 != -1
1918 && operand_equal_p (base, base2, 0)
1919 && offset2 <= offset
1920 && offset2 + size2 >= offset + maxsize)
1922 tree val = build_zero_cst (vr->type);
1923 return vn_reference_lookup_or_insert_for_pieces
1924 (vuse, vr->set, vr->type, vr->operands, val);
1928 /* 2) Assignment from an empty CONSTRUCTOR. */
1929 else if (is_gimple_reg_type (vr->type)
1930 && gimple_assign_single_p (def_stmt)
1931 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1932 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1934 tree base2;
1935 HOST_WIDE_INT offset2, size2, maxsize2;
1936 bool reverse;
1937 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1938 &offset2, &size2, &maxsize2, &reverse);
1939 if (maxsize2 != -1
1940 && operand_equal_p (base, base2, 0)
1941 && offset2 <= offset
1942 && offset2 + size2 >= offset + maxsize)
1944 tree val = build_zero_cst (vr->type);
1945 return vn_reference_lookup_or_insert_for_pieces
1946 (vuse, vr->set, vr->type, vr->operands, val);
1950 /* 3) Assignment from a constant. We can use fold's native encode/interpret
1951 routines to extract the assigned bits. */
1952 else if (ref->size == maxsize
1953 && is_gimple_reg_type (vr->type)
1954 && !contains_storage_order_barrier_p (vr->operands)
1955 && gimple_assign_single_p (def_stmt)
1956 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1957 && maxsize % BITS_PER_UNIT == 0
1958 && offset % BITS_PER_UNIT == 0
1959 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
1960 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
1961 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
1963 tree base2;
1964 HOST_WIDE_INT offset2, size2, maxsize2;
1965 bool reverse;
1966 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1967 &offset2, &size2, &maxsize2, &reverse);
1968 if (!reverse
1969 && maxsize2 != -1
1970 && maxsize2 == size2
1971 && size2 % BITS_PER_UNIT == 0
1972 && offset2 % BITS_PER_UNIT == 0
1973 && operand_equal_p (base, base2, 0)
1974 && offset2 <= offset
1975 && offset2 + size2 >= offset + maxsize)
1977 /* We support up to 512-bit values (for V8DFmode). */
1978 unsigned char buffer[64];
1979 int len;
1981 tree rhs = gimple_assign_rhs1 (def_stmt);
1982 if (TREE_CODE (rhs) == SSA_NAME)
1983 rhs = SSA_VAL (rhs);
1984 len = native_encode_expr (rhs,
1985 buffer, sizeof (buffer));
1986 if (len > 0)
1988 tree type = vr->type;
1989 /* Make sure to interpret in a type that has a range
1990 covering the whole access size. */
1991 if (INTEGRAL_TYPE_P (vr->type)
1992 && ref->size != TYPE_PRECISION (vr->type))
1993 type = build_nonstandard_integer_type (ref->size,
1994 TYPE_UNSIGNED (type));
1995 tree val = native_interpret_expr (type,
1996 buffer
1997 + ((offset - offset2)
1998 / BITS_PER_UNIT),
1999 ref->size / BITS_PER_UNIT);
2000 /* If we chop off bits because the type's precision doesn't
2001 match the memory access size, this is OK when optimizing
2002 reads but not when called from the DSE code during
2003 elimination. */
2004 if (val
2005 && type != vr->type)
2007 if (! int_fits_type_p (val, vr->type))
2008 val = NULL_TREE;
2009 else
2010 val = fold_convert (vr->type, val);
2013 if (val)
2014 return vn_reference_lookup_or_insert_for_pieces
2015 (vuse, vr->set, vr->type, vr->operands, val);
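  /* A rough sketch of what the byte-level extraction above enables
     (types and values are purely illustrative):

       double d;
       d = 1.0;
       ... = *(unsigned long long *) &d;

     The constant 1.0 is encoded into its target byte representation by
     native_encode_expr and the bytes overlapping the read are
     re-interpreted in the read's type, yielding 0x3ff0000000000000 on
     a typical target with IEEE doubles.  */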
2020 /* 4) Assignment from an SSA name which definition we may be able
2021 to access pieces from. */
2022 else if (ref->size == maxsize
2023 && is_gimple_reg_type (vr->type)
2024 && !contains_storage_order_barrier_p (vr->operands)
2025 && gimple_assign_single_p (def_stmt)
2026 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2028 tree base2;
2029 HOST_WIDE_INT offset2, size2, maxsize2;
2030 bool reverse;
2031 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2032 &offset2, &size2, &maxsize2,
2033 &reverse);
2034 if (!reverse
2035 && maxsize2 != -1
2036 && maxsize2 == size2
2037 && operand_equal_p (base, base2, 0)
2038 && offset2 <= offset
2039 && offset2 + size2 >= offset + maxsize
2040 /* ??? We can't handle bitfield precision extracts without
2041 either using an alternate type for the BIT_FIELD_REF and
2042 then doing a conversion or possibly adjusting the offset
2043 according to endianness. */
2044 && (! INTEGRAL_TYPE_P (vr->type)
2045 || ref->size == TYPE_PRECISION (vr->type))
2046 && ref->size % BITS_PER_UNIT == 0)
2048 code_helper rcode = BIT_FIELD_REF;
2049 tree ops[3];
2050 ops[0] = SSA_VAL (gimple_assign_rhs1 (def_stmt));
2051 ops[1] = bitsize_int (ref->size);
2052 ops[2] = bitsize_int (offset - offset2);
2053 tree val = vn_nary_build_or_lookup (rcode, vr->type, ops);
2054 if (val
2055 && (TREE_CODE (val) != SSA_NAME
2056 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2058 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2059 (vuse, vr->set, vr->type, vr->operands, val);
2060 return res;
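  /* A hypothetical example for the SSA-name case above:

       _Complex double a;
       a = _1;
       ... = __real a;

     The component read is rewritten as BIT_FIELD_REF <_1, 64, 0>
     (size and position in bits follow the original access) and is
     simplified or looked up via vn_nary_build_or_lookup, so no load
     from a is needed.  */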
2065 /* 5) For aggregate copies translate the reference through them if
2066 the copy kills ref. */
2067 else if (vn_walk_kind == VN_WALKREWRITE
2068 && gimple_assign_single_p (def_stmt)
2069 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2070 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2071 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2073 tree base2;
2074 HOST_WIDE_INT maxsize2;
2075 int i, j, k;
2076 auto_vec<vn_reference_op_s> rhs;
2077 vn_reference_op_t vro;
2078 ao_ref r;
2080 if (!lhs_ref_ok)
2081 return (void *)-1;
2083 /* See if the assignment kills REF. */
2084 base2 = ao_ref_base (&lhs_ref);
2085 maxsize2 = lhs_ref.max_size;
2086 if (maxsize2 == -1
2087 || (base != base2
2088 && (TREE_CODE (base) != MEM_REF
2089 || TREE_CODE (base2) != MEM_REF
2090 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2091 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2092 TREE_OPERAND (base2, 1))))
2093 || !stmt_kills_ref_p (def_stmt, ref))
2094 return (void *)-1;
2096 /* Find the common base of ref and the lhs. lhs_ops already
2097 contains valueized operands for the lhs. */
2098 i = vr->operands.length () - 1;
2099 j = lhs_ops.length () - 1;
2100 while (j >= 0 && i >= 0
2101 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2103 i--;
2104 j--;
2107 /* ??? The innermost op should always be a MEM_REF and we already
2108 checked that the assignment to the lhs kills vr. Thus for
2109 aggregate copies using char[] types the vn_reference_op_eq
2110 may fail when comparing types for compatibility. But we really
2111 don't care here - further lookups with the rewritten operands
2112 will simply fail if we messed up types too badly. */
2113 HOST_WIDE_INT extra_off = 0;
2114 if (j == 0 && i >= 0
2115 && lhs_ops[0].opcode == MEM_REF
2116 && lhs_ops[0].off != -1)
2118 if (lhs_ops[0].off == vr->operands[i].off)
2119 i--, j--;
2120 else if (vr->operands[i].opcode == MEM_REF
2121 && vr->operands[i].off != -1)
2123 extra_off = vr->operands[i].off - lhs_ops[0].off;
2124 i--, j--;
2128 /* i now points to the first additional op.
2129 ??? LHS may not be completely contained in VR, one or more
2130 VIEW_CONVERT_EXPRs could be in its way. We could at least
2131 try handling outermost VIEW_CONVERT_EXPRs. */
2132 if (j != -1)
2133 return (void *)-1;
2135 /* Punt if the additional ops contain a storage order barrier. */
2136 for (k = i; k >= 0; k--)
2138 vro = &vr->operands[k];
2139 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2140 return (void *)-1;
2143 /* Now re-write REF to be based on the rhs of the assignment. */
2144 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2146 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2147 if (extra_off != 0)
2149 if (rhs.length () < 2
2150 || rhs[0].opcode != MEM_REF
2151 || rhs[0].off == -1)
2152 return (void *)-1;
2153 rhs[0].off += extra_off;
2154 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
2155 build_int_cst (TREE_TYPE (rhs[0].op0),
2156 extra_off));
2159 /* We need to prepend vr->operands[0..i] to rhs. */
2160 vec<vn_reference_op_s> old = vr->operands;
2161 if (i + 1 + rhs.length () > vr->operands.length ())
2162 vr->operands.safe_grow (i + 1 + rhs.length ());
2163 else
2164 vr->operands.truncate (i + 1 + rhs.length ());
2165 FOR_EACH_VEC_ELT (rhs, j, vro)
2166 vr->operands[i + 1 + j] = *vro;
2167 vr->operands = valueize_refs (vr->operands);
2168 if (old == shared_lookup_references)
2169 shared_lookup_references = vr->operands;
2170 vr->hashcode = vn_reference_compute_hash (vr);
2172 /* Try folding the new reference to a constant. */
2173 tree val = fully_constant_vn_reference_p (vr);
2174 if (val)
2175 return vn_reference_lookup_or_insert_for_pieces
2176 (vuse, vr->set, vr->type, vr->operands, val);
2178 /* Adjust *ref from the new operands. */
2179 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2180 return (void *)-1;
2181 /* This can happen with bitfields. */
2182 if (ref->size != r.size)
2183 return (void *)-1;
2184 *ref = r;
2186 /* Do not update last seen VUSE after translating. */
2187 last_vuse_ptr = NULL;
2189 /* Keep looking for the adjusted *REF / VR pair. */
2190 return NULL;
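  /* A sketch of the translation performed by the aggregate-copy case
     above (names are illustrative):

       struct S a, b;
       a = b;
       ... = a.f;

     Because the copy kills the access to a.f, the operand list of the
     lookup is rewritten so that it describes b.f, and the walk
     continues looking for a dominating definition of b.f instead.  */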
2193 /* 6) For memcpy copies translate the reference through them if
2194 the copy kills ref. */
2195 else if (vn_walk_kind == VN_WALKREWRITE
2196 && is_gimple_reg_type (vr->type)
2197 /* ??? Handle BCOPY as well. */
2198 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2199 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2200 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2201 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2202 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2203 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2204 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2205 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
2207 tree lhs, rhs;
2208 ao_ref r;
2209 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
2210 vn_reference_op_s op;
2211 HOST_WIDE_INT at;
2213 /* Only handle non-variable, addressable refs. */
2214 if (ref->size != maxsize
2215 || offset % BITS_PER_UNIT != 0
2216 || ref->size % BITS_PER_UNIT != 0)
2217 return (void *)-1;
2219 /* Extract a pointer base and an offset for the destination. */
2220 lhs = gimple_call_arg (def_stmt, 0);
2221 lhs_offset = 0;
2222 if (TREE_CODE (lhs) == SSA_NAME)
2224 lhs = SSA_VAL (lhs);
2225 if (TREE_CODE (lhs) == SSA_NAME)
2227 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2228 if (gimple_assign_single_p (def_stmt)
2229 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2230 lhs = gimple_assign_rhs1 (def_stmt);
2233 if (TREE_CODE (lhs) == ADDR_EXPR)
2235 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2236 &lhs_offset);
2237 if (!tem)
2238 return (void *)-1;
2239 if (TREE_CODE (tem) == MEM_REF
2240 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2242 lhs = TREE_OPERAND (tem, 0);
2243 if (TREE_CODE (lhs) == SSA_NAME)
2244 lhs = SSA_VAL (lhs);
2245 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2247 else if (DECL_P (tem))
2248 lhs = build_fold_addr_expr (tem);
2249 else
2250 return (void *)-1;
2252 if (TREE_CODE (lhs) != SSA_NAME
2253 && TREE_CODE (lhs) != ADDR_EXPR)
2254 return (void *)-1;
2256 /* Extract a pointer base and an offset for the source. */
2257 rhs = gimple_call_arg (def_stmt, 1);
2258 rhs_offset = 0;
2259 if (TREE_CODE (rhs) == SSA_NAME)
2260 rhs = SSA_VAL (rhs);
2261 if (TREE_CODE (rhs) == ADDR_EXPR)
2263 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2264 &rhs_offset);
2265 if (!tem)
2266 return (void *)-1;
2267 if (TREE_CODE (tem) == MEM_REF
2268 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2270 rhs = TREE_OPERAND (tem, 0);
2271 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2273 else if (DECL_P (tem))
2274 rhs = build_fold_addr_expr (tem);
2275 else
2276 return (void *)-1;
2278 if (TREE_CODE (rhs) != SSA_NAME
2279 && TREE_CODE (rhs) != ADDR_EXPR)
2280 return (void *)-1;
2282 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2284 /* The bases of the destination and the references have to agree. */
2285 if ((TREE_CODE (base) != MEM_REF
2286 && !DECL_P (base))
2287 || (TREE_CODE (base) == MEM_REF
2288 && (TREE_OPERAND (base, 0) != lhs
2289 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
2290 || (DECL_P (base)
2291 && (TREE_CODE (lhs) != ADDR_EXPR
2292 || TREE_OPERAND (lhs, 0) != base)))
2293 return (void *)-1;
2295 at = offset / BITS_PER_UNIT;
2296 if (TREE_CODE (base) == MEM_REF)
2297 at += tree_to_uhwi (TREE_OPERAND (base, 1));
2298 /* If the access is completely outside of the memcpy destination
2299 area there is no aliasing. */
2300 if (lhs_offset >= at + maxsize / BITS_PER_UNIT
2301 || lhs_offset + copy_size <= at)
2302 return NULL;
2303 /* And the access has to be contained within the memcpy destination. */
2304 if (lhs_offset > at
2305 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2306 return (void *)-1;
2308 /* Make room for 2 operands in the new reference. */
2309 if (vr->operands.length () < 2)
2311 vec<vn_reference_op_s> old = vr->operands;
2312 vr->operands.safe_grow_cleared (2);
2313 if (old == shared_lookup_references)
2314 shared_lookup_references = vr->operands;
2316 else
2317 vr->operands.truncate (2);
2319 /* The looked-through reference is a simple MEM_REF. */
2320 memset (&op, 0, sizeof (op));
2321 op.type = vr->type;
2322 op.opcode = MEM_REF;
2323 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2324 op.off = at - lhs_offset + rhs_offset;
2325 vr->operands[0] = op;
2326 op.type = TREE_TYPE (rhs);
2327 op.opcode = TREE_CODE (rhs);
2328 op.op0 = rhs;
2329 op.off = -1;
2330 vr->operands[1] = op;
2331 vr->hashcode = vn_reference_compute_hash (vr);
2333 /* Try folding the new reference to a constant. */
2334 tree val = fully_constant_vn_reference_p (vr);
2335 if (val)
2336 return vn_reference_lookup_or_insert_for_pieces
2337 (vuse, vr->set, vr->type, vr->operands, val);
2339 /* Adjust *ref from the new operands. */
2340 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2341 return (void *)-1;
2342 /* This can happen with bitfields. */
2343 if (ref->size != r.size)
2344 return (void *)-1;
2345 *ref = r;
2347 /* Do not update last seen VUSE after translating. */
2348 last_vuse_ptr = NULL;
2350 /* Keep looking for the adjusted *REF / VR pair. */
2351 return NULL;
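  /* A sketch of the memcpy translation above (names and sizes are
     illustrative):

       memcpy (&a, &b, sizeof (a));
       ... = a.f;

     Provided the read lies entirely within the copied region, the
     reference is rewritten into a MEM_REF based on &b at the
     corresponding byte offset and the walk continues from there.  */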
2354 /* Bail out and stop walking. */
2355 return (void *)-1;
2358 /* Return a reference op vector from OP that can be used for
2359 vn_reference_lookup_pieces. The caller is responsible for releasing
2360 the vector. */
2362 vec<vn_reference_op_s>
2363 vn_reference_operands_for_lookup (tree op)
2365 bool valueized;
2366 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2369 /* Lookup a reference operation by its parts, in the current hash table.
2370 Returns the resulting value number if it exists in the hash table,
2371 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2372 vn_reference_t stored in the hashtable if something is found. */
2374 tree
2375 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2376 vec<vn_reference_op_s> operands,
2377 vn_reference_t *vnresult, vn_lookup_kind kind)
2379 struct vn_reference_s vr1;
2380 vn_reference_t tmp;
2381 tree cst;
2383 if (!vnresult)
2384 vnresult = &tmp;
2385 *vnresult = NULL;
2387 vr1.vuse = vuse_ssa_val (vuse);
2388 shared_lookup_references.truncate (0);
2389 shared_lookup_references.safe_grow (operands.length ());
2390 memcpy (shared_lookup_references.address (),
2391 operands.address (),
2392 sizeof (vn_reference_op_s)
2393 * operands.length ());
2394 vr1.operands = operands = shared_lookup_references
2395 = valueize_refs (shared_lookup_references);
2396 vr1.type = type;
2397 vr1.set = set;
2398 vr1.hashcode = vn_reference_compute_hash (&vr1);
2399 if ((cst = fully_constant_vn_reference_p (&vr1)))
2400 return cst;
2402 vn_reference_lookup_1 (&vr1, vnresult);
2403 if (!*vnresult
2404 && kind != VN_NOWALK
2405 && vr1.vuse)
2407 ao_ref r;
2408 vn_walk_kind = kind;
2409 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2410 *vnresult =
2411 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2412 vn_reference_lookup_2,
2413 vn_reference_lookup_3,
2414 vuse_ssa_val, &vr1);
2415 gcc_checking_assert (vr1.operands == shared_lookup_references);
2418 if (*vnresult)
2419 return (*vnresult)->result;
2421 return NULL_TREE;
2424 /* Lookup OP in the current hash table, and return the resulting value
2425 number if it exists in the hash table. Return NULL_TREE if it does
2426 not exist in the hash table or if the result field of the structure
2427 was NULL. VNRESULT will be filled in with the vn_reference_t
2428 stored in the hashtable if one exists. When TBAA_P is false assume
2429 we are looking up a store and treat it as having alias-set zero. */
2431 tree
2432 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2433 vn_reference_t *vnresult, bool tbaa_p)
2435 vec<vn_reference_op_s> operands;
2436 struct vn_reference_s vr1;
2437 tree cst;
2438 bool valuezied_anything;
2440 if (vnresult)
2441 *vnresult = NULL;
2443 vr1.vuse = vuse_ssa_val (vuse);
2444 vr1.operands = operands
2445 = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
2446 vr1.type = TREE_TYPE (op);
2447 vr1.set = tbaa_p ? get_alias_set (op) : 0;
2448 vr1.hashcode = vn_reference_compute_hash (&vr1);
2449 if ((cst = fully_constant_vn_reference_p (&vr1)))
2450 return cst;
2452 if (kind != VN_NOWALK
2453 && vr1.vuse)
2455 vn_reference_t wvnresult;
2456 ao_ref r;
2457 /* Make sure to use a valueized reference if we valueized anything.
2458 Otherwise preserve the full reference for advanced TBAA. */
2459 if (!valuezied_anything
2460 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2461 vr1.operands))
2462 ao_ref_init (&r, op);
2463 if (! tbaa_p)
2464 r.ref_alias_set = r.base_alias_set = 0;
2465 vn_walk_kind = kind;
2466 wvnresult =
2467 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2468 vn_reference_lookup_2,
2469 vn_reference_lookup_3,
2470 vuse_ssa_val, &vr1);
2471 gcc_checking_assert (vr1.operands == shared_lookup_references);
2472 if (wvnresult)
2474 if (vnresult)
2475 *vnresult = wvnresult;
2476 return wvnresult->result;
2479 return NULL_TREE;
2482 return vn_reference_lookup_1 (&vr1, vnresult);
2485 /* Lookup CALL in the current hash table and return the entry in
2486 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2488 void
2489 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2490 vn_reference_t vr)
2492 if (vnresult)
2493 *vnresult = NULL;
2495 tree vuse = gimple_vuse (call);
2497 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2498 vr->operands = valueize_shared_reference_ops_from_call (call);
2499 vr->type = gimple_expr_type (call);
2500 vr->set = 0;
2501 vr->hashcode = vn_reference_compute_hash (vr);
2502 vn_reference_lookup_1 (vr, vnresult);
2505 /* Insert OP into the current hash table with a value number of
2506 RESULT, and return the resulting reference structure we created. */
2508 static vn_reference_t
2509 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2511 vn_reference_s **slot;
2512 vn_reference_t vr1;
2513 bool tem;
2515 vr1 = current_info->references_pool->allocate ();
2516 if (TREE_CODE (result) == SSA_NAME)
2517 vr1->value_id = VN_INFO (result)->value_id;
2518 else
2519 vr1->value_id = get_or_alloc_constant_value_id (result);
2520 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2521 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2522 vr1->type = TREE_TYPE (op);
2523 vr1->set = get_alias_set (op);
2524 vr1->hashcode = vn_reference_compute_hash (vr1);
2525 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2526 vr1->result_vdef = vdef;
2528 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2529 INSERT);
2531 /* Because we look up stores using vuses, and value-number failures
2532 using the vdefs (see visit_reference_op_store for how and why),
2533 it's possible that on failure we may try to insert an already
2534 inserted store. This is not wrong; there is no SSA name for a
2535 store that we could use as a differentiator anyway. Thus, unlike
2536 the other lookup functions, you cannot gcc_assert (!*slot)
2537 here. */
2539 /* But free the old slot in case of a collision. */
2540 if (*slot)
2541 free_reference (*slot);
2543 *slot = vr1;
2544 return vr1;
2547 /* Insert a reference by its pieces into the current hash table with
2548 a value number of RESULT. Return the resulting reference
2549 structure we created. */
2551 vn_reference_t
2552 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2553 vec<vn_reference_op_s> operands,
2554 tree result, unsigned int value_id)
2557 vn_reference_s **slot;
2558 vn_reference_t vr1;
2560 vr1 = current_info->references_pool->allocate ();
2561 vr1->value_id = value_id;
2562 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2563 vr1->operands = valueize_refs (operands);
2564 vr1->type = type;
2565 vr1->set = set;
2566 vr1->hashcode = vn_reference_compute_hash (vr1);
2567 if (result && TREE_CODE (result) == SSA_NAME)
2568 result = SSA_VAL (result);
2569 vr1->result = result;
2571 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2572 INSERT);
2574 /* At this point we should have all the things inserted that we have
2575 seen before, and we should never try inserting something that
2576 already exists. */
2577 gcc_assert (!*slot);
2578 if (*slot)
2579 free_reference (*slot);
2581 *slot = vr1;
2582 return vr1;
2585 /* Compute and return the hash value for nary operation VBO1. */
2587 static hashval_t
2588 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2590 inchash::hash hstate;
2591 unsigned i;
2593 for (i = 0; i < vno1->length; ++i)
2594 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2595 vno1->op[i] = SSA_VAL (vno1->op[i]);
2597 if (((vno1->length == 2
2598 && commutative_tree_code (vno1->opcode))
2599 || (vno1->length == 3
2600 && commutative_ternary_tree_code (vno1->opcode)))
2601 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2602 std::swap (vno1->op[0], vno1->op[1]);
2603 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2604 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2606 std::swap (vno1->op[0], vno1->op[1]);
2607 vno1->opcode = swap_tree_comparison (vno1->opcode);
2610 hstate.add_int (vno1->opcode);
2611 for (i = 0; i < vno1->length; ++i)
2612 inchash::add_expr (vno1->op[i], hstate);
2614 return hstate.end ();
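/* As a purely illustrative consequence of the canonicalization above,

     _1 = b_2 + a_3;
     _4 = a_3 + b_2;

   hash and compare equal because commutative operands are brought into
   a canonical order, and a lookup of b_2 < a_1 can be satisfied by an
   entry for a_1 > b_2 because swapped comparisons are canonicalized to
   the same opcode and operand order.  */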
2617 /* Compare nary operations VNO1 and VNO2 and return true if they are
2618 equivalent. */
2620 bool
2621 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2623 unsigned i;
2625 if (vno1->hashcode != vno2->hashcode)
2626 return false;
2628 if (vno1->length != vno2->length)
2629 return false;
2631 if (vno1->opcode != vno2->opcode
2632 || !types_compatible_p (vno1->type, vno2->type))
2633 return false;
2635 for (i = 0; i < vno1->length; ++i)
2636 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2637 return false;
2639 /* BIT_INSERT_EXPR has an implicit operand as the type precision
2640 of op1. Need to check to make sure they are the same. */
2641 if (vno1->opcode == BIT_INSERT_EXPR
2642 && TREE_CODE (vno1->op[1]) == INTEGER_CST
2643 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
2644 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
2645 return false;
2647 return true;
2650 /* Initialize VNO from the pieces provided. */
2652 static void
2653 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2654 enum tree_code code, tree type, tree *ops)
2656 vno->opcode = code;
2657 vno->length = length;
2658 vno->type = type;
2659 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2662 /* Initialize VNO from OP. */
2664 static void
2665 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2667 unsigned i;
2669 vno->opcode = TREE_CODE (op);
2670 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2671 vno->type = TREE_TYPE (op);
2672 for (i = 0; i < vno->length; ++i)
2673 vno->op[i] = TREE_OPERAND (op, i);
2676 /* Return the number of operands for a vn_nary ops structure from STMT. */
2678 static unsigned int
2679 vn_nary_length_from_stmt (gimple *stmt)
2681 switch (gimple_assign_rhs_code (stmt))
2683 case REALPART_EXPR:
2684 case IMAGPART_EXPR:
2685 case VIEW_CONVERT_EXPR:
2686 return 1;
2688 case BIT_FIELD_REF:
2689 return 3;
2691 case CONSTRUCTOR:
2692 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2694 default:
2695 return gimple_num_ops (stmt) - 1;
2699 /* Initialize VNO from STMT. */
2701 static void
2702 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2704 unsigned i;
2706 vno->opcode = gimple_assign_rhs_code (stmt);
2707 vno->type = gimple_expr_type (stmt);
2708 switch (vno->opcode)
2710 case REALPART_EXPR:
2711 case IMAGPART_EXPR:
2712 case VIEW_CONVERT_EXPR:
2713 vno->length = 1;
2714 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2715 break;
2717 case BIT_FIELD_REF:
2718 vno->length = 3;
2719 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2720 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2721 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2722 break;
2724 case CONSTRUCTOR:
2725 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2726 for (i = 0; i < vno->length; ++i)
2727 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2728 break;
2730 default:
2731 gcc_checking_assert (!gimple_assign_single_p (stmt));
2732 vno->length = gimple_num_ops (stmt) - 1;
2733 for (i = 0; i < vno->length; ++i)
2734 vno->op[i] = gimple_op (stmt, i + 1);
2738 /* Compute the hashcode for VNO and look for it in the hash table;
2739 return the resulting value number if it exists in the hash table.
2740 Return NULL_TREE if it does not exist in the hash table or if the
2741 result field of the operation is NULL. VNRESULT will contain the
2742 vn_nary_op_t from the hashtable if it exists. */
2744 static tree
2745 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2747 vn_nary_op_s **slot;
2749 if (vnresult)
2750 *vnresult = NULL;
2752 vno->hashcode = vn_nary_op_compute_hash (vno);
2753 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2754 NO_INSERT);
2755 if (!slot && current_info == optimistic_info)
2756 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2757 NO_INSERT);
2758 if (!slot)
2759 return NULL_TREE;
2760 if (vnresult)
2761 *vnresult = *slot;
2762 return (*slot)->result;
2765 /* Lookup an n-ary operation by its pieces and return the resulting value
2766 number if it exists in the hash table. Return NULL_TREE if it does
2767 not exist in the hash table or if the result field of the operation
2768 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2769 if it exists. */
2771 tree
2772 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2773 tree type, tree *ops, vn_nary_op_t *vnresult)
2775 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2776 sizeof_vn_nary_op (length));
2777 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2778 return vn_nary_op_lookup_1 (vno1, vnresult);
2781 /* Lookup OP in the current hash table, and return the resulting value
2782 number if it exists in the hash table. Return NULL_TREE if it does
2783 not exist in the hash table or if the result field of the operation
2784 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2785 if it exists. */
2787 tree
2788 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2790 vn_nary_op_t vno1
2791 = XALLOCAVAR (struct vn_nary_op_s,
2792 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2793 init_vn_nary_op_from_op (vno1, op);
2794 return vn_nary_op_lookup_1 (vno1, vnresult);
2797 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2798 value number if it exists in the hash table. Return NULL_TREE if
2799 it does not exist in the hash table. VNRESULT will contain the
2800 vn_nary_op_t from the hashtable if it exists. */
2802 tree
2803 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2805 vn_nary_op_t vno1
2806 = XALLOCAVAR (struct vn_nary_op_s,
2807 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2808 init_vn_nary_op_from_stmt (vno1, stmt);
2809 return vn_nary_op_lookup_1 (vno1, vnresult);
2812 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2814 static vn_nary_op_t
2815 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2817 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2820 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2821 obstack. */
2823 static vn_nary_op_t
2824 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2826 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2827 &current_info->nary_obstack);
2829 vno1->value_id = value_id;
2830 vno1->length = length;
2831 vno1->result = result;
2833 return vno1;
2836 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2837 VNO->HASHCODE first. */
2839 static vn_nary_op_t
2840 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2841 bool compute_hash)
2843 vn_nary_op_s **slot;
2845 if (compute_hash)
2846 vno->hashcode = vn_nary_op_compute_hash (vno);
2848 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2849 /* While we do not want to insert things twice, it's awkward to
2850 avoid it in the case where visit_nary_op pattern-matches stuff
2851 and ends up simplifying the replacement to itself. We then
2852 get two inserts, one from visit_nary_op and one from
2853 vn_nary_build_or_lookup.
2854 So allow inserts with the same value number. */
2855 if (*slot && (*slot)->result == vno->result)
2856 return *slot;
2858 gcc_assert (!*slot);
2860 *slot = vno;
2861 return vno;
2864 /* Insert an n-ary operation into the current hash table using its
2865 pieces. Return the vn_nary_op_t structure we created and put in
2866 the hashtable. */
2868 vn_nary_op_t
2869 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2870 tree type, tree *ops,
2871 tree result, unsigned int value_id)
2873 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2874 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2875 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2878 /* Insert OP into the current hash table with a value number of
2879 RESULT. Return the vn_nary_op_t structure we created and put in
2880 the hashtable. */
2882 vn_nary_op_t
2883 vn_nary_op_insert (tree op, tree result)
2885 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2886 vn_nary_op_t vno1;
2888 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2889 init_vn_nary_op_from_op (vno1, op);
2890 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2893 /* Insert the rhs of STMT into the current hash table with a value number of
2894 RESULT. */
2896 static vn_nary_op_t
2897 vn_nary_op_insert_stmt (gimple *stmt, tree result)
2899 vn_nary_op_t vno1
2900 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2901 result, VN_INFO (result)->value_id);
2902 init_vn_nary_op_from_stmt (vno1, stmt);
2903 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2906 /* Compute a hashcode for PHI operation VP1 and return it. */
2908 static inline hashval_t
2909 vn_phi_compute_hash (vn_phi_t vp1)
2911 inchash::hash hstate (vp1->phiargs.length () > 2
2912 ? vp1->block->index : vp1->phiargs.length ());
2913 tree phi1op;
2914 tree type;
2915 edge e;
2916 edge_iterator ei;
2918 /* If all PHI arguments are constants we need to distinguish
2919 the PHI node via its type. */
2920 type = vp1->type;
2921 hstate.merge_hash (vn_hash_type (type));
2923 FOR_EACH_EDGE (e, ei, vp1->block->preds)
2925 /* Don't hash backedge values; they need to be handled as VN_TOP
2926 for optimistic value-numbering. */
2927 if (e->flags & EDGE_DFS_BACK)
2928 continue;
2930 phi1op = vp1->phiargs[e->dest_idx];
2931 if (phi1op == VN_TOP)
2932 continue;
2933 inchash::add_expr (phi1op, hstate);
2936 return hstate.end ();
2940 /* Return true if COND1 and COND2 represent the same condition; set
2941 *INVERTED_P if one needs to be inverted to make it the same as
2942 the other. */
2944 static bool
2945 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
2946 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
2948 enum tree_code code1 = gimple_cond_code (cond1);
2949 enum tree_code code2 = gimple_cond_code (cond2);
2951 *inverted_p = false;
2952 if (code1 == code2)
2954 else if (code1 == swap_tree_comparison (code2))
2955 std::swap (lhs2, rhs2);
2956 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
2957 *inverted_p = true;
2958 else if (code1 == invert_tree_comparison
2959 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
2961 std::swap (lhs2, rhs2);
2962 *inverted_p = true;
2964 else
2965 return false;
2967 return ((expressions_equal_p (lhs1, lhs2)
2968 && expressions_equal_p (rhs1, rhs2))
2969 || (commutative_tree_code (code1)
2970 && expressions_equal_p (lhs1, rhs2)
2971 && expressions_equal_p (rhs1, lhs2)));
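/* Illustrative pairs accepted by the predicate above (names are
   hypothetical): "a_1 < b_2" matches "b_2 > a_1" directly via the
   swapped comparison, while "a_1 < b_2" matches "a_1 >= b_2" with
   *INVERTED_P set, assuming the inversion is valid for the operand
   types (e.g. no NaNs are involved).  */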
2974 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2976 static int
2977 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2979 if (vp1->hashcode != vp2->hashcode)
2980 return false;
2982 if (vp1->block != vp2->block)
2984 if (vp1->phiargs.length () != vp2->phiargs.length ())
2985 return false;
2987 switch (vp1->phiargs.length ())
2989 case 1:
2990 /* Single-arg PHIs are just copies. */
2991 break;
2993 case 2:
2995 /* Rule out backedges into the PHI. */
2996 if (vp1->block->loop_father->header == vp1->block
2997 || vp2->block->loop_father->header == vp2->block)
2998 return false;
3000 /* If the PHI nodes do not have compatible types
3001 they are not the same. */
3002 if (!types_compatible_p (vp1->type, vp2->type))
3003 return false;
3005 basic_block idom1
3006 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3007 basic_block idom2
3008 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
3009 /* If the immediate dominators end in switch stmts, multiple
3010 values may end up in the same PHI arg via intermediate
3011 CFG merges. */
3012 if (EDGE_COUNT (idom1->succs) != 2
3013 || EDGE_COUNT (idom2->succs) != 2)
3014 return false;
3016 /* Verify the controlling stmt is the same. */
3017 gimple *last1 = last_stmt (idom1);
3018 gimple *last2 = last_stmt (idom2);
3019 if (gimple_code (last1) != GIMPLE_COND
3020 || gimple_code (last2) != GIMPLE_COND)
3021 return false;
3022 bool inverted_p;
3023 if (! cond_stmts_equal_p (as_a <gcond *> (last1),
3024 vp1->cclhs, vp1->ccrhs,
3025 as_a <gcond *> (last2),
3026 vp2->cclhs, vp2->ccrhs,
3027 &inverted_p))
3028 return false;
3030 /* Get at true/false controlled edges into the PHI. */
3031 edge te1, te2, fe1, fe2;
3032 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3033 &te1, &fe1)
3034 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3035 &te2, &fe2))
3036 return false;
3038 /* Swap edges if the second condition is the inverse of the
3039 first. */
3040 if (inverted_p)
3041 std::swap (te2, fe2);
3043 /* ??? Handle VN_TOP specially. */
3044 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3045 vp2->phiargs[te2->dest_idx])
3046 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3047 vp2->phiargs[fe2->dest_idx]))
3048 return false;
3050 return true;
3053 default:
3054 return false;
3058 /* If the PHI nodes do not have compatible types
3059 they are not the same. */
3060 if (!types_compatible_p (vp1->type, vp2->type))
3061 return false;
3063 /* Any phi in the same block will have its arguments in the
3064 same edge order, because of how we store phi nodes. */
3065 int i;
3066 tree phi1op;
3067 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
3069 tree phi2op = vp2->phiargs[i];
3070 if (phi1op == VN_TOP || phi2op == VN_TOP)
3071 continue;
3072 if (!expressions_equal_p (phi1op, phi2op))
3073 return false;
3076 return true;
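/* As a hypothetical example of the two-argument case above, the PHIs

     x_1 = PHI <a_2 (3), b_3 (4)>
     y_5 = PHI <a_2 (6), b_3 (7)>

   in two different blocks compare equal when both blocks are
   controlled by GIMPLE_CONDs testing the same valueized condition,
   with a_2 arriving on the true edge and b_3 on the false edge in
   each case.  If the second condition is the inverse of the first,
   the true and false edges are swapped before the arguments are
   compared.  */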
3079 static vec<tree> shared_lookup_phiargs;
3081 /* Lookup PHI in the current hash table, and return the resulting
3082 value number if it exists in the hash table. Return NULL_TREE if
3083 it does not exist in the hash table. */
3085 static tree
3086 vn_phi_lookup (gimple *phi)
3088 vn_phi_s **slot;
3089 struct vn_phi_s vp1;
3090 edge e;
3091 edge_iterator ei;
3093 shared_lookup_phiargs.truncate (0);
3094 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
3096 /* Canonicalize the SSA_NAME's to their value number. */
3097 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3099 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3100 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3101 shared_lookup_phiargs[e->dest_idx] = def;
3103 vp1.type = TREE_TYPE (gimple_phi_result (phi));
3104 vp1.phiargs = shared_lookup_phiargs;
3105 vp1.block = gimple_bb (phi);
3106 /* Extract values of the controlling condition. */
3107 vp1.cclhs = NULL_TREE;
3108 vp1.ccrhs = NULL_TREE;
3109 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1.block);
3110 if (EDGE_COUNT (idom1->succs) == 2)
3111 if (gcond *last1 = dyn_cast <gcond *> (last_stmt (idom1)))
3113 vp1.cclhs = vn_valueize (gimple_cond_lhs (last1));
3114 vp1.ccrhs = vn_valueize (gimple_cond_rhs (last1));
3116 vp1.hashcode = vn_phi_compute_hash (&vp1);
3117 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3118 NO_INSERT);
3119 if (!slot && current_info == optimistic_info)
3120 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3121 NO_INSERT);
3122 if (!slot)
3123 return NULL_TREE;
3124 return (*slot)->result;
3127 /* Insert PHI into the current hash table with a value number of
3128 RESULT. */
3130 static vn_phi_t
3131 vn_phi_insert (gimple *phi, tree result)
3133 vn_phi_s **slot;
3134 vn_phi_t vp1 = current_info->phis_pool->allocate ();
3135 vec<tree> args = vNULL;
3136 edge e;
3137 edge_iterator ei;
3139 args.safe_grow (gimple_phi_num_args (phi));
3141 /* Canonicalize the SSA_NAME's to their value number. */
3142 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3144 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3145 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3146 args[e->dest_idx] = def;
3148 vp1->value_id = VN_INFO (result)->value_id;
3149 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3150 vp1->phiargs = args;
3151 vp1->block = gimple_bb (phi);
3152 /* Extract values of the controlling condition. */
3153 vp1->cclhs = NULL_TREE;
3154 vp1->ccrhs = NULL_TREE;
3155 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3156 if (EDGE_COUNT (idom1->succs) == 2)
3157 if (gcond *last1 = dyn_cast <gcond *> (last_stmt (idom1)))
3159 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3160 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3162 vp1->result = result;
3163 vp1->hashcode = vn_phi_compute_hash (vp1);
3165 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3167 /* Because we iterate over phi operations more than once, it's
3168 possible the slot might already exist here, hence no assert. */
3169 *slot = vp1;
3170 return vp1;
3174 /* Print set of components in strongly connected component SCC to OUT. */
3176 static void
3177 print_scc (FILE *out, vec<tree> scc)
3179 tree var;
3180 unsigned int i;
3182 fprintf (out, "SCC consists of %u:", scc.length ());
3183 FOR_EACH_VEC_ELT (scc, i, var)
3185 fprintf (out, " ");
3186 print_generic_expr (out, var);
3188 fprintf (out, "\n");
3191 /* Return true if BB1 is dominated by BB2 taking into account edges
3192 that are not executable. */
3194 static bool
3195 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3197 edge_iterator ei;
3198 edge e;
3200 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3201 return true;
3203 /* Before iterating we'd like to know if there exists an
3204 (executable) path from bb2 to bb1 at all; if not we can
3205 directly return false. For now simply iterate once. */
3207 /* Iterate to the single executable bb1 predecessor. */
3208 if (EDGE_COUNT (bb1->preds) > 1)
3210 edge prede = NULL;
3211 FOR_EACH_EDGE (e, ei, bb1->preds)
3212 if (e->flags & EDGE_EXECUTABLE)
3214 if (prede)
3216 prede = NULL;
3217 break;
3219 prede = e;
3221 if (prede)
3223 bb1 = prede->src;
3225 /* Re-do the dominance check with changed bb1. */
3226 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3227 return true;
3231 /* Iterate to the single executable bb2 successor. */
3232 edge succe = NULL;
3233 FOR_EACH_EDGE (e, ei, bb2->succs)
3234 if (e->flags & EDGE_EXECUTABLE)
3236 if (succe)
3238 succe = NULL;
3239 break;
3241 succe = e;
3243 if (succe)
3245 /* Verify the reached block is only reached through succe.
3246 If there is only one edge we can spare us the dominator
3247 check and iterate directly. */
3248 if (EDGE_COUNT (succe->dest->preds) > 1)
3250 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3251 if (e != succe
3252 && (e->flags & EDGE_EXECUTABLE))
3254 succe = NULL;
3255 break;
3258 if (succe)
3260 bb2 = succe->dest;
3262 /* Re-do the dominance check with changed bb2. */
3263 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3264 return true;
3268 /* We could now iterate updating bb1 / bb2. */
3269 return false;
3272 /* Set the value number of FROM to TO, return true if it has changed
3273 as a result. */
3275 static inline bool
3276 set_ssa_val_to (tree from, tree to)
3278 tree currval = SSA_VAL (from);
3279 HOST_WIDE_INT toff, coff;
3281 /* The only things we allow as value numbers are SSA names
3282 and invariants. So assert that here. We don't allow VN_TOP
3283 as visiting a stmt should produce a value-number other than
3284 that.
3285 ??? Still VN_TOP can happen for unreachable code, so force
3286 it to varying in that case. Not all code is prepared to
3287 get VN_TOP on valueization. */
3288 if (to == VN_TOP)
3290 if (dump_file && (dump_flags & TDF_DETAILS))
3291 fprintf (dump_file, "Forcing value number to varying on "
3292 "receiving VN_TOP\n");
3293 to = from;
3296 gcc_assert (to != NULL_TREE
3297 && ((TREE_CODE (to) == SSA_NAME
3298 && (to == from || SSA_VAL (to) == to))
3299 || is_gimple_min_invariant (to)));
3301 if (from != to)
3303 if (currval == from)
3305 if (dump_file && (dump_flags & TDF_DETAILS))
3307 fprintf (dump_file, "Not changing value number of ");
3308 print_generic_expr (dump_file, from);
3309 fprintf (dump_file, " from VARYING to ");
3310 print_generic_expr (dump_file, to);
3311 fprintf (dump_file, "\n");
3313 return false;
3315 else if (currval != VN_TOP
3316 && ! is_gimple_min_invariant (currval)
3317 && is_gimple_min_invariant (to))
3319 if (dump_file && (dump_flags & TDF_DETAILS))
3321 fprintf (dump_file, "Forcing VARYING instead of changing "
3322 "value number of ");
3323 print_generic_expr (dump_file, from);
3324 fprintf (dump_file, " from ");
3325 print_generic_expr (dump_file, currval);
3326 fprintf (dump_file, " (non-constant) to ");
3327 print_generic_expr (dump_file, to);
3328 fprintf (dump_file, " (constant)\n");
3330 to = from;
3332 else if (TREE_CODE (to) == SSA_NAME
3333 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3334 to = from;
3337 if (dump_file && (dump_flags & TDF_DETAILS))
3339 fprintf (dump_file, "Setting value number of ");
3340 print_generic_expr (dump_file, from);
3341 fprintf (dump_file, " to ");
3342 print_generic_expr (dump_file, to);
3345 if (currval != to
3346 && !operand_equal_p (currval, to, 0)
3347 /* ??? For addresses involving volatile objects or types operand_equal_p
3348 does not reliably detect ADDR_EXPRs as equal. We know we are only
3349 getting invariant gimple addresses here, so can use
3350 get_addr_base_and_unit_offset to do this comparison. */
3351 && !(TREE_CODE (currval) == ADDR_EXPR
3352 && TREE_CODE (to) == ADDR_EXPR
3353 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3354 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3355 && coff == toff))
3357 if (dump_file && (dump_flags & TDF_DETAILS))
3358 fprintf (dump_file, " (changed)\n");
3360 /* If we equate two SSA names we have to make the side-band info
3361 of the leader conservative (and remember whatever original value
3362 was present). */
3363 if (TREE_CODE (to) == SSA_NAME)
3365 if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3366 && SSA_NAME_RANGE_INFO (to))
3368 if (SSA_NAME_IS_DEFAULT_DEF (to)
3369 || dominated_by_p_w_unex
3370 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3371 gimple_bb (SSA_NAME_DEF_STMT (to))))
3372 /* Keep the info from the dominator. */
3374 else
3376 /* Save old info. */
3377 if (! VN_INFO (to)->info.range_info)
3379 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3380 VN_INFO (to)->range_info_anti_range_p
3381 = SSA_NAME_ANTI_RANGE_P (to);
3383 /* Rather than allocating memory and unioning the info
3384 just clear it. */
3385 if (dump_file && (dump_flags & TDF_DETAILS))
3387 fprintf (dump_file, "clearing range info of ");
3388 print_generic_expr (dump_file, to);
3389 fprintf (dump_file, "\n");
3391 SSA_NAME_RANGE_INFO (to) = NULL;
3394 else if (POINTER_TYPE_P (TREE_TYPE (to))
3395 && SSA_NAME_PTR_INFO (to))
3397 if (SSA_NAME_IS_DEFAULT_DEF (to)
3398 || dominated_by_p_w_unex
3399 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3400 gimple_bb (SSA_NAME_DEF_STMT (to))))
3401 /* Keep the info from the dominator. */
3403 else if (! SSA_NAME_PTR_INFO (from)
3404 /* Handle the case of trivially equivalent info. */
3405 || memcmp (SSA_NAME_PTR_INFO (to),
3406 SSA_NAME_PTR_INFO (from),
3407 sizeof (ptr_info_def)) != 0)
3409 /* Save old info. */
3410 if (! VN_INFO (to)->info.ptr_info)
3411 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3412 /* Rather than allocating memory and unioning the info
3413 just clear it. */
3414 if (dump_file && (dump_flags & TDF_DETAILS))
3416 fprintf (dump_file, "clearing points-to info of ");
3417 print_generic_expr (dump_file, to);
3418 fprintf (dump_file, "\n");
3420 SSA_NAME_PTR_INFO (to) = NULL;
3425 VN_INFO (from)->valnum = to;
3426 return true;
3428 if (dump_file && (dump_flags & TDF_DETAILS))
3429 fprintf (dump_file, "\n");
3430 return false;
3433 /* Mark as processed all the definitions in the defining stmt of USE, or
3434 the USE itself. */
3436 static void
3437 mark_use_processed (tree use)
3439 ssa_op_iter iter;
3440 def_operand_p defp;
3441 gimple *stmt = SSA_NAME_DEF_STMT (use);
3443 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3445 VN_INFO (use)->use_processed = true;
3446 return;
3449 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3451 tree def = DEF_FROM_PTR (defp);
3453 VN_INFO (def)->use_processed = true;
3457 /* Set all definitions in STMT to value number to themselves.
3458 Return true if a value number changed. */
3460 static bool
3461 defs_to_varying (gimple *stmt)
3463 bool changed = false;
3464 ssa_op_iter iter;
3465 def_operand_p defp;
3467 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3469 tree def = DEF_FROM_PTR (defp);
3470 changed |= set_ssa_val_to (def, def);
3472 return changed;
3475 /* Visit a copy between LHS and RHS, return true if the value number
3476 changed. */
3478 static bool
3479 visit_copy (tree lhs, tree rhs)
3481 /* Valueize. */
3482 rhs = SSA_VAL (rhs);
3484 return set_ssa_val_to (lhs, rhs);
3487 /* Lookup a value of type WIDE_TYPE that, converted to the type of OP,
3488 has the same value as OP. */
3490 static tree
3491 valueized_wider_op (tree wide_type, tree op)
3493 if (TREE_CODE (op) == SSA_NAME)
3494 op = SSA_VAL (op);
3496 /* Either we have the op widened available. */
3497 tree ops[3] = {};
3498 ops[0] = op;
3499 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
3500 wide_type, ops, NULL);
3501 if (tem)
3502 return tem;
3504 /* Or the op is truncated from some existing value. */
3505 if (TREE_CODE (op) == SSA_NAME)
3507 gimple *def = SSA_NAME_DEF_STMT (op);
3508 if (is_gimple_assign (def)
3509 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3511 tem = gimple_assign_rhs1 (def);
3512 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
3514 if (TREE_CODE (tem) == SSA_NAME)
3515 tem = SSA_VAL (tem);
3516 return tem;
3521 /* For constants simply extend it. */
3522 if (TREE_CODE (op) == INTEGER_CST)
3523 return wide_int_to_tree (wide_type, op);
3525 return NULL_TREE;
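/* For example (SSA names are hypothetical), when asked for a value of
   _1 in type int where _1 = (unsigned short) _2 and _2 already has
   type int, the function above returns the value of _2, since
   converting it back to the type of _1 yields _1 again.  An existing
   widening conversion of _1 found in the hash table is used the same
   way, and an INTEGER_CST is simply extended to WIDE_TYPE.  */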
3528 /* Visit a nary operator RHS, value number it, and return true if the
3529 value number of LHS has changed as a result. */
3531 static bool
3532 visit_nary_op (tree lhs, gassign *stmt)
3534 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3535 if (result)
3536 return set_ssa_val_to (lhs, result);
3538 /* Do some special pattern matching for redundancies of operations
3539 in different types. */
3540 enum tree_code code = gimple_assign_rhs_code (stmt);
3541 tree type = TREE_TYPE (lhs);
3542 tree rhs1 = gimple_assign_rhs1 (stmt);
3543 switch (code)
3545 CASE_CONVERT:
3546 /* Match arithmetic done in a different type where we can easily
3547 substitute the result from some earlier sign-changed or widened
3548 operation. */
3549 if (INTEGRAL_TYPE_P (type)
3550 && TREE_CODE (rhs1) == SSA_NAME
3551 /* We only handle sign-changes or zero-extension -> & mask. */
3552 && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
3553 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
3554 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
3556 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
3557 if (def
3558 && (gimple_assign_rhs_code (def) == PLUS_EXPR
3559 || gimple_assign_rhs_code (def) == MINUS_EXPR
3560 || gimple_assign_rhs_code (def) == MULT_EXPR))
3562 tree ops[3] = {};
3563 /* Either we have the op widened available. */
3564 ops[0] = valueized_wider_op (type,
3565 gimple_assign_rhs1 (def));
3566 if (ops[0])
3567 ops[1] = valueized_wider_op (type,
3568 gimple_assign_rhs2 (def));
3569 if (ops[0] && ops[1])
3571 ops[0] = vn_nary_op_lookup_pieces
3572 (2, gimple_assign_rhs_code (def), type, ops, NULL);
3573 /* We have wider operation available. */
3574 if (ops[0])
3576 unsigned lhs_prec = TYPE_PRECISION (type);
3577 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
3578 if (lhs_prec == rhs_prec)
3580 ops[1] = NULL_TREE;
3581 result = vn_nary_build_or_lookup (NOP_EXPR,
3582 type, ops);
3583 if (result)
3585 bool changed = set_ssa_val_to (lhs, result);
3586 vn_nary_op_insert_stmt (stmt, result);
3587 return changed;
3590 else
3592 ops[1] = wide_int_to_tree (type,
3593 wi::mask (rhs_prec, false,
3594 lhs_prec));
3595 result = vn_nary_build_or_lookup (BIT_AND_EXPR,
3596 TREE_TYPE (lhs),
3597 ops);
3598 if (result)
3600 bool changed = set_ssa_val_to (lhs, result);
3601 vn_nary_op_insert_stmt (stmt, result);
3602 return changed;
3609 default:;
3612 bool changed = set_ssa_val_to (lhs, lhs);
3613 vn_nary_op_insert_stmt (stmt, lhs);
3614 return changed;
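/* A sketch of the conversion pattern handled above (all names, types
   and precisions are illustrative):

     unsigned short _1 = a_2 + b_3;
     int _4 = (int) a_2;
     int _5 = (int) b_3;
     int _6 = _4 + _5;
     int _7 = (int) _1;

   When the wider addition _6 is already available, _7 can be
   value-numbered to _6 & 0xffff for this zero-extension; for a pure
   sign-change of equal precision the wider result is reused directly
   through a NOP_EXPR.  */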
3617 /* Visit a call STMT storing into LHS. Return true if the value number
3618 of the LHS has changed as a result. */
3620 static bool
3621 visit_reference_op_call (tree lhs, gcall *stmt)
3623 bool changed = false;
3624 struct vn_reference_s vr1;
3625 vn_reference_t vnresult = NULL;
3626 tree vdef = gimple_vdef (stmt);
3628 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3629 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3630 lhs = NULL_TREE;
3632 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3633 if (vnresult)
3635 if (vnresult->result_vdef && vdef)
3636 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3637 else if (vdef)
3638 /* If the call was discovered to be pure or const reflect
3639 that as far as possible. */
3640 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
3642 if (!vnresult->result && lhs)
3643 vnresult->result = lhs;
3645 if (vnresult->result && lhs)
3646 changed |= set_ssa_val_to (lhs, vnresult->result);
3648 else
3650 vn_reference_t vr2;
3651 vn_reference_s **slot;
3652 tree vdef_val = vdef;
3653 if (vdef)
3655 /* If we value numbered the function of an indirect call to
3656 one not clobbering memory, value number its VDEF to its
3657 VUSE. */
3658 tree fn = gimple_call_fn (stmt);
3659 if (fn && TREE_CODE (fn) == SSA_NAME)
3661 fn = SSA_VAL (fn);
3662 if (TREE_CODE (fn) == ADDR_EXPR
3663 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3664 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
3665 & (ECF_CONST | ECF_PURE)))
3666 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
3668 changed |= set_ssa_val_to (vdef, vdef_val);
3670 if (lhs)
3671 changed |= set_ssa_val_to (lhs, lhs);
3672 vr2 = current_info->references_pool->allocate ();
3673 vr2->vuse = vr1.vuse;
3674 /* As we are not walking the virtual operand chain we know the
3675 shared_lookup_references are still original so we can re-use
3676 them here. */
3677 vr2->operands = vr1.operands.copy ();
3678 vr2->type = vr1.type;
3679 vr2->set = vr1.set;
3680 vr2->hashcode = vr1.hashcode;
3681 vr2->result = lhs;
3682 vr2->result_vdef = vdef_val;
3683 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3684 INSERT);
3685 gcc_assert (!*slot);
3686 *slot = vr2;
3689 return changed;
3692 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3693 and return true if the value number of the LHS has changed as a result. */
3695 static bool
3696 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3698 bool changed = false;
3699 tree last_vuse;
3700 tree result;
3702 last_vuse = gimple_vuse (stmt);
3703 last_vuse_ptr = &last_vuse;
3704 result = vn_reference_lookup (op, gimple_vuse (stmt),
3705 default_vn_walk_kind, NULL, true);
3706 last_vuse_ptr = NULL;
3708 /* We handle type-punning through unions by value-numbering based
3709 on offset and size of the access. Be prepared to handle a
3710 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
3711 if (result
3712 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3714 /* We will be setting the value number of lhs to the value number
3715 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3716 So first simplify and lookup this expression to see if it
3717 is already available. */
3718 code_helper rcode = VIEW_CONVERT_EXPR;
3719 tree ops[3] = { result };
3720 result = vn_nary_build_or_lookup (rcode, TREE_TYPE (op), ops);
3723 if (result)
3724 changed = set_ssa_val_to (lhs, result);
3725 else
3727 changed = set_ssa_val_to (lhs, lhs);
3728 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3731 return changed;
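/* As a hypothetical example of the type-punning handling above: for
   union { int i; float f; } u, after u.i = x_1 a load of u.f covers
   the same bytes, so the offset/size-based lookup may return the
   int-typed value x_1; the type mismatch is then resolved by
   simplifying or looking up VIEW_CONVERT_EXPR <float> (x_1) and using
   that as the result.  */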
3735 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3736 and return true if the value number of the LHS has changed as a result. */
3738 static bool
3739 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3741 bool changed = false;
3742 vn_reference_t vnresult = NULL;
3743 tree assign;
3744 bool resultsame = false;
3745 tree vuse = gimple_vuse (stmt);
3746 tree vdef = gimple_vdef (stmt);
3748 if (TREE_CODE (op) == SSA_NAME)
3749 op = SSA_VAL (op);
3751 /* First we want to look up using the *vuses* from the store and see
3752 if the last store to this location with the same address
3753 had the same value.
3755 The vuses represent the memory state before the store. If the
3756 memory state, address, and value of the store is the same as the
3757 last store to this location, then this store will produce the
3758 same memory state as that store.
3760 In this case the vdef versions for this store are value numbered to those
3761 vuse versions, since they represent the same memory state after
3762 this store.
3764 Otherwise, the vdefs for the store are used when inserting into
3765 the table, since the store generates a new memory state. */
3767 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
3768 if (vnresult
3769 && vnresult->result)
3771 tree result = vnresult->result;
3772 if (TREE_CODE (result) == SSA_NAME)
3773 result = SSA_VAL (result);
3774 resultsame = expressions_equal_p (result, op);
3775 if (resultsame)
3777 /* If the TBAA state isn't compatible for downstream reads
3778 we cannot value-number the VDEFs the same. */
3779 alias_set_type set = get_alias_set (lhs);
3780 if (vnresult->set != set
3781 && ! alias_set_subset_of (set, vnresult->set))
3782 resultsame = false;
3786 if (!resultsame)
3788 /* Only perform the following when being called from PRE
3789 which embeds tail merging. */
3790 if (default_vn_walk_kind == VN_WALK)
3792 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3793 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3794 if (vnresult)
3796 VN_INFO (vdef)->use_processed = true;
3797 return set_ssa_val_to (vdef, vnresult->result_vdef);
3801 if (dump_file && (dump_flags & TDF_DETAILS))
3803 fprintf (dump_file, "No store match\n");
3804 fprintf (dump_file, "Value numbering store ");
3805 print_generic_expr (dump_file, lhs);
3806 fprintf (dump_file, " to ");
3807 print_generic_expr (dump_file, op);
3808 fprintf (dump_file, "\n");
3810 /* Have to set value numbers before insert, since insert is
3811 going to valueize the references in-place. */
3812 if (vdef)
3813 changed |= set_ssa_val_to (vdef, vdef);
3815 /* Do not insert structure copies into the tables. */
3816 if (is_gimple_min_invariant (op)
3817 || is_gimple_reg (op))
3818 vn_reference_insert (lhs, op, vdef, NULL);
3820 /* Only perform the following when being called from PRE
3821 which embeds tail merging. */
3822 if (default_vn_walk_kind == VN_WALK)
3824 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3825 vn_reference_insert (assign, lhs, vuse, vdef);
3828 else
3830 /* We had a match, so value number the vdef to have the value
3831 number of the vuse it came from. */
3833 if (dump_file && (dump_flags & TDF_DETAILS))
3834 fprintf (dump_file, "Store matched earlier value, "
3835 "value numbering store vdefs to matching vuses.\n");
3837 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3840 return changed;
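/* For illustration (virtual operands are hypothetical), in

     # .MEM_3 = VDEF <.MEM_1>
     a.f = x_2;
     ...
     # .MEM_4 = VDEF <.MEM_3>
     a.f = x_2;

   the second store writes the same value to the same location with a
   compatible alias set, so .MEM_4 is value-numbered to the value of
   .MEM_3 and the store can later be eliminated as redundant.  */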
3843 /* Visit and value number PHI, return true if the value number
3844 changed. */
3846 static bool
3847 visit_phi (gimple *phi)
3849 bool changed = false;
3850 tree result;
3851 tree sameval = VN_TOP;
3852 bool allsame = true;
3853 unsigned n_executable = 0;
3855 /* TODO: We could check for this in init_sccvn, and replace this
3856 with a gcc_assert. */
3857 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3858 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3860 /* See if all non-TOP arguments have the same value. TOP is
3861 equivalent to everything, so we can ignore it. */
3862 edge_iterator ei;
3863 edge e;
3864 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3865 if (e->flags & EDGE_EXECUTABLE)
3867 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3869 ++n_executable;
3870 if (TREE_CODE (def) == SSA_NAME)
3871 def = SSA_VAL (def);
3872 if (def == VN_TOP)
3873 continue;
3874 if (sameval == VN_TOP)
3875 sameval = def;
3876 else if (!expressions_equal_p (def, sameval))
3878 allsame = false;
3879 break;
3883 /* If none of the edges was executable or all incoming values are
3884 undefined, keep the value-number at VN_TOP. If only a single edge
3885 is executable, use its value. */
3886 if (sameval == VN_TOP
3887 || n_executable == 1)
3888 return set_ssa_val_to (PHI_RESULT (phi), sameval);
3890 /* First see if it is equivalent to a phi node in this block. We prefer
3891 this as it allows IV elimination - see PRs 66502 and 67167. */
3892 result = vn_phi_lookup (phi);
3893 if (result)
3894 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3895 /* Otherwise, if all arguments were value numbered to the same value,
3896 the phi node has that value. */
3897 else if (allsame)
3898 changed = set_ssa_val_to (PHI_RESULT (phi), sameval);
3899 else
3901 vn_phi_insert (phi, PHI_RESULT (phi));
3902 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3905 return changed;
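/* An illustrative sketch (not part of this file) of the common case
   visit_phi handles: all executable PHI arguments value-number to the same
   thing, so the PHI result is given that value.  Names are made up.  */
#if 0
int
example (int a, int cond)
{
  int x;
  if (cond)
    x = a + 0;	/* simplifies and value-numbers to a */
  else
    x = a;	/* value-numbers to a */
  /* x = PHI <x_1, a>: both arguments value-number to a on their executable
     edges, so sameval stays a, allsame holds and SSA_VAL (x) becomes a.  */
  return x;
}
#endif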
3908 /* Try to simplify RHS using equivalences and constant folding. */
3910 static tree
3911 try_to_simplify (gassign *stmt)
3913 enum tree_code code = gimple_assign_rhs_code (stmt);
3914 tree tem;
3916 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
3917 in this case; there is no point in doing extra work. */
3918 if (code == SSA_NAME)
3919 return NULL_TREE;
3921 /* First try constant folding based on our current lattice. */
3922 mprts_hook = vn_lookup_simplify_result;
3923 mprts_hook_cnt = 9;
3924 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3925 mprts_hook = NULL;
3926 if (tem
3927 && (TREE_CODE (tem) == SSA_NAME
3928 || is_gimple_min_invariant (tem)))
3929 return tem;
3931 return NULL_TREE;
3934 /* Visit and value number USE, return true if the value number
3935 changed. */
3937 static bool
3938 visit_use (tree use)
3940 bool changed = false;
3941 gimple *stmt = SSA_NAME_DEF_STMT (use);
3943 mark_use_processed (use);
3945 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3946 if (dump_file && (dump_flags & TDF_DETAILS)
3947 && !SSA_NAME_IS_DEFAULT_DEF (use))
3949 fprintf (dump_file, "Value numbering ");
3950 print_generic_expr (dump_file, use);
3951 fprintf (dump_file, " stmt = ");
3952 print_gimple_stmt (dump_file, stmt, 0);
3955 /* Handle uninitialized uses. */
3956 if (SSA_NAME_IS_DEFAULT_DEF (use))
3957 changed = set_ssa_val_to (use, use);
3958 else if (gimple_code (stmt) == GIMPLE_PHI)
3959 changed = visit_phi (stmt);
3960 else if (gimple_has_volatile_ops (stmt))
3961 changed = defs_to_varying (stmt);
3962 else if (gassign *ass = dyn_cast <gassign *> (stmt))
3964 enum tree_code code = gimple_assign_rhs_code (ass);
3965 tree lhs = gimple_assign_lhs (ass);
3966 tree rhs1 = gimple_assign_rhs1 (ass);
3967 tree simplified;
3969 /* Shortcut for copies. Simplifying copies is pointless,
3970 since we copy the expression and value they represent. */
3971 if (code == SSA_NAME
3972 && TREE_CODE (lhs) == SSA_NAME)
3974 changed = visit_copy (lhs, rhs1);
3975 goto done;
3977 simplified = try_to_simplify (ass);
3978 if (simplified)
3980 if (dump_file && (dump_flags & TDF_DETAILS))
3982 fprintf (dump_file, "RHS ");
3983 print_gimple_expr (dump_file, ass, 0);
3984 fprintf (dump_file, " simplified to ");
3985 print_generic_expr (dump_file, simplified);
3986 fprintf (dump_file, "\n");
3989 /* Setting value numbers to constants will occasionally
3990 screw up phi congruence because constants are not
3991 uniquely associated with a single ssa name that can be
3992 looked up. */
3993 if (simplified
3994 && is_gimple_min_invariant (simplified)
3995 && TREE_CODE (lhs) == SSA_NAME)
3997 changed = set_ssa_val_to (lhs, simplified);
3998 goto done;
4000 else if (simplified
4001 && TREE_CODE (simplified) == SSA_NAME
4002 && TREE_CODE (lhs) == SSA_NAME)
4004 changed = visit_copy (lhs, simplified);
4005 goto done;
4008 if ((TREE_CODE (lhs) == SSA_NAME
4009 /* We can substitute SSA_NAMEs that are live over
4010 abnormal edges with their constant value. */
4011 && !(gimple_assign_copy_p (ass)
4012 && is_gimple_min_invariant (rhs1))
4013 && !(simplified
4014 && is_gimple_min_invariant (simplified))
4015 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4016 /* Stores or copies from SSA_NAMEs that are live over
4017 abnormal edges are a problem. */
4018 || (code == SSA_NAME
4019 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4020 changed = defs_to_varying (ass);
4021 else if (REFERENCE_CLASS_P (lhs)
4022 || DECL_P (lhs))
4023 changed = visit_reference_op_store (lhs, rhs1, ass);
4024 else if (TREE_CODE (lhs) == SSA_NAME)
4026 if ((gimple_assign_copy_p (ass)
4027 && is_gimple_min_invariant (rhs1))
4028 || (simplified
4029 && is_gimple_min_invariant (simplified)))
4031 if (simplified)
4032 changed = set_ssa_val_to (lhs, simplified);
4033 else
4034 changed = set_ssa_val_to (lhs, rhs1);
4036 else
4038 /* Visit the original statement. */
4039 switch (vn_get_stmt_kind (ass))
4041 case VN_NARY:
4042 changed = visit_nary_op (lhs, ass);
4043 break;
4044 case VN_REFERENCE:
4045 changed = visit_reference_op_load (lhs, rhs1, ass);
4046 break;
4047 default:
4048 changed = defs_to_varying (ass);
4049 break;
4053 else
4054 changed = defs_to_varying (ass);
4056 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4058 tree lhs = gimple_call_lhs (call_stmt);
4059 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4061 /* Try constant folding based on our current lattice. */
4062 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4063 vn_valueize);
4064 if (simplified)
4066 if (dump_file && (dump_flags & TDF_DETAILS))
4068 fprintf (dump_file, "call ");
4069 print_gimple_expr (dump_file, call_stmt, 0);
4070 fprintf (dump_file, " simplified to ");
4071 print_generic_expr (dump_file, simplified);
4072 fprintf (dump_file, "\n");
4075 /* Setting value numbers to constants will occasionally
4076 screw up phi congruence because constants are not
4077 uniquely associated with a single ssa name that can be
4078 looked up. */
4079 if (simplified
4080 && is_gimple_min_invariant (simplified))
4082 changed = set_ssa_val_to (lhs, simplified);
4083 if (gimple_vdef (call_stmt))
4084 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4085 SSA_VAL (gimple_vuse (call_stmt)));
4086 goto done;
4088 else if (simplified
4089 && TREE_CODE (simplified) == SSA_NAME)
4091 changed = visit_copy (lhs, simplified);
4092 if (gimple_vdef (call_stmt))
4093 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4094 SSA_VAL (gimple_vuse (call_stmt)));
4095 goto done;
4097 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4099 changed = defs_to_varying (call_stmt);
4100 goto done;
4104 /* Pick up flags from a devirtualization target. */
4105 tree fn = gimple_call_fn (stmt);
4106 int extra_fnflags = 0;
4107 if (fn && TREE_CODE (fn) == SSA_NAME)
4109 fn = SSA_VAL (fn);
4110 if (TREE_CODE (fn) == ADDR_EXPR
4111 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4112 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4114 if (!gimple_call_internal_p (call_stmt)
4115 && (/* Calls to the same function with the same vuse
4116 and the same operands do not necessarily return the same
4117 value, unless they're pure or const. */
4118 ((gimple_call_flags (call_stmt) | extra_fnflags)
4119 & (ECF_PURE | ECF_CONST))
4120 /* If calls have a vdef, subsequent calls won't have
4121 the same incoming vuse. So, if two calls with a vdef have the
4122 same vuse, we know they're not subsequent.
4123 Two non-subsequent calls to the same function with the
4124 same vuse and the same operands can be value numbered
4125 the same, because there is no code in the program that can
4126 compare the two values...
4127 || (gimple_vdef (call_stmt)
4128 /* ... unless the call returns a pointer which does
4129 not alias with anything else, in which case the
4130 information that the values are distinct is encoded
4131 in the IL. */
4132 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4133 /* Only perform the following when being called from PRE
4134 which embeds tail merging. */
4135 && default_vn_walk_kind == VN_WALK)))
4136 changed = visit_reference_op_call (lhs, call_stmt);
4137 else
4138 changed = defs_to_varying (call_stmt);
4140 else
4141 changed = defs_to_varying (stmt);
4142 done:
4143 return changed;
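/* An illustrative sketch (not part of this file) of two calls the logic
   above can give the same value number: same function, same operands, same
   vuse, and a pure callee.  The function names are made up.  */
#if 0
extern int pure_len (const char *) __attribute__ ((pure));

int
example (const char *s)
{
  int a = pure_len (s);
  int b = pure_len (s);	/* Same operands and vuse as the call above and
			   pure_len is ECF_PURE, so b gets the same value
			   number as a.  */
  return a + b;
}
#endif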
4146 /* Compare two operands by reverse postorder index.  */
4148 static int
4149 compare_ops (const void *pa, const void *pb)
4151 const tree opa = *((const tree *)pa);
4152 const tree opb = *((const tree *)pb);
4153 gimple *opstmta = SSA_NAME_DEF_STMT (opa);
4154 gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
4155 basic_block bba;
4156 basic_block bbb;
4158 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
4159 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4160 else if (gimple_nop_p (opstmta))
4161 return -1;
4162 else if (gimple_nop_p (opstmtb))
4163 return 1;
4165 bba = gimple_bb (opstmta);
4166 bbb = gimple_bb (opstmtb);
4168 if (!bba && !bbb)
4169 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4170 else if (!bba)
4171 return -1;
4172 else if (!bbb)
4173 return 1;
4175 if (bba == bbb)
4177 if (gimple_code (opstmta) == GIMPLE_PHI
4178 && gimple_code (opstmtb) == GIMPLE_PHI)
4179 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4180 else if (gimple_code (opstmta) == GIMPLE_PHI)
4181 return -1;
4182 else if (gimple_code (opstmtb) == GIMPLE_PHI)
4183 return 1;
4184 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
4185 return gimple_uid (opstmta) - gimple_uid (opstmtb);
4186 else
4187 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4189 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
4192 /* Sort an array containing members of a strongly connected component
4193 SCC so that the members are ordered by RPO number.
4194 This means that when the sort is complete, iterating through the
4195 array will give you the members in RPO order. */
4197 static void
4198 sort_scc (vec<tree> scc)
4200 scc.qsort (compare_ops);
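/* An illustrative sketch (not part of GCC) of the ordering compare_ops
   implements, using plain data: sort items by the RPO number of their
   defining block, breaking ties by statement uid, so that iterating the
   sorted array gives the SCC members in RPO order (the real comparator
   additionally puts default defs and PHIs first).  */
#if 0
#include <stdlib.h>

struct item { int rpo; int uid; };

static int
cmp_items (const void *pa, const void *pb)
{
  const struct item *a = (const struct item *) pa;
  const struct item *b = (const struct item *) pb;
  if (a->rpo != b->rpo)
    return a->rpo - b->rpo;
  return a->uid - b->uid;
}

void
sort_items (struct item *items, size_t n)
{
  qsort (items, n, sizeof (struct item), cmp_items);
}
#endif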
4203 /* Insert the no longer used nary ONARY to the hash INFO. */
4205 static void
4206 copy_nary (vn_nary_op_t onary, vn_tables_t info)
4208 size_t size = sizeof_vn_nary_op (onary->length);
4209 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
4210 &info->nary_obstack);
4211 memcpy (nary, onary, size);
4212 vn_nary_op_insert_into (nary, info->nary, false);
4215 /* Insert the no longer used phi OPHI to the hash INFO. */
4217 static void
4218 copy_phi (vn_phi_t ophi, vn_tables_t info)
4220 vn_phi_t phi = info->phis_pool->allocate ();
4221 vn_phi_s **slot;
4222 memcpy (phi, ophi, sizeof (*phi));
4223 ophi->phiargs.create (0);
4224 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
4225 gcc_assert (!*slot);
4226 *slot = phi;
4229 /* Insert the no longer used reference OREF to the hash INFO. */
4231 static void
4232 copy_reference (vn_reference_t oref, vn_tables_t info)
4234 vn_reference_t ref;
4235 vn_reference_s **slot;
4236 ref = info->references_pool->allocate ();
4237 memcpy (ref, oref, sizeof (*ref));
4238 oref->operands.create (0);
4239 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
4240 if (*slot)
4241 free_reference (*slot);
4242 *slot = ref;
4245 /* Process a strongly connected component in the SSA graph. */
4247 static void
4248 process_scc (vec<tree> scc)
4250 tree var;
4251 unsigned int i;
4252 unsigned int iterations = 0;
4253 bool changed = true;
4254 vn_nary_op_iterator_type hin;
4255 vn_phi_iterator_type hip;
4256 vn_reference_iterator_type hir;
4257 vn_nary_op_t nary;
4258 vn_phi_t phi;
4259 vn_reference_t ref;
4261 /* If the SCC has a single member, just visit it. */
4262 if (scc.length () == 1)
4264 tree use = scc[0];
4265 if (VN_INFO (use)->use_processed)
4266 return;
4267 /* We need to make sure it doesn't form a cycle itself, which can
4268 happen for self-referential PHI nodes. In that case we would
4269 end up inserting an expression with VN_TOP operands into the
4270 valid table which makes us derive bogus equivalences later.
4271 The cheapest way to check this is to assume it for all PHI nodes. */
4272 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
4273 /* Fallthru to iteration. */ ;
4274 else
4276 visit_use (use);
4277 return;
4281 if (dump_file && (dump_flags & TDF_DETAILS))
4282 print_scc (dump_file, scc);
4284 /* Iterate over the SCC with the optimistic table until it stops
4285 changing. */
4286 current_info = optimistic_info;
4287 while (changed)
4289 changed = false;
4290 iterations++;
4291 if (dump_file && (dump_flags & TDF_DETAILS))
4292 fprintf (dump_file, "Starting iteration %d\n", iterations);
4293 /* As we are value-numbering optimistically we have to
4294 clear the expression tables and the simplified expressions
4295 in each iteration until we converge. */
4296 optimistic_info->nary->empty ();
4297 optimistic_info->phis->empty ();
4298 optimistic_info->references->empty ();
4299 obstack_free (&optimistic_info->nary_obstack, NULL);
4300 gcc_obstack_init (&optimistic_info->nary_obstack);
4301 optimistic_info->phis_pool->release ();
4302 optimistic_info->references_pool->release ();
4303 FOR_EACH_VEC_ELT (scc, i, var)
4304 gcc_assert (!VN_INFO (var)->needs_insertion
4305 && VN_INFO (var)->expr == NULL);
4306 FOR_EACH_VEC_ELT (scc, i, var)
4307 changed |= visit_use (var);
4310 if (dump_file && (dump_flags & TDF_DETAILS))
4311 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4312 statistics_histogram_event (cfun, "SCC iterations", iterations);
4314 /* Finally, copy the contents of the no longer used optimistic
4315 table to the valid table. */
4316 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
4317 copy_nary (nary, valid_info);
4318 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
4319 copy_phi (phi, valid_info);
4320 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
4321 ref, vn_reference_t, hir)
4322 copy_reference (ref, valid_info);
4324 current_info = valid_info;
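/* An illustrative sketch (not part of GCC) of the fixed-point iteration
   process_scc performs: optimistically re-number every SCC member with
   fresh expression tables until no value number changes any more.  The
   helper names are hypothetical.  */
#if 0
#include <stdbool.h>

extern void clear_optimistic_tables (void);	/* hypothetical */
extern bool revisit_member (unsigned);		/* hypothetical */

static void
iterate_scc_to_fixed_point (unsigned scc_members)
{
  bool changed = true;
  while (changed)
    {
      changed = false;
      /* Value numbering is optimistic, so expression-table entries derived
	 during the previous round must be dropped before re-visiting.  */
      clear_optimistic_tables ();
      for (unsigned i = 0; i < scc_members; ++i)
	changed |= revisit_member (i);
    }
  /* Once stable, the optimistic entries are provably valid and can be
     copied into the persistent tables.  */
}
#endif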
4328 /* Pop the components of the found SCC for NAME off the SCC stack
4329 and process them. If the SCC exceeds the size limit, its members
4330 are dropped to varying instead of being iterated. */
4332 static void
4333 extract_and_process_scc_for_name (tree name)
4335 auto_vec<tree> scc;
4336 tree x;
4338 /* Found an SCC, pop the components off the SCC stack and
4339 process them. */
4342 x = sccstack.pop ();
4344 VN_INFO (x)->on_sccstack = false;
4345 scc.safe_push (x);
4346 } while (x != name);
4348 /* Drop all defs in the SCC to varying in case an SCC turns out to be
4349 incredibly large.
4350 ??? Just switch to a non-optimistic mode that avoids any iteration. */
4351 if (scc.length () > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4353 if (dump_file)
4355 print_scc (dump_file, scc);
4356 fprintf (dump_file, "WARNING: Giving up value-numbering SCC due to "
4357 "size %u exceeding %u\n", scc.length (),
4358 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4360 tree var;
4361 unsigned i;
4362 FOR_EACH_VEC_ELT (scc, i, var)
4364 gimple *def = SSA_NAME_DEF_STMT (var);
4365 mark_use_processed (var);
4366 if (SSA_NAME_IS_DEFAULT_DEF (var)
4367 || gimple_code (def) == GIMPLE_PHI)
4368 set_ssa_val_to (var, var);
4369 else
4370 defs_to_varying (def);
4372 return;
4375 if (scc.length () > 1)
4376 sort_scc (scc);
4378 process_scc (scc);
4381 /* Depth first search on NAME to discover and process SCC's in the SSA
4382 graph.
4383 Execution of this algorithm relies on the fact that the SCC's are
4384 popped off the stack in topological order.
4385 SCCs that exceed the size limit are not iterated; their members are
4386 dropped to varying instead. */
4388 static void
4389 DFS (tree name)
4391 auto_vec<ssa_op_iter> itervec;
4392 auto_vec<tree> namevec;
4393 use_operand_p usep = NULL;
4394 gimple *defstmt;
4395 tree use;
4396 ssa_op_iter iter;
4398 start_over:
4399 /* SCC info */
4400 VN_INFO (name)->dfsnum = next_dfs_num++;
4401 VN_INFO (name)->visited = true;
4402 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4404 sccstack.safe_push (name);
4405 VN_INFO (name)->on_sccstack = true;
4406 defstmt = SSA_NAME_DEF_STMT (name);
4408 /* Recursively DFS on our operands, looking for SCC's. */
4409 if (!gimple_nop_p (defstmt))
4411 /* Push a new iterator. */
4412 if (gphi *phi = dyn_cast <gphi *> (defstmt))
4413 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4414 else
4415 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4417 else
4418 clear_and_done_ssa_iter (&iter);
4420 while (1)
4422 /* If we are done processing uses of a name, go up the stack
4423 of iterators and process SCCs as we found them. */
4424 if (op_iter_done (&iter))
4426 /* See if we found an SCC. */
4427 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4428 extract_and_process_scc_for_name (name);
4430 /* Check if we are done. */
4431 if (namevec.is_empty ())
4432 return;
4434 /* Restore the last use walker and continue walking there. */
4435 use = name;
4436 name = namevec.pop ();
4437 memcpy (&iter, &itervec.last (),
4438 sizeof (ssa_op_iter));
4439 itervec.pop ();
4440 goto continue_walking;
4443 use = USE_FROM_PTR (usep);
4445 /* Since we handle phi nodes, we will sometimes get
4446 invariants in the use expression. */
4447 if (TREE_CODE (use) == SSA_NAME)
4449 if (! (VN_INFO (use)->visited))
4451 /* Recurse by pushing the current use walking state on
4452 the stack and starting over. */
4453 itervec.safe_push (iter);
4454 namevec.safe_push (name);
4455 name = use;
4456 goto start_over;
4458 continue_walking:
4459 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4460 VN_INFO (use)->low);
4462 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4463 && VN_INFO (use)->on_sccstack)
4465 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4466 VN_INFO (name)->low);
4470 usep = op_iter_next_use (&iter);
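/* An illustrative sketch (not part of GCC) of the algorithm DFS ()
   implements iteratively with explicit name/iterator stacks: Tarjan's SCC
   detection via DFS numbers and low-links, shown recursively on a tiny
   hard-coded graph (0 -> 1 -> 2 -> 0, plus an isolated node 3).  */
#if 0
#include <stdio.h>

#define N 4
static const int succs[N][N] = { { 1 }, { 2 }, { 0 }, { 0 } };
static const int nsuccs[N] = { 1, 1, 1, 0 };

static int dfsnum[N], low[N], onstack[N], visited[N];
static int stack[N], sp, next_dfs;

static void
tarjan (int v)
{
  visited[v] = 1;
  dfsnum[v] = low[v] = next_dfs++;
  stack[sp++] = v;
  onstack[v] = 1;
  for (int i = 0; i < nsuccs[v]; ++i)
    {
      int w = succs[v][i];
      if (!visited[w])
	{
	  tarjan (w);
	  if (low[w] < low[v])
	    low[v] = low[w];
	}
      else if (onstack[w] && dfsnum[w] < low[v])
	low[v] = dfsnum[w];
    }
  /* V is the root of an SCC; pop its members off the stack.  */
  if (low[v] == dfsnum[v])
    {
      int w;
      printf ("SCC:");
      do
	{
	  w = stack[--sp];
	  onstack[w] = 0;
	  printf (" %d", w);
	}
      while (w != v);
      printf ("\n");
    }
}

int
main (void)
{
  for (int v = 0; v < N; ++v)
    if (!visited[v])
      tarjan (v);
  return 0;
}
#endif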
4474 /* Allocate a value number table. */
4476 static void
4477 allocate_vn_table (vn_tables_t table)
4479 table->phis = new vn_phi_table_type (23);
4480 table->nary = new vn_nary_op_table_type (23);
4481 table->references = new vn_reference_table_type (23);
4483 gcc_obstack_init (&table->nary_obstack);
4484 table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4485 table->references_pool = new object_allocator<vn_reference_s>
4486 ("VN references");
4489 /* Free a value number table. */
4491 static void
4492 free_vn_table (vn_tables_t table)
4494 delete table->phis;
4495 table->phis = NULL;
4496 delete table->nary;
4497 table->nary = NULL;
4498 delete table->references;
4499 table->references = NULL;
4500 obstack_free (&table->nary_obstack, NULL);
4501 delete table->phis_pool;
4502 delete table->references_pool;
4505 static void
4506 init_scc_vn (void)
4508 int j;
4509 int *rpo_numbers_temp;
4511 calculate_dominance_info (CDI_DOMINATORS);
4512 mark_dfs_back_edges ();
4514 sccstack.create (0);
4515 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4517 constant_value_ids = BITMAP_ALLOC (NULL);
4519 next_dfs_num = 1;
4520 next_value_id = 1;
4522 vn_ssa_aux_table.create (num_ssa_names + 1);
4523 /* The create call above doesn't actually grow the vector to the right
4524 size; it just preallocates the space to do so. */
4525 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4526 gcc_obstack_init (&vn_ssa_aux_obstack);
4528 shared_lookup_phiargs.create (0);
4529 shared_lookup_references.create (0);
4530 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4531 rpo_numbers_temp =
4532 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4533 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4535 /* rpo_numbers_temp is an array in RPO order: rpo_numbers_temp[i] = bb
4536 means that the i'th block in RPO order is bb. We want to map bb
4537 indices to RPO numbers, so we need to invert this array. */
4538 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4539 rpo_numbers[rpo_numbers_temp[j]] = j;
4541 XDELETE (rpo_numbers_temp);
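  /* For example (with made-up block indices): if the RPO walk visits
     blocks 2, 5, 3 and 4 in that order, rpo_numbers_temp is { 2, 5, 3, 4 }
     and the loop above produces rpo_numbers[2] = 0, rpo_numbers[5] = 1,
     rpo_numbers[3] = 2 and rpo_numbers[4] = 3.  */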
4543 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
4545 renumber_gimple_stmt_uids ();
4547 /* Create the valid and optimistic value numbering tables. */
4548 valid_info = XCNEW (struct vn_tables_s);
4549 allocate_vn_table (valid_info);
4550 optimistic_info = XCNEW (struct vn_tables_s);
4551 allocate_vn_table (optimistic_info);
4552 current_info = valid_info;
4554 /* Create the VN_INFO structures, and initialize value numbers to
4555 TOP or VARYING for parameters. */
4556 size_t i;
4557 tree name;
4559 FOR_EACH_SSA_NAME (i, name, cfun)
4561 VN_INFO_GET (name)->valnum = VN_TOP;
4562 VN_INFO (name)->needs_insertion = false;
4563 VN_INFO (name)->expr = NULL;
4564 VN_INFO (name)->value_id = 0;
4566 if (!SSA_NAME_IS_DEFAULT_DEF (name))
4567 continue;
4569 switch (TREE_CODE (SSA_NAME_VAR (name)))
4571 case VAR_DECL:
4572 /* Undefined vars keep TOP. */
4573 break;
4575 case PARM_DECL:
4576 /* Parameters are VARYING but we can record a condition
4577 if we know it is a non-NULL pointer. */
4578 VN_INFO (name)->visited = true;
4579 VN_INFO (name)->valnum = name;
4580 if (POINTER_TYPE_P (TREE_TYPE (name))
4581 && nonnull_arg_p (SSA_NAME_VAR (name)))
4583 tree ops[2];
4584 ops[0] = name;
4585 ops[1] = build_int_cst (TREE_TYPE (name), 0);
4586 vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4587 boolean_true_node, 0);
4588 if (dump_file && (dump_flags & TDF_DETAILS))
4590 fprintf (dump_file, "Recording ");
4591 print_generic_expr (dump_file, name, TDF_SLIM);
4592 fprintf (dump_file, " != 0\n");
4595 break;
4597 case RESULT_DECL:
4598 /* If the result is passed by invisible reference the default
4599 def is initialized, otherwise it's uninitialized. */
4600 if (DECL_BY_REFERENCE (SSA_NAME_VAR (name)))
4602 VN_INFO (name)->visited = true;
4603 VN_INFO (name)->valnum = name;
4605 break;
4607 default:
4608 gcc_unreachable ();
4613 /* Restore SSA info that has been reset on value leaders. */
4615 void
4616 scc_vn_restore_ssa_info (void)
4618 unsigned i;
4619 tree name;
4621 FOR_EACH_SSA_NAME (i, name, cfun)
4623 if (has_VN_INFO (name))
4625 if (VN_INFO (name)->needs_insertion)
4627 else if (POINTER_TYPE_P (TREE_TYPE (name))
4628 && VN_INFO (name)->info.ptr_info)
4629 SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4630 else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4631 && VN_INFO (name)->info.range_info)
4633 SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4634 SSA_NAME_ANTI_RANGE_P (name)
4635 = VN_INFO (name)->range_info_anti_range_p;
4641 void
4642 free_scc_vn (void)
4644 size_t i;
4645 tree name;
4647 delete constant_to_value_id;
4648 constant_to_value_id = NULL;
4649 BITMAP_FREE (constant_value_ids);
4650 shared_lookup_phiargs.release ();
4651 shared_lookup_references.release ();
4652 XDELETEVEC (rpo_numbers);
4654 FOR_EACH_SSA_NAME (i, name, cfun)
4656 if (has_VN_INFO (name)
4657 && VN_INFO (name)->needs_insertion)
4658 release_ssa_name (name);
4660 obstack_free (&vn_ssa_aux_obstack, NULL);
4661 vn_ssa_aux_table.release ();
4663 sccstack.release ();
4664 free_vn_table (valid_info);
4665 XDELETE (valid_info);
4666 free_vn_table (optimistic_info);
4667 XDELETE (optimistic_info);
4669 BITMAP_FREE (const_parms);
4672 /* Set *ID according to RESULT. */
4674 static void
4675 set_value_id_for_result (tree result, unsigned int *id)
4677 if (result && TREE_CODE (result) == SSA_NAME)
4678 *id = VN_INFO (result)->value_id;
4679 else if (result && is_gimple_min_invariant (result))
4680 *id = get_or_alloc_constant_value_id (result);
4681 else
4682 *id = get_next_value_id ();
4685 /* Set the value ids in the valid hash tables. */
4687 static void
4688 set_hashtable_value_ids (void)
4690 vn_nary_op_iterator_type hin;
4691 vn_phi_iterator_type hip;
4692 vn_reference_iterator_type hir;
4693 vn_nary_op_t vno;
4694 vn_reference_t vr;
4695 vn_phi_t vp;
4697 /* Now set the value ids of the things we had put in the hash
4698 table. */
4700 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4701 set_value_id_for_result (vno->result, &vno->value_id);
4703 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4704 set_value_id_for_result (vp->result, &vp->value_id);
4706 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4707 hir)
4708 set_value_id_for_result (vr->result, &vr->value_id);
4711 class sccvn_dom_walker : public dom_walker
4713 public:
4714 sccvn_dom_walker ()
4715 : dom_walker (CDI_DOMINATORS, true), cond_stack (0) {}
4717 virtual edge before_dom_children (basic_block);
4718 virtual void after_dom_children (basic_block);
4720 void record_cond (basic_block,
4721 enum tree_code code, tree lhs, tree rhs, bool value);
4722 void record_conds (basic_block,
4723 enum tree_code code, tree lhs, tree rhs, bool value);
4725 auto_vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4726 cond_stack;
4729 /* Record a temporary condition for the BB and its dominated blocks. */
4731 void
4732 sccvn_dom_walker::record_cond (basic_block bb,
4733 enum tree_code code, tree lhs, tree rhs,
4734 bool value)
4736 tree ops[2] = { lhs, rhs };
4737 vn_nary_op_t old = NULL;
4738 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4739 current_info->nary->remove_elt_with_hash (old, old->hashcode);
4740 vn_nary_op_t cond
4741 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4742 value
4743 ? boolean_true_node
4744 : boolean_false_node, 0);
4745 if (dump_file && (dump_flags & TDF_DETAILS))
4747 fprintf (dump_file, "Recording temporarily ");
4748 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4749 fprintf (dump_file, " %s ", get_tree_code_name (code));
4750 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4751 fprintf (dump_file, " == %s%s\n",
4752 value ? "true" : "false",
4753 old ? " (old entry saved)" : "");
4755 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4758 /* Record temporary conditions for the BB and its dominated blocks
4759 according to LHS CODE RHS == VALUE and its dominated conditions. */
4761 void
4762 sccvn_dom_walker::record_conds (basic_block bb,
4763 enum tree_code code, tree lhs, tree rhs,
4764 bool value)
4766 /* Record the original condition. */
4767 record_cond (bb, code, lhs, rhs, value);
4769 if (!value)
4770 return;
4772 /* Record dominated conditions if the condition is true. Note that
4773 the inversion is already recorded. */
4774 switch (code)
4776 case LT_EXPR:
4777 case GT_EXPR:
4778 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4779 record_cond (bb, NE_EXPR, lhs, rhs, true);
4780 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4781 break;
4783 case EQ_EXPR:
4784 record_cond (bb, LE_EXPR, lhs, rhs, true);
4785 record_cond (bb, GE_EXPR, lhs, rhs, true);
4786 record_cond (bb, LT_EXPR, lhs, rhs, false);
4787 record_cond (bb, GT_EXPR, lhs, rhs, false);
4788 break;
4790 default:
4791 break;
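/* For example, record_conds (bb, LT_EXPR, a, b, true) for the true arm of
   "if (a < b)" records a < b == true and, via the switch above, also
   a <= b == true, a != b == true and a == b == false, so those predicates
   fold in blocks dominated by bb.  */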
4795 /* Restore expressions and values derived from conditionals. */
4797 void
4798 sccvn_dom_walker::after_dom_children (basic_block bb)
4800 while (!cond_stack.is_empty ()
4801 && cond_stack.last ().first == bb)
4803 vn_nary_op_t cond = cond_stack.last ().second.first;
4804 vn_nary_op_t old = cond_stack.last ().second.second;
4805 current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4806 if (old)
4807 vn_nary_op_insert_into (old, current_info->nary, false);
4808 cond_stack.pop ();
4812 /* Value number all statements in BB. */
4814 edge
4815 sccvn_dom_walker::before_dom_children (basic_block bb)
4817 edge e;
4818 edge_iterator ei;
4820 if (dump_file && (dump_flags & TDF_DETAILS))
4821 fprintf (dump_file, "Visiting BB %d\n", bb->index);
4823 /* If we have a single non-backedge predecessor, record the equivalence
4824 from a possible condition on that predecessor edge. */
4825 edge pred_e = NULL;
4826 FOR_EACH_EDGE (e, ei, bb->preds)
4828 /* Ignore simple backedges into this block so that we can still record
4829 conditions in loop headers. */
4830 if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
4831 continue;
4832 if (! pred_e)
4833 pred_e = e;
4834 else
4836 pred_e = NULL;
4837 break;
4840 if (pred_e)
4842 /* Check if there are multiple executable successor edges in
4843 the source block. Otherwise there is no additional info
4844 to be recorded. */
4845 edge e2;
4846 FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4847 if (e2 != pred_e
4848 && e2->flags & EDGE_EXECUTABLE)
4849 break;
4850 if (e2 && (e2->flags & EDGE_EXECUTABLE))
4852 gimple *stmt = last_stmt (pred_e->src);
4853 if (stmt
4854 && gimple_code (stmt) == GIMPLE_COND)
4856 enum tree_code code = gimple_cond_code (stmt);
4857 tree lhs = gimple_cond_lhs (stmt);
4858 tree rhs = gimple_cond_rhs (stmt);
4859 record_conds (bb, code, lhs, rhs,
4860 (pred_e->flags & EDGE_TRUE_VALUE) != 0);
4861 code = invert_tree_comparison (code, HONOR_NANS (lhs));
4862 if (code != ERROR_MARK)
4863 record_conds (bb, code, lhs, rhs,
4864 (pred_e->flags & EDGE_TRUE_VALUE) == 0);
4869 /* Value-number all defs in the basic-block. */
4870 for (gphi_iterator gsi = gsi_start_phis (bb);
4871 !gsi_end_p (gsi); gsi_next (&gsi))
4873 gphi *phi = gsi.phi ();
4874 tree res = PHI_RESULT (phi);
4875 if (!VN_INFO (res)->visited)
4876 DFS (res);
4878 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4879 !gsi_end_p (gsi); gsi_next (&gsi))
4881 ssa_op_iter i;
4882 tree op;
4883 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4884 if (!VN_INFO (op)->visited)
4885 DFS (op);
4888 /* Finally look at the last stmt. */
4889 gimple *stmt = last_stmt (bb);
4890 if (!stmt)
4891 return NULL;
4893 enum gimple_code code = gimple_code (stmt);
4894 if (code != GIMPLE_COND
4895 && code != GIMPLE_SWITCH
4896 && code != GIMPLE_GOTO)
4897 return NULL;
4899 if (dump_file && (dump_flags & TDF_DETAILS))
4901 fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
4902 print_gimple_stmt (dump_file, stmt, 0);
4905 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4906 if value-numbering can prove they are not reachable. Handling
4907 computed gotos is also possible. */
4908 tree val;
4909 switch (code)
4911 case GIMPLE_COND:
4913 tree lhs = vn_valueize (gimple_cond_lhs (stmt));
4914 tree rhs = vn_valueize (gimple_cond_rhs (stmt));
4915 val = gimple_simplify (gimple_cond_code (stmt),
4916 boolean_type_node, lhs, rhs,
4917 NULL, vn_valueize);
4918 /* If that didn't simplify to a constant see if we have recorded
4919 temporary expressions from taken edges. */
4920 if (!val || TREE_CODE (val) != INTEGER_CST)
4922 tree ops[2];
4923 ops[0] = lhs;
4924 ops[1] = rhs;
4925 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
4926 boolean_type_node, ops, NULL);
4928 break;
4930 case GIMPLE_SWITCH:
4931 val = gimple_switch_index (as_a <gswitch *> (stmt));
4932 break;
4933 case GIMPLE_GOTO:
4934 val = gimple_goto_dest (stmt);
4935 break;
4936 default:
4937 gcc_unreachable ();
4939 if (!val)
4940 return NULL;
4942 edge taken = find_taken_edge (bb, vn_valueize (val));
4943 if (!taken)
4944 return NULL;
4946 if (dump_file && (dump_flags & TDF_DETAILS))
4947 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4948 "not executable\n", bb->index, bb->index, taken->dest->index);
4950 return taken;
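/* An illustrative sketch (not part of this file) of a condition that folds
   under the current lattice so that only one successor edge is returned as
   taken; all names are made up.  */
#if 0
int
example (int a)
{
  int b = a;	/* SSA_VAL (b) is a */
  if (a == b)	/* valueizes to a == a and simplifies to true */
    return 1;	/* find_taken_edge returns this edge; the dom walker then
		   marks the other outgoing edge not executable */
  else
    return 0;
}
#endif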
4953 /* Do SCCVN. DEFAULT_VN_WALK_KIND_ specifies how we use the alias
4954 oracle walking during the VN process. Overly large SCCs are not
4955 iterated; their members are dropped to varying instead. */
4957 void
4958 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4960 size_t i;
4962 default_vn_walk_kind = default_vn_walk_kind_;
4964 init_scc_vn ();
4966 /* Collect pointers we know point to readonly memory. */
4967 const_parms = BITMAP_ALLOC (NULL);
4968 tree fnspec = lookup_attribute ("fn spec",
4969 TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
4970 if (fnspec)
4972 fnspec = TREE_VALUE (TREE_VALUE (fnspec));
4973 i = 1;
4974 for (tree arg = DECL_ARGUMENTS (cfun->decl);
4975 arg; arg = DECL_CHAIN (arg), ++i)
4977 if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
4978 break;
4979 if (TREE_STRING_POINTER (fnspec)[i] == 'R'
4980 || TREE_STRING_POINTER (fnspec)[i] == 'r')
4982 tree name = ssa_default_def (cfun, arg);
4983 if (name)
4984 bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
4989 /* Walk all blocks in dominator order, value-numbering stmts
4990 SSA defs and decide whether outgoing edges are not executable. */
4991 sccvn_dom_walker walker;
4992 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4994 /* Initialize the value ids and prune out remaining VN_TOPs
4995 from dead code. */
4996 tree name;
4998 FOR_EACH_SSA_NAME (i, name, cfun)
5000 vn_ssa_aux_t info = VN_INFO (name);
5001 if (!info->visited)
5002 info->valnum = name;
5003 if (info->valnum == name
5004 || info->valnum == VN_TOP)
5005 info->value_id = get_next_value_id ();
5006 else if (is_gimple_min_invariant (info->valnum))
5007 info->value_id = get_or_alloc_constant_value_id (info->valnum);
5010 /* Propagate. */
5011 FOR_EACH_SSA_NAME (i, name, cfun)
5013 vn_ssa_aux_t info = VN_INFO (name);
5014 if (TREE_CODE (info->valnum) == SSA_NAME
5015 && info->valnum != name
5016 && info->value_id != VN_INFO (info->valnum)->value_id)
5017 info->value_id = VN_INFO (info->valnum)->value_id;
5020 set_hashtable_value_ids ();
5022 if (dump_file && (dump_flags & TDF_DETAILS))
5024 fprintf (dump_file, "Value numbers:\n");
5025 FOR_EACH_SSA_NAME (i, name, cfun)
5027 if (VN_INFO (name)->visited
5028 && SSA_VAL (name) != name)
5030 print_generic_expr (dump_file, name);
5031 fprintf (dump_file, " = ");
5032 print_generic_expr (dump_file, SSA_VAL (name));
5033 fprintf (dump_file, "\n");
5039 /* Return the maximum value id we have ever seen. */
5041 unsigned int
5042 get_max_value_id (void)
5044 return next_value_id;
5047 /* Return the next unique value id. */
5049 unsigned int
5050 get_next_value_id (void)
5052 return next_value_id++;
5056 /* Compare two expressions E1 and E2 and return true if they are equal. */
5058 bool
5059 expressions_equal_p (tree e1, tree e2)
5061 /* The obvious case. */
5062 if (e1 == e2)
5063 return true;
5065 /* If either one is VN_TOP consider them equal. */
5066 if (e1 == VN_TOP || e2 == VN_TOP)
5067 return true;
5069 /* If only one of them is null, they cannot be equal. */
5070 if (!e1 || !e2)
5071 return false;
5073 /* Now perform the actual comparison. */
5074 if (TREE_CODE (e1) == TREE_CODE (e2)
5075 && operand_equal_p (e1, e2, OEP_PURE_SAME))
5076 return true;
5078 return false;
5082 /* Return true if the nary operation NARY may trap. This is a copy
5083 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
5085 bool
5086 vn_nary_may_trap (vn_nary_op_t nary)
5088 tree type;
5089 tree rhs2 = NULL_TREE;
5090 bool honor_nans = false;
5091 bool honor_snans = false;
5092 bool fp_operation = false;
5093 bool honor_trapv = false;
5094 bool handled, ret;
5095 unsigned i;
5097 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5098 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5099 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5101 type = nary->type;
5102 fp_operation = FLOAT_TYPE_P (type);
5103 if (fp_operation)
5105 honor_nans = flag_trapping_math && !flag_finite_math_only;
5106 honor_snans = flag_signaling_nans != 0;
5108 else if (INTEGRAL_TYPE_P (type)
5109 && TYPE_OVERFLOW_TRAPS (type))
5110 honor_trapv = true;
5112 if (nary->length >= 2)
5113 rhs2 = nary->op[1];
5114 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5115 honor_trapv,
5116 honor_nans, honor_snans, rhs2,
5117 &handled);
5118 if (handled
5119 && ret)
5120 return true;
5122 for (i = 0; i < nary->length; ++i)
5123 if (tree_could_trap_p (nary->op[i]))
5124 return true;
5126 return false;