[official-gcc.git] / gcc / tree-ssa-sccvn.c
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2017 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimplify.h"
45 #include "flags.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "dumpfile.h"
55 #include "cfgloop.h"
56 #include "params.h"
57 #include "tree-ssa-propagate.h"
58 #include "tree-ssa-sccvn.h"
59 #include "tree-cfg.h"
60 #include "domwalk.h"
61 #include "gimple-iterator.h"
62 #include "gimple-match.h"
64 /* This algorithm is based on the SCC algorithm presented by Keith
65 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
66 (http://citeseer.ist.psu.edu/41805.html). In
67 straight line code, it is equivalent to a regular hash based value
68 numbering that is performed in reverse postorder.
70 For code with cycles, there are two alternatives, both of which
71 require keeping the hashtables separate from the actual list of
72 value numbers for SSA names.
74 1. Iterate value numbering in an RPO walk of the blocks, removing
75 all the entries from the hashtable after each iteration (but
76 keeping the SSA name->value number mapping between iterations).
77 Iterate until it does not change.
79 2. Perform value numbering as part of an SCC walk on the SSA graph,
80 iterating only the cycles in the SSA graph until they do not change
81 (using a separate, optimistic hashtable for value numbering the SCC
82 operands).
84 The second is not just faster in practice (because most SSA graph
85 cycles do not involve all the variables in the graph), it also has
86 some nice properties.
88 One of these nice properties is that when we pop an SCC off the
89 stack, we are guaranteed to have processed all the operands coming from
90 *outside of that SCC*, so we do not need to do anything special to
91 ensure they have value numbers.
93 Another nice property is that the SCC walk is done as part of a DFS
94 of the SSA graph, which makes it easy to perform combining and
95 simplifying operations at the same time.
97 The code below is deliberately written in a way that makes it easy
98 to separate the SCC walk from the other work it does.
100 In order to propagate constants through the code, we track which
101 expressions contain constants, and use those while folding. In
102 theory, we could also track expressions whose value numbers are
103 replaced, in case we end up folding based on expression
104 identities.
106 In order to value number memory, we assign value numbers to vuses.
107 This enables us to note that, for example, stores to the same
108 address of the same value from the same starting memory states are
109 equivalent.
110 TODO:
112   1. We can iterate only the changing portions of the SCCs, but
113 I have not seen an SCC big enough for this to be a win.
114 2. If you differentiate between phi nodes for loops and phi nodes
115 for if-then-else, you can properly consider phi nodes in different
116 blocks for equivalence.
117 3. We could value number vuses in more cases, particularly, whole
118 structure copies.
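/* Illustrative sketch (not part of this pass; the helper names below are
   hypothetical).  It shows the shape of alternative 2 above: a DFS over the
   SSA graph that pops strongly connected components and iterates each one to
   a fixed point using the optimistic table:

     dfs (name):
       mark NAME visited, give it a DFS number, push it on sccstack
       for each SSA use USE in the defining statement of NAME:
	 if USE is not yet visited:
	   dfs (USE)
       if NAME is the root of an SCC:
	 pop the SCC off sccstack
	 if it has a single member:
	   value number it once into the valid table
	 else:
	   do
	     value number every member into the optimistic table
	   while any value number changed
	   copy the final value numbers into the valid table

   Because every operand defined outside the SCC was popped earlier, it
   already has a value number by the time the SCC is processed.  */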
122 static tree *last_vuse_ptr;
123 static vn_lookup_kind vn_walk_kind;
124 static vn_lookup_kind default_vn_walk_kind;
125 bitmap const_parms;
127 /* vn_nary_op hashtable helpers. */
129 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
131 typedef vn_nary_op_s *compare_type;
132 static inline hashval_t hash (const vn_nary_op_s *);
133 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
136 /* Return the computed hashcode for nary operation P1. */
138 inline hashval_t
139 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
141 return vno1->hashcode;
144 /* Compare nary operations P1 and P2 and return true if they are
145 equivalent. */
147 inline bool
148 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
150 return vn_nary_op_eq (vno1, vno2);
153 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
154 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
157 /* vn_phi hashtable helpers. */
159 static int
160 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
162 struct vn_phi_hasher : pointer_hash <vn_phi_s>
164 static inline hashval_t hash (const vn_phi_s *);
165 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
166 static inline void remove (vn_phi_s *);
169 /* Return the computed hashcode for phi operation P1. */
171 inline hashval_t
172 vn_phi_hasher::hash (const vn_phi_s *vp1)
174 return vp1->hashcode;
177 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
179 inline bool
180 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
182 return vn_phi_eq (vp1, vp2);
185 /* Free a phi operation structure VP. */
187 inline void
188 vn_phi_hasher::remove (vn_phi_s *phi)
190 phi->phiargs.release ();
193 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
194 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
197 /* Compare two reference operands P1 and P2 for equality. Return true if
198 they are equal, and false otherwise. */
200 static int
201 vn_reference_op_eq (const void *p1, const void *p2)
203 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
204 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
206 return (vro1->opcode == vro2->opcode
207 /* We do not care for differences in type qualification. */
208 && (vro1->type == vro2->type
209 || (vro1->type && vro2->type
210 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
211 TYPE_MAIN_VARIANT (vro2->type))))
212 && expressions_equal_p (vro1->op0, vro2->op0)
213 && expressions_equal_p (vro1->op1, vro2->op1)
214 && expressions_equal_p (vro1->op2, vro2->op2));
217 /* Free a reference operation structure VP. */
219 static inline void
220 free_reference (vn_reference_s *vr)
222 vr->operands.release ();
226 /* vn_reference hashtable helpers. */
228 struct vn_reference_hasher : pointer_hash <vn_reference_s>
230 static inline hashval_t hash (const vn_reference_s *);
231 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
232 static inline void remove (vn_reference_s *);
235 /* Return the hashcode for a given reference operation P1. */
237 inline hashval_t
238 vn_reference_hasher::hash (const vn_reference_s *vr1)
240 return vr1->hashcode;
243 inline bool
244 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
246 return vn_reference_eq (v, c);
249 inline void
250 vn_reference_hasher::remove (vn_reference_s *v)
252 free_reference (v);
255 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
256 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
259 /* The set of hashtables and alloc_pool's for their items. */
261 typedef struct vn_tables_s
263 vn_nary_op_table_type *nary;
264 vn_phi_table_type *phis;
265 vn_reference_table_type *references;
266 struct obstack nary_obstack;
267 object_allocator<vn_phi_s> *phis_pool;
268 object_allocator<vn_reference_s> *references_pool;
269 } *vn_tables_t;
272 /* vn_constant hashtable helpers. */
274 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
276 static inline hashval_t hash (const vn_constant_s *);
277 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
280 /* Hash table hash function for vn_constant_t. */
282 inline hashval_t
283 vn_constant_hasher::hash (const vn_constant_s *vc1)
285 return vc1->hashcode;
288 /* Hash table equality function for vn_constant_t. */
290 inline bool
291 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
293 if (vc1->hashcode != vc2->hashcode)
294 return false;
296 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
299 static hash_table<vn_constant_hasher> *constant_to_value_id;
300 static bitmap constant_value_ids;
303 /* Valid hashtables storing information we have proven to be
304 correct. */
306 static vn_tables_t valid_info;
308 /* Optimistic hashtables storing information we are making assumptions about
309 during iterations. */
311 static vn_tables_t optimistic_info;
313 /* Pointer to the set of hashtables that is currently being used.
314 Should always point to either the optimistic_info, or the
315 valid_info. */
317 static vn_tables_t current_info;
320 /* Reverse post order index for each basic block. */
322 static int *rpo_numbers;
324 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
326 /* Return the SSA value of the VUSE x, supporting released VDEFs
327 during elimination which will value-number the VDEF to the
328 associated VUSE (but not substitute in the whole lattice). */
330 static inline tree
331 vuse_ssa_val (tree x)
333 if (!x)
334 return NULL_TREE;
338 x = SSA_VAL (x);
340 while (SSA_NAME_IN_FREE_LIST (x));
342 return x;
345 /* This represents the top of the VN lattice, which is the universal
346 value. */
348 tree VN_TOP;
350 /* Unique counter for our value ids. */
352 static unsigned int next_value_id;
354 /* Next DFS number and the stack for strongly connected component
355 detection. */
357 static unsigned int next_dfs_num;
358 static vec<tree> sccstack;
362 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
363 are allocated on an obstack for locality reasons, and to free them
364 without looping over the vec. */
366 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
367 static struct obstack vn_ssa_aux_obstack;
369 /* Return whether there is value numbering information for a given SSA name. */
371 bool
372 has_VN_INFO (tree name)
374 if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
375 return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
376 return false;
379 /* Return the value numbering information for a given SSA name. */
381 vn_ssa_aux_t
382 VN_INFO (tree name)
384 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
385 gcc_checking_assert (res);
386 return res;
389 /* Set the value numbering info for a given SSA name to a given
390 value. */
392 static inline void
393 VN_INFO_SET (tree name, vn_ssa_aux_t value)
395 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
398 /* Initialize the value numbering info for a given SSA name.
399 This should be called just once for every SSA name. */
401 vn_ssa_aux_t
402 VN_INFO_GET (tree name)
404 vn_ssa_aux_t newinfo;
406 gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
407 || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
408 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
409 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
410 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
411 vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
412 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
413 return newinfo;
417 /* Return the vn_kind the expression computed by the stmt should be
418 associated with. */
420 enum vn_kind
421 vn_get_stmt_kind (gimple *stmt)
423 switch (gimple_code (stmt))
425 case GIMPLE_CALL:
426 return VN_REFERENCE;
427 case GIMPLE_PHI:
428 return VN_PHI;
429 case GIMPLE_ASSIGN:
431 enum tree_code code = gimple_assign_rhs_code (stmt);
432 tree rhs1 = gimple_assign_rhs1 (stmt);
433 switch (get_gimple_rhs_class (code))
435 case GIMPLE_UNARY_RHS:
436 case GIMPLE_BINARY_RHS:
437 case GIMPLE_TERNARY_RHS:
438 return VN_NARY;
439 case GIMPLE_SINGLE_RHS:
440 switch (TREE_CODE_CLASS (code))
442 case tcc_reference:
443 /* VOP-less references can go through unary case. */
444 if ((code == REALPART_EXPR
445 || code == IMAGPART_EXPR
446 || code == VIEW_CONVERT_EXPR
447 || code == BIT_FIELD_REF)
448 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
449 return VN_NARY;
451 /* Fallthrough. */
452 case tcc_declaration:
453 return VN_REFERENCE;
455 case tcc_constant:
456 return VN_CONSTANT;
458 default:
459 if (code == ADDR_EXPR)
460 return (is_gimple_min_invariant (rhs1)
461 ? VN_CONSTANT : VN_REFERENCE);
462 else if (code == CONSTRUCTOR)
463 return VN_NARY;
464 return VN_NONE;
466 default:
467 return VN_NONE;
470 default:
471 return VN_NONE;
475 /* Lookup a value id for CONSTANT and return it. If it does not
476 exist returns 0. */
478 unsigned int
479 get_constant_value_id (tree constant)
481 vn_constant_s **slot;
482 struct vn_constant_s vc;
484 vc.hashcode = vn_hash_constant_with_type (constant);
485 vc.constant = constant;
486 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
487 if (slot)
488 return (*slot)->value_id;
489 return 0;
492 /* Lookup a value id for CONSTANT, and if it does not exist, create a
493 new one and return it. If it does exist, return it. */
495 unsigned int
496 get_or_alloc_constant_value_id (tree constant)
498 vn_constant_s **slot;
499 struct vn_constant_s vc;
500 vn_constant_t vcp;
502 vc.hashcode = vn_hash_constant_with_type (constant);
503 vc.constant = constant;
504 slot = constant_to_value_id->find_slot (&vc, INSERT);
505 if (*slot)
506 return (*slot)->value_id;
508 vcp = XNEW (struct vn_constant_s);
509 vcp->hashcode = vc.hashcode;
510 vcp->constant = constant;
511 vcp->value_id = get_next_value_id ();
512 *slot = vcp;
513 bitmap_set_bit (constant_value_ids, vcp->value_id);
514 return vcp->value_id;
517 /* Return true if V is a value id for a constant. */
519 bool
520 value_id_constant_p (unsigned int v)
522 return bitmap_bit_p (constant_value_ids, v);
525 /* Compute the hash for a reference operand VRO1. */
527 static void
528 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
530 hstate.add_int (vro1->opcode);
531 if (vro1->op0)
532 inchash::add_expr (vro1->op0, hstate);
533 if (vro1->op1)
534 inchash::add_expr (vro1->op1, hstate);
535 if (vro1->op2)
536 inchash::add_expr (vro1->op2, hstate);
539 /* Compute a hash for the reference operation VR1 and return it. */
541 static hashval_t
542 vn_reference_compute_hash (const vn_reference_t vr1)
544 inchash::hash hstate;
545 hashval_t result;
546 int i;
547 vn_reference_op_t vro;
548 HOST_WIDE_INT off = -1;
549 bool deref = false;
551 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
553 if (vro->opcode == MEM_REF)
554 deref = true;
555 else if (vro->opcode != ADDR_EXPR)
556 deref = false;
557 if (vro->off != -1)
559 if (off == -1)
560 off = 0;
561 off += vro->off;
563 else
565 if (off != -1
566 && off != 0)
567 hstate.add_int (off);
568 off = -1;
569 if (deref
570 && vro->opcode == ADDR_EXPR)
572 if (vro->op0)
574 tree op = TREE_OPERAND (vro->op0, 0);
575 hstate.add_int (TREE_CODE (op));
576 inchash::add_expr (op, hstate);
579 else
580 vn_reference_op_compute_hash (vro, hstate);
583 result = hstate.end ();
584 /* ??? We would ICE later if we hash instead of adding that in. */
585 if (vr1->vuse)
586 result += SSA_NAME_VERSION (vr1->vuse);
588 return result;
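/* Illustrative sketch (not taken from the sources; the SSA name below is
   hypothetical).  Two properties of the hash above are worth spelling out.
   First, consecutive operands with known constant offsets contribute only
   their sum, so operand strings that distribute the same cumulative offset
   differently still hash alike.  Second, an ADDR_EXPR that feeds a
   dereference is hashed through the decl it takes the address of, so for
   example

     MEM[p_1]   with p_1 valueized to &a
     a          in its canonical MEM[&a] form

   receive the same hash value; vn_reference_eq below applies the matching
   canonicalization when deciding equality.  */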
591 /* Return true if reference operations VR1 and VR2 are equivalent. This
592 means they have the same set of operands and vuses. */
594 bool
595 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
597 unsigned i, j;
599 /* Early out if this is not a hash collision. */
600 if (vr1->hashcode != vr2->hashcode)
601 return false;
603 /* The VOP needs to be the same. */
604 if (vr1->vuse != vr2->vuse)
605 return false;
607 /* If the operands are the same we are done. */
608 if (vr1->operands == vr2->operands)
609 return true;
611 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
612 return false;
614 if (INTEGRAL_TYPE_P (vr1->type)
615 && INTEGRAL_TYPE_P (vr2->type))
617 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
618 return false;
620 else if (INTEGRAL_TYPE_P (vr1->type)
621 && (TYPE_PRECISION (vr1->type)
622 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
623 return false;
624 else if (INTEGRAL_TYPE_P (vr2->type)
625 && (TYPE_PRECISION (vr2->type)
626 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
627 return false;
629 i = 0;
630 j = 0;
633 HOST_WIDE_INT off1 = 0, off2 = 0;
634 vn_reference_op_t vro1, vro2;
635 vn_reference_op_s tem1, tem2;
636 bool deref1 = false, deref2 = false;
637 for (; vr1->operands.iterate (i, &vro1); i++)
639 if (vro1->opcode == MEM_REF)
640 deref1 = true;
641 /* Do not look through a storage order barrier. */
642 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
643 return false;
644 if (vro1->off == -1)
645 break;
646 off1 += vro1->off;
648 for (; vr2->operands.iterate (j, &vro2); j++)
650 if (vro2->opcode == MEM_REF)
651 deref2 = true;
652 /* Do not look through a storage order barrier. */
653 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
654 return false;
655 if (vro2->off == -1)
656 break;
657 off2 += vro2->off;
659 if (off1 != off2)
660 return false;
661 if (deref1 && vro1->opcode == ADDR_EXPR)
663 memset (&tem1, 0, sizeof (tem1));
664 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
665 tem1.type = TREE_TYPE (tem1.op0);
666 tem1.opcode = TREE_CODE (tem1.op0);
667 vro1 = &tem1;
668 deref1 = false;
670 if (deref2 && vro2->opcode == ADDR_EXPR)
672 memset (&tem2, 0, sizeof (tem2));
673 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
674 tem2.type = TREE_TYPE (tem2.op0);
675 tem2.opcode = TREE_CODE (tem2.op0);
676 vro2 = &tem2;
677 deref2 = false;
679 if (deref1 != deref2)
680 return false;
681 if (!vn_reference_op_eq (vro1, vro2))
682 return false;
683 ++j;
684 ++i;
686 while (vr1->operands.length () != i
687 || vr2->operands.length () != j);
689 return true;
692 /* Copy the operations present in load/store REF into RESULT, a vector of
693 vn_reference_op_s's. */
695 static void
696 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
698 if (TREE_CODE (ref) == TARGET_MEM_REF)
700 vn_reference_op_s temp;
702 result->reserve (3);
704 memset (&temp, 0, sizeof (temp));
705 temp.type = TREE_TYPE (ref);
706 temp.opcode = TREE_CODE (ref);
707 temp.op0 = TMR_INDEX (ref);
708 temp.op1 = TMR_STEP (ref);
709 temp.op2 = TMR_OFFSET (ref);
710 temp.off = -1;
711 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
712 temp.base = MR_DEPENDENCE_BASE (ref);
713 result->quick_push (temp);
715 memset (&temp, 0, sizeof (temp));
716 temp.type = NULL_TREE;
717 temp.opcode = ERROR_MARK;
718 temp.op0 = TMR_INDEX2 (ref);
719 temp.off = -1;
720 result->quick_push (temp);
722 memset (&temp, 0, sizeof (temp));
723 temp.type = NULL_TREE;
724 temp.opcode = TREE_CODE (TMR_BASE (ref));
725 temp.op0 = TMR_BASE (ref);
726 temp.off = -1;
727 result->quick_push (temp);
728 return;
731 /* For non-calls, store the information that makes up the address. */
732 tree orig = ref;
733 while (ref)
735 vn_reference_op_s temp;
737 memset (&temp, 0, sizeof (temp));
738 temp.type = TREE_TYPE (ref);
739 temp.opcode = TREE_CODE (ref);
740 temp.off = -1;
742 switch (temp.opcode)
744 case MODIFY_EXPR:
745 temp.op0 = TREE_OPERAND (ref, 1);
746 break;
747 case WITH_SIZE_EXPR:
748 temp.op0 = TREE_OPERAND (ref, 1);
749 temp.off = 0;
750 break;
751 case MEM_REF:
752 /* The base address gets its own vn_reference_op_s structure. */
753 temp.op0 = TREE_OPERAND (ref, 1);
755 offset_int off = mem_ref_offset (ref);
756 if (wi::fits_shwi_p (off))
757 temp.off = off.to_shwi ();
759 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
760 temp.base = MR_DEPENDENCE_BASE (ref);
761 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
762 break;
763 case BIT_FIELD_REF:
764 /* Record bits, position and storage order. */
765 temp.op0 = TREE_OPERAND (ref, 1);
766 temp.op1 = TREE_OPERAND (ref, 2);
767 if (tree_fits_shwi_p (TREE_OPERAND (ref, 2)))
769 HOST_WIDE_INT off = tree_to_shwi (TREE_OPERAND (ref, 2));
770 if (off % BITS_PER_UNIT == 0)
771 temp.off = off / BITS_PER_UNIT;
773 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
774 break;
775 case COMPONENT_REF:
776 /* The field decl is enough to unambiguously specify the field,
777 a matching type is not necessary and a mismatching type
778 is always a spurious difference. */
779 temp.type = NULL_TREE;
780 temp.op0 = TREE_OPERAND (ref, 1);
781 temp.op1 = TREE_OPERAND (ref, 2);
783 tree this_offset = component_ref_field_offset (ref);
784 if (this_offset
785 && TREE_CODE (this_offset) == INTEGER_CST)
787 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
788 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
790 offset_int off
791 = (wi::to_offset (this_offset)
792 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
793 if (wi::fits_shwi_p (off)
794 		    /* Prohibit value-numbering zero offset components
795 of addresses the same before the pass folding
796 __builtin_object_size had a chance to run
797 (checking cfun->after_inlining does the
798 trick here). */
799 && (TREE_CODE (orig) != ADDR_EXPR
800 || off != 0
801 || cfun->after_inlining))
802 temp.off = off.to_shwi ();
806 break;
807 case ARRAY_RANGE_REF:
808 case ARRAY_REF:
810 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
811 /* Record index as operand. */
812 temp.op0 = TREE_OPERAND (ref, 1);
813 /* Always record lower bounds and element size. */
814 temp.op1 = array_ref_low_bound (ref);
815 /* But record element size in units of the type alignment. */
816 temp.op2 = TREE_OPERAND (ref, 3);
817 temp.align = eltype->type_common.align;
818 if (! temp.op2)
819 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
820 size_int (TYPE_ALIGN_UNIT (eltype)));
821 if (TREE_CODE (temp.op0) == INTEGER_CST
822 && TREE_CODE (temp.op1) == INTEGER_CST
823 && TREE_CODE (temp.op2) == INTEGER_CST)
825 offset_int off = ((wi::to_offset (temp.op0)
826 - wi::to_offset (temp.op1))
827 * wi::to_offset (temp.op2)
828 * vn_ref_op_align_unit (&temp));
829 if (wi::fits_shwi_p (off))
830 temp.off = off.to_shwi();
833 break;
834 case VAR_DECL:
835 if (DECL_HARD_REGISTER (ref))
837 temp.op0 = ref;
838 break;
840 /* Fallthru. */
841 case PARM_DECL:
842 case CONST_DECL:
843 case RESULT_DECL:
844 /* Canonicalize decls to MEM[&decl] which is what we end up with
845 when valueizing MEM[ptr] with ptr = &decl. */
846 temp.opcode = MEM_REF;
847 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
848 temp.off = 0;
849 result->safe_push (temp);
850 temp.opcode = ADDR_EXPR;
851 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
852 temp.type = TREE_TYPE (temp.op0);
853 temp.off = -1;
854 break;
855 case STRING_CST:
856 case INTEGER_CST:
857 case COMPLEX_CST:
858 case VECTOR_CST:
859 case REAL_CST:
860 case FIXED_CST:
861 case CONSTRUCTOR:
862 case SSA_NAME:
863 temp.op0 = ref;
864 break;
865 case ADDR_EXPR:
866 if (is_gimple_min_invariant (ref))
868 temp.op0 = ref;
869 break;
871 break;
872 /* These are only interesting for their operands, their
873 existence, and their type. They will never be the last
874 	 ref in the chain of references (i.e. they require an
875 operand), so we don't have to put anything
876 for op* as it will be handled by the iteration */
877 case REALPART_EXPR:
878 temp.off = 0;
879 break;
880 case VIEW_CONVERT_EXPR:
881 temp.off = 0;
882 temp.reverse = storage_order_barrier_p (ref);
883 break;
884 case IMAGPART_EXPR:
885 /* This is only interesting for its constant offset. */
886 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
887 break;
888 default:
889 gcc_unreachable ();
891 result->safe_push (temp);
893 if (REFERENCE_CLASS_P (ref)
894 || TREE_CODE (ref) == MODIFY_EXPR
895 || TREE_CODE (ref) == WITH_SIZE_EXPR
896 || (TREE_CODE (ref) == ADDR_EXPR
897 && !is_gimple_min_invariant (ref)))
898 ref = TREE_OPERAND (ref, 0);
899 else
900 ref = NULL_TREE;
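/* Illustrative sketch (the variables are hypothetical): for a load from
   a.b[i], where a is a plain VAR_DECL, the walk above records the operands
   outermost to innermost, roughly

     { ARRAY_REF      op0 = i, op1 = <lower bound>,
		      op2 = <element size in units of the element alignment> }
     { COMPONENT_REF  op0 = FIELD_DECL b }
     { MEM_REF        op0 = 0, off = 0 }
     { ADDR_EXPR      op0 = &a }

   i.e. the decl itself becomes the canonical MEM[&a] pair, matching what
   valueizing MEM[p] with p = &a would produce.  */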
904 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
905 operands in *OPS, the reference alias set SET and the reference type TYPE.
906 Return true if something useful was produced. */
908 bool
909 ao_ref_init_from_vn_reference (ao_ref *ref,
910 alias_set_type set, tree type,
911 vec<vn_reference_op_s> ops)
913 vn_reference_op_t op;
914 unsigned i;
915 tree base = NULL_TREE;
916 tree *op0_p = &base;
917 offset_int offset = 0;
918 offset_int max_size;
919 offset_int size = -1;
920 tree size_tree = NULL_TREE;
921 alias_set_type base_alias_set = -1;
923 /* First get the final access size from just the outermost expression. */
924 op = &ops[0];
925 if (op->opcode == COMPONENT_REF)
926 size_tree = DECL_SIZE (op->op0);
927 else if (op->opcode == BIT_FIELD_REF)
928 size_tree = op->op0;
929 else
931 machine_mode mode = TYPE_MODE (type);
932 if (mode == BLKmode)
933 size_tree = TYPE_SIZE (type);
934 else
935 size = int (GET_MODE_BITSIZE (mode));
937 if (size_tree != NULL_TREE
938 && TREE_CODE (size_tree) == INTEGER_CST)
939 size = wi::to_offset (size_tree);
941 /* Initially, maxsize is the same as the accessed element size.
942 In the following it will only grow (or become -1). */
943 max_size = size;
945 /* Compute cumulative bit-offset for nested component-refs and array-refs,
946 and find the ultimate containing object. */
947 FOR_EACH_VEC_ELT (ops, i, op)
949 switch (op->opcode)
951 /* These may be in the reference ops, but we cannot do anything
952 sensible with them here. */
953 case ADDR_EXPR:
954 /* Apart from ADDR_EXPR arguments to MEM_REF. */
955 if (base != NULL_TREE
956 && TREE_CODE (base) == MEM_REF
957 && op->op0
958 && DECL_P (TREE_OPERAND (op->op0, 0)))
960 vn_reference_op_t pop = &ops[i-1];
961 base = TREE_OPERAND (op->op0, 0);
962 if (pop->off == -1)
964 max_size = -1;
965 offset = 0;
967 else
968 offset += pop->off * BITS_PER_UNIT;
969 op0_p = NULL;
970 break;
972 /* Fallthru. */
973 case CALL_EXPR:
974 return false;
976 /* Record the base objects. */
977 case MEM_REF:
978 base_alias_set = get_deref_alias_set (op->op0);
979 *op0_p = build2 (MEM_REF, op->type,
980 NULL_TREE, op->op0);
981 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
982 MR_DEPENDENCE_BASE (*op0_p) = op->base;
983 op0_p = &TREE_OPERAND (*op0_p, 0);
984 break;
986 case VAR_DECL:
987 case PARM_DECL:
988 case RESULT_DECL:
989 case SSA_NAME:
990 *op0_p = op->op0;
991 op0_p = NULL;
992 break;
994 /* And now the usual component-reference style ops. */
995 case BIT_FIELD_REF:
996 offset += wi::to_offset (op->op1);
997 break;
999 case COMPONENT_REF:
1001 tree field = op->op0;
1002 /* We do not have a complete COMPONENT_REF tree here so we
1003 cannot use component_ref_field_offset. Do the interesting
1004 parts manually. */
1005 tree this_offset = DECL_FIELD_OFFSET (field);
1007 if (op->op1 || TREE_CODE (this_offset) != INTEGER_CST)
1008 max_size = -1;
1009 else
1011 offset_int woffset = (wi::to_offset (this_offset)
1012 << LOG2_BITS_PER_UNIT);
1013 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1014 offset += woffset;
1016 break;
1019 case ARRAY_RANGE_REF:
1020 case ARRAY_REF:
1021 /* We recorded the lower bound and the element size. */
1022 if (TREE_CODE (op->op0) != INTEGER_CST
1023 || TREE_CODE (op->op1) != INTEGER_CST
1024 || TREE_CODE (op->op2) != INTEGER_CST)
1025 max_size = -1;
1026 else
1028 offset_int woffset
1029 = wi::sext (wi::to_offset (op->op0) - wi::to_offset (op->op1),
1030 TYPE_PRECISION (TREE_TYPE (op->op0)));
1031 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1032 woffset <<= LOG2_BITS_PER_UNIT;
1033 offset += woffset;
1035 break;
1037 case REALPART_EXPR:
1038 break;
1040 case IMAGPART_EXPR:
1041 offset += size;
1042 break;
1044 case VIEW_CONVERT_EXPR:
1045 break;
1047 case STRING_CST:
1048 case INTEGER_CST:
1049 case COMPLEX_CST:
1050 case VECTOR_CST:
1051 case REAL_CST:
1052 case CONSTRUCTOR:
1053 case CONST_DECL:
1054 return false;
1056 default:
1057 return false;
1061 if (base == NULL_TREE)
1062 return false;
1064 ref->ref = NULL_TREE;
1065 ref->base = base;
1066 ref->ref_alias_set = set;
1067 if (base_alias_set != -1)
1068 ref->base_alias_set = base_alias_set;
1069 else
1070 ref->base_alias_set = get_alias_set (base);
1071 /* We discount volatiles from value-numbering elsewhere. */
1072 ref->volatile_p = false;
1074 if (!wi::fits_shwi_p (size) || wi::neg_p (size))
1076 ref->offset = 0;
1077 ref->size = -1;
1078 ref->max_size = -1;
1079 return true;
1082 ref->size = size.to_shwi ();
1084 if (!wi::fits_shwi_p (offset))
1086 ref->offset = 0;
1087 ref->max_size = -1;
1088 return true;
1091 ref->offset = offset.to_shwi ();
1093 if (!wi::fits_shwi_p (max_size) || wi::neg_p (max_size))
1094 ref->max_size = -1;
1095 else
1096 ref->max_size = max_size.to_shwi ();
1098 return true;
1101 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1102 vn_reference_op_s's. */
1104 static void
1105 copy_reference_ops_from_call (gcall *call,
1106 vec<vn_reference_op_s> *result)
1108 vn_reference_op_s temp;
1109 unsigned i;
1110 tree lhs = gimple_call_lhs (call);
1111 int lr;
1113   /* If two calls have a different non-SSA lhs, vdef value numbers should be
1114 different. By adding the lhs here in the vector, we ensure that the
1115 hashcode is different, guaranteeing a different value number. */
1116 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1118 memset (&temp, 0, sizeof (temp));
1119 temp.opcode = MODIFY_EXPR;
1120 temp.type = TREE_TYPE (lhs);
1121 temp.op0 = lhs;
1122 temp.off = -1;
1123 result->safe_push (temp);
1126 /* Copy the type, opcode, function, static chain and EH region, if any. */
1127 memset (&temp, 0, sizeof (temp));
1128 temp.type = gimple_call_return_type (call);
1129 temp.opcode = CALL_EXPR;
1130 temp.op0 = gimple_call_fn (call);
1131 temp.op1 = gimple_call_chain (call);
1132 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1133 temp.op2 = size_int (lr);
1134 temp.off = -1;
1135 if (gimple_call_with_bounds_p (call))
1136 temp.with_bounds = 1;
1137 result->safe_push (temp);
1139 /* Copy the call arguments. As they can be references as well,
1140 just chain them together. */
1141 for (i = 0; i < gimple_call_num_args (call); ++i)
1143 tree callarg = gimple_call_arg (call, i);
1144 copy_reference_ops_from_ref (callarg, result);
1148 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1149 *I_P to point to the last element of the replacement. */
1150 static bool
1151 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1152 unsigned int *i_p)
1154 unsigned int i = *i_p;
1155 vn_reference_op_t op = &(*ops)[i];
1156 vn_reference_op_t mem_op = &(*ops)[i - 1];
1157 tree addr_base;
1158 HOST_WIDE_INT addr_offset = 0;
1160 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1161 from .foo.bar to the preceding MEM_REF offset and replace the
1162 address with &OBJ. */
1163 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1164 &addr_offset);
1165 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1166 if (addr_base != TREE_OPERAND (op->op0, 0))
1168 offset_int off = offset_int::from (mem_op->op0, SIGNED);
1169 off += addr_offset;
1170 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1171 op->op0 = build_fold_addr_expr (addr_base);
1172 if (tree_fits_shwi_p (mem_op->op0))
1173 mem_op->off = tree_to_shwi (mem_op->op0);
1174 else
1175 mem_op->off = -1;
1176 return true;
1178 return false;
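/* Illustrative sketch (the structure and offsets are hypothetical): for the
   operand pair describing MEM[&s.a.b + 8], get_addr_base_and_unit_offset
   reduces &s.a.b to &s plus a constant byte offset, say 12, so the pair is
   rewritten in place to describe MEM[&s + 20]: the MEM_REF operand absorbs
   the component offsets and the ADDR_EXPR operand becomes plain &s.  */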
1181 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS by looking
1182    through the definition of the address SSA name.  Updates *I_P to point to the last element of the replacement.  */
1183 static bool
1184 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1185 unsigned int *i_p)
1187 unsigned int i = *i_p;
1188 vn_reference_op_t op = &(*ops)[i];
1189 vn_reference_op_t mem_op = &(*ops)[i - 1];
1190 gimple *def_stmt;
1191 enum tree_code code;
1192 offset_int off;
1194 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1195 if (!is_gimple_assign (def_stmt))
1196 return false;
1198 code = gimple_assign_rhs_code (def_stmt);
1199 if (code != ADDR_EXPR
1200 && code != POINTER_PLUS_EXPR)
1201 return false;
1203 off = offset_int::from (mem_op->op0, SIGNED);
1205 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1206 from .foo.bar to the preceding MEM_REF offset and replace the
1207 address with &OBJ. */
1208 if (code == ADDR_EXPR)
1210 tree addr, addr_base;
1211 HOST_WIDE_INT addr_offset;
1213 addr = gimple_assign_rhs1 (def_stmt);
1214 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1215 &addr_offset);
1216       /* If that didn't work because the address isn't invariant, propagate
1217 the reference tree from the address operation in case the current
1218 dereference isn't offsetted. */
1219 if (!addr_base
1220 && *i_p == ops->length () - 1
1221 && off == 0
1222 /* This makes us disable this transform for PRE where the
1223 reference ops might be also used for code insertion which
1224 is invalid. */
1225 && default_vn_walk_kind == VN_WALKREWRITE)
1227 auto_vec<vn_reference_op_s, 32> tem;
1228 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1229 /* Make sure to preserve TBAA info. The only objects not
1230 wrapped in MEM_REFs that can have their address taken are
1231 STRING_CSTs. */
1232 if (tem.length () >= 2
1233 && tem[tem.length () - 2].opcode == MEM_REF)
1235 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1236 new_mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1237 new_mem_op->op0);
1239 else
1240 gcc_assert (tem.last ().opcode == STRING_CST);
1241 ops->pop ();
1242 ops->pop ();
1243 ops->safe_splice (tem);
1244 --*i_p;
1245 return true;
1247 if (!addr_base
1248 || TREE_CODE (addr_base) != MEM_REF)
1249 return false;
1251 off += addr_offset;
1252 off += mem_ref_offset (addr_base);
1253 op->op0 = TREE_OPERAND (addr_base, 0);
1255 else
1257 tree ptr, ptroff;
1258 ptr = gimple_assign_rhs1 (def_stmt);
1259 ptroff = gimple_assign_rhs2 (def_stmt);
1260 if (TREE_CODE (ptr) != SSA_NAME
1261 || TREE_CODE (ptroff) != INTEGER_CST)
1262 return false;
1264 off += wi::to_offset (ptroff);
1265 op->op0 = ptr;
1268 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1269 if (tree_fits_shwi_p (mem_op->op0))
1270 mem_op->off = tree_to_shwi (mem_op->op0);
1271 else
1272 mem_op->off = -1;
1273 if (TREE_CODE (op->op0) == SSA_NAME)
1274 op->op0 = SSA_VAL (op->op0);
1275 if (TREE_CODE (op->op0) != SSA_NAME)
1276 op->opcode = TREE_CODE (op->op0);
1278 /* And recurse. */
1279 if (TREE_CODE (op->op0) == SSA_NAME)
1280 vn_reference_maybe_forwprop_address (ops, i_p);
1281 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1282 vn_reference_fold_indirect (ops, i_p);
1283 return true;
1286 /* Optimize the reference REF to a constant if possible or return
1287 NULL_TREE if not. */
1289 tree
1290 fully_constant_vn_reference_p (vn_reference_t ref)
1292 vec<vn_reference_op_s> operands = ref->operands;
1293 vn_reference_op_t op;
1295 /* Try to simplify the translated expression if it is
1296 a call to a builtin function with at most two arguments. */
1297 op = &operands[0];
1298 if (op->opcode == CALL_EXPR
1299 && TREE_CODE (op->op0) == ADDR_EXPR
1300 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1301 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1302 && operands.length () >= 2
1303 && operands.length () <= 3)
1305 vn_reference_op_t arg0, arg1 = NULL;
1306 bool anyconst = false;
1307 arg0 = &operands[1];
1308 if (operands.length () > 2)
1309 arg1 = &operands[2];
1310 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1311 || (arg0->opcode == ADDR_EXPR
1312 && is_gimple_min_invariant (arg0->op0)))
1313 anyconst = true;
1314 if (arg1
1315 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1316 || (arg1->opcode == ADDR_EXPR
1317 && is_gimple_min_invariant (arg1->op0))))
1318 anyconst = true;
1319 if (anyconst)
1321 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1322 arg1 ? 2 : 1,
1323 arg0->op0,
1324 arg1 ? arg1->op0 : NULL);
1325 if (folded
1326 && TREE_CODE (folded) == NOP_EXPR)
1327 folded = TREE_OPERAND (folded, 0);
1328 if (folded
1329 && is_gimple_min_invariant (folded))
1330 return folded;
1334 /* Simplify reads from constants or constant initializers. */
1335 else if (BITS_PER_UNIT == 8
1336 && is_gimple_reg_type (ref->type)
1337 && (!INTEGRAL_TYPE_P (ref->type)
1338 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1340 HOST_WIDE_INT off = 0;
1341 HOST_WIDE_INT size;
1342 if (INTEGRAL_TYPE_P (ref->type))
1343 size = TYPE_PRECISION (ref->type);
1344 else
1345 size = tree_to_shwi (TYPE_SIZE (ref->type));
1346 if (size % BITS_PER_UNIT != 0
1347 || size > MAX_BITSIZE_MODE_ANY_MODE)
1348 return NULL_TREE;
1349 size /= BITS_PER_UNIT;
1350 unsigned i;
1351 for (i = 0; i < operands.length (); ++i)
1353 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1355 ++i;
1356 break;
1358 if (operands[i].off == -1)
1359 return NULL_TREE;
1360 off += operands[i].off;
1361 if (operands[i].opcode == MEM_REF)
1363 ++i;
1364 break;
1367 vn_reference_op_t base = &operands[--i];
1368 tree ctor = error_mark_node;
1369 tree decl = NULL_TREE;
1370 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1371 ctor = base->op0;
1372 else if (base->opcode == MEM_REF
1373 && base[1].opcode == ADDR_EXPR
1374 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1375 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
1377 decl = TREE_OPERAND (base[1].op0, 0);
1378 ctor = ctor_for_folding (decl);
1380 if (ctor == NULL_TREE)
1381 return build_zero_cst (ref->type);
1382 else if (ctor != error_mark_node)
1384 if (decl)
1386 tree res = fold_ctor_reference (ref->type, ctor,
1387 off * BITS_PER_UNIT,
1388 size * BITS_PER_UNIT, decl);
1389 if (res)
1391 STRIP_USELESS_TYPE_CONVERSION (res);
1392 if (is_gimple_min_invariant (res))
1393 return res;
1396 else
1398 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1399 int len = native_encode_expr (ctor, buf, size, off);
1400 if (len > 0)
1401 return native_interpret_expr (ref->type, buf, len);
1406 return NULL_TREE;
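/* Illustrative sketch (the declaration is hypothetical): for

     static const int tab[4] = { 1, 2, 3, 4 };
     ... = tab[2];

   the operands describe a 4-byte read at offset 8 from MEM[&tab].  The code
   above retrieves tab's initializer via ctor_for_folding and either folds
   the read with fold_ctor_reference or, failing that, extracts the bytes
   with native_encode_expr / native_interpret_expr, yielding the constant 3.  */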
1409 /* Return true if OPS contain a storage order barrier. */
1411 static bool
1412 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1414 vn_reference_op_t op;
1415 unsigned i;
1417 FOR_EACH_VEC_ELT (ops, i, op)
1418 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1419 return true;
1421 return false;
1424 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1425 structures into their value numbers. This is done in-place, and
1426 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1427 whether any operands were valueized. */
1429 static vec<vn_reference_op_s>
1430 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1432 vn_reference_op_t vro;
1433 unsigned int i;
1435 *valueized_anything = false;
1437 FOR_EACH_VEC_ELT (orig, i, vro)
1439 if (vro->opcode == SSA_NAME
1440 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1442 tree tem = SSA_VAL (vro->op0);
1443 if (tem != vro->op0)
1445 *valueized_anything = true;
1446 vro->op0 = tem;
1448 /* If it transforms from an SSA_NAME to a constant, update
1449 the opcode. */
1450 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1451 vro->opcode = TREE_CODE (vro->op0);
1453 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1455 tree tem = SSA_VAL (vro->op1);
1456 if (tem != vro->op1)
1458 *valueized_anything = true;
1459 vro->op1 = tem;
1462 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1464 tree tem = SSA_VAL (vro->op2);
1465 if (tem != vro->op2)
1467 *valueized_anything = true;
1468 vro->op2 = tem;
1471 /* If it transforms from an SSA_NAME to an address, fold with
1472 a preceding indirect reference. */
1473 if (i > 0
1474 && vro->op0
1475 && TREE_CODE (vro->op0) == ADDR_EXPR
1476 && orig[i - 1].opcode == MEM_REF)
1478 if (vn_reference_fold_indirect (&orig, &i))
1479 *valueized_anything = true;
1481 else if (i > 0
1482 && vro->opcode == SSA_NAME
1483 && orig[i - 1].opcode == MEM_REF)
1485 if (vn_reference_maybe_forwprop_address (&orig, &i))
1486 *valueized_anything = true;
1488 /* If it transforms a non-constant ARRAY_REF into a constant
1489 one, adjust the constant offset. */
1490 else if (vro->opcode == ARRAY_REF
1491 && vro->off == -1
1492 && TREE_CODE (vro->op0) == INTEGER_CST
1493 && TREE_CODE (vro->op1) == INTEGER_CST
1494 && TREE_CODE (vro->op2) == INTEGER_CST)
1496 offset_int off = ((wi::to_offset (vro->op0)
1497 - wi::to_offset (vro->op1))
1498 * wi::to_offset (vro->op2)
1499 * vn_ref_op_align_unit (vro));
1500 if (wi::fits_shwi_p (off))
1501 vro->off = off.to_shwi ();
1505 return orig;
1508 static vec<vn_reference_op_s>
1509 valueize_refs (vec<vn_reference_op_s> orig)
1511 bool tem;
1512 return valueize_refs_1 (orig, &tem);
1515 static vec<vn_reference_op_s> shared_lookup_references;
1517 /* Create a vector of vn_reference_op_s structures from REF, a
1518 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1519 this function. *VALUEIZED_ANYTHING will specify whether any
1520 operands were valueized. */
1522 static vec<vn_reference_op_s>
1523 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1525 if (!ref)
1526 return vNULL;
1527 shared_lookup_references.truncate (0);
1528 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1529 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1530 valueized_anything);
1531 return shared_lookup_references;
1534 /* Create a vector of vn_reference_op_s structures from CALL, a
1535 call statement. The vector is shared among all callers of
1536 this function. */
1538 static vec<vn_reference_op_s>
1539 valueize_shared_reference_ops_from_call (gcall *call)
1541 if (!call)
1542 return vNULL;
1543 shared_lookup_references.truncate (0);
1544 copy_reference_ops_from_call (call, &shared_lookup_references);
1545 shared_lookup_references = valueize_refs (shared_lookup_references);
1546 return shared_lookup_references;
1549 /* Lookup a SCCVN reference operation VR in the current hash table.
1550 Returns the resulting value number if it exists in the hash table,
1551 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1552 vn_reference_t stored in the hashtable if something is found. */
1554 static tree
1555 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1557 vn_reference_s **slot;
1558 hashval_t hash;
1560 hash = vr->hashcode;
1561 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1562 if (!slot && current_info == optimistic_info)
1563 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1564 if (slot)
1566 if (vnresult)
1567 *vnresult = (vn_reference_t)*slot;
1568 return ((vn_reference_t)*slot)->result;
1571 return NULL_TREE;
1574 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1575 with the current VUSE and performs the expression lookup. */
1577 static void *
1578 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1579 unsigned int cnt, void *vr_)
1581 vn_reference_t vr = (vn_reference_t)vr_;
1582 vn_reference_s **slot;
1583 hashval_t hash;
1585 /* This bounds the stmt walks we perform on reference lookups
1586 to O(1) instead of O(N) where N is the number of dominating
1587 stores. */
1588 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1589 return (void *)-1;
1591 if (last_vuse_ptr)
1592 *last_vuse_ptr = vuse;
1594 /* Fixup vuse and hash. */
1595 if (vr->vuse)
1596 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1597 vr->vuse = vuse_ssa_val (vuse);
1598 if (vr->vuse)
1599 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1601 hash = vr->hashcode;
1602 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1603 if (!slot && current_info == optimistic_info)
1604 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1605 if (slot)
1606 return *slot;
1608 return NULL;
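/* Note: vn_reference_compute_hash folds the VUSE in additively
   (result += SSA_NAME_VERSION (vuse)), so stepping the walk to a new VUSE
   only needs the adjustment performed above,

     hashcode = hashcode - SSA_NAME_VERSION (old vuse)
			 + SSA_NAME_VERSION (new vuse);

   rather than a full re-hash of the operand vector.  */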
1611 /* Lookup an existing or insert a new vn_reference entry into the
1612 value table for the VUSE, SET, TYPE, OPERANDS reference which
1613 has the value VALUE which is either a constant or an SSA name. */
1615 static vn_reference_t
1616 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1617 alias_set_type set,
1618 tree type,
1619 vec<vn_reference_op_s,
1620 va_heap> operands,
1621 tree value)
1623 vn_reference_s vr1;
1624 vn_reference_t result;
1625 unsigned value_id;
1626 vr1.vuse = vuse;
1627 vr1.operands = operands;
1628 vr1.type = type;
1629 vr1.set = set;
1630 vr1.hashcode = vn_reference_compute_hash (&vr1);
1631 if (vn_reference_lookup_1 (&vr1, &result))
1632 return result;
1633 if (TREE_CODE (value) == SSA_NAME)
1634 value_id = VN_INFO (value)->value_id;
1635 else
1636 value_id = get_or_alloc_constant_value_id (value);
1637 return vn_reference_insert_pieces (vuse, set, type,
1638 operands.copy (), value, value_id);
1641 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *stmt, tree result);
1643 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
1645 static tree
1646 vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops)
1648 if (!rcode.is_tree_code ())
1649 return NULL_TREE;
1650 vn_nary_op_t vnresult = NULL;
1651 return vn_nary_op_lookup_pieces (TREE_CODE_LENGTH ((tree_code) rcode),
1652 (tree_code) rcode, type, ops, &vnresult);
1655 /* Return a value-number for RCODE OPS... either by looking up an existing
1656 value-number for the simplified result or by inserting the operation if
1657 INSERT is true. */
1659 static tree
1660 vn_nary_build_or_lookup_1 (code_helper rcode, tree type, tree *ops,
1661 bool insert)
1663 tree result = NULL_TREE;
1664 /* We will be creating a value number for
1665 RCODE (OPS...).
1666 So first simplify and lookup this expression to see if it
1667 is already available. */
1668 mprts_hook = vn_lookup_simplify_result;
1669 bool res = false;
1670 switch (TREE_CODE_LENGTH ((tree_code) rcode))
1672 case 1:
1673 res = gimple_resimplify1 (NULL, &rcode, type, ops, vn_valueize);
1674 break;
1675 case 2:
1676 res = gimple_resimplify2 (NULL, &rcode, type, ops, vn_valueize);
1677 break;
1678 case 3:
1679 res = gimple_resimplify3 (NULL, &rcode, type, ops, vn_valueize);
1680 break;
1682 mprts_hook = NULL;
1683 gimple *new_stmt = NULL;
1684 if (res
1685 && gimple_simplified_result_is_gimple_val (rcode, ops))
1686 /* The expression is already available. */
1687 result = ops[0];
1688 else
1690 tree val = vn_lookup_simplify_result (rcode, type, ops);
1691 if (!val && insert)
1693 gimple_seq stmts = NULL;
1694 result = maybe_push_res_to_seq (rcode, type, ops, &stmts);
1695 if (result)
1697 gcc_assert (gimple_seq_singleton_p (stmts));
1698 new_stmt = gimple_seq_first_stmt (stmts);
1701 else
1702 /* The expression is already available. */
1703 result = val;
1705 if (new_stmt)
1707 /* The expression is not yet available, value-number lhs to
1708 the new SSA_NAME we created. */
1709 /* Initialize value-number information properly. */
1710 VN_INFO_GET (result)->valnum = result;
1711 VN_INFO (result)->value_id = get_next_value_id ();
1712 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1713 new_stmt);
1714 VN_INFO (result)->needs_insertion = true;
1715 /* ??? PRE phi-translation inserts NARYs without corresponding
1716 SSA name result. Re-use those but set their result according
1717 to the stmt we just built. */
1718 vn_nary_op_t nary = NULL;
1719 vn_nary_op_lookup_stmt (new_stmt, &nary);
1720 if (nary)
1722 gcc_assert (nary->result == NULL_TREE);
1723 nary->result = gimple_assign_lhs (new_stmt);
1725 /* As all "inserted" statements are singleton SCCs, insert
1726 to the valid table. This is strictly needed to
1727 avoid re-generating new value SSA_NAMEs for the same
1728 expression during SCC iteration over and over (the
1729 optimistic table gets cleared after each iteration).
1730 We do not need to insert into the optimistic table, as
1731 lookups there will fall back to the valid table. */
1732 else if (current_info == optimistic_info)
1734 current_info = valid_info;
1735 vn_nary_op_insert_stmt (new_stmt, result);
1736 current_info = optimistic_info;
1738 else
1739 vn_nary_op_insert_stmt (new_stmt, result);
1740 if (dump_file && (dump_flags & TDF_DETAILS))
1742 fprintf (dump_file, "Inserting name ");
1743 print_generic_expr (dump_file, result);
1744 fprintf (dump_file, " for expression ");
1745 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1746 fprintf (dump_file, "\n");
1749 return result;
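/* Illustrative sketch (the SSA names are hypothetical): asked for the value
   of x_1 + 0, gimple_resimplify2 reduces the expression to x_1, which is
   already a gimple value, so that name is returned and nothing is inserted.
   Asked for a_2 + b_3 with no further simplification, the lookup may find an
   earlier identical NARY and reuse its result; only when INSERT is true and
   no value exists is a fresh SSA name created, value-numbered to itself and
   recorded in the valid table as described above.  */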
1752 /* Return a value-number for RCODE OPS... either by looking up an existing
1753 value-number for the simplified result or by inserting the operation. */
1755 static tree
1756 vn_nary_build_or_lookup (code_helper rcode, tree type, tree *ops)
1758 return vn_nary_build_or_lookup_1 (rcode, type, ops, true);
1761 /* Try to simplify the expression represented by NARY and return
1762    its value if present.  */
1764 tree
1765 vn_nary_simplify (vn_nary_op_t nary)
1767 if (nary->length > 3)
1768 return NULL_TREE;
1769 tree ops[3];
1770 memcpy (ops, nary->op, sizeof (tree) * nary->length);
1771 return vn_nary_build_or_lookup_1 (nary->opcode, nary->type, ops, false);
1775 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1776 from the statement defining VUSE and if not successful tries to
1777 translate *REFP and VR_ through an aggregate copy at the definition
1778 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1779 of *REF and *VR. If only disambiguation was performed then
1780 *DISAMBIGUATE_ONLY is set to true. */
1782 static void *
1783 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1784 bool *disambiguate_only)
1786 vn_reference_t vr = (vn_reference_t)vr_;
1787 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1788 tree base = ao_ref_base (ref);
1789 HOST_WIDE_INT offset, maxsize;
1790 static vec<vn_reference_op_s> lhs_ops;
1791 ao_ref lhs_ref;
1792 bool lhs_ref_ok = false;
1794 /* If the reference is based on a parameter that was determined as
1795 pointing to readonly memory it doesn't change. */
1796 if (TREE_CODE (base) == MEM_REF
1797 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1798 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1799 && bitmap_bit_p (const_parms,
1800 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1802 *disambiguate_only = true;
1803 return NULL;
1806 /* First try to disambiguate after value-replacing in the definitions LHS. */
1807 if (is_gimple_assign (def_stmt))
1809 tree lhs = gimple_assign_lhs (def_stmt);
1810 bool valueized_anything = false;
1811 /* Avoid re-allocation overhead. */
1812 lhs_ops.truncate (0);
1813 copy_reference_ops_from_ref (lhs, &lhs_ops);
1814 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1815 if (valueized_anything)
1817 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1818 get_alias_set (lhs),
1819 TREE_TYPE (lhs), lhs_ops);
1820 if (lhs_ref_ok
1821 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1823 *disambiguate_only = true;
1824 return NULL;
1827 else
1829 ao_ref_init (&lhs_ref, lhs);
1830 lhs_ref_ok = true;
1833 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1834 && gimple_call_num_args (def_stmt) <= 4)
1836       /* For builtin calls valueize their arguments and call the
1837 alias oracle again. Valueization may improve points-to
1838 info of pointers and constify size and position arguments.
1839 Originally this was motivated by PR61034 which has
1840 conditional calls to free falsely clobbering ref because
1841 of imprecise points-to info of the argument. */
1842 tree oldargs[4];
1843 bool valueized_anything = false;
1844 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1846 oldargs[i] = gimple_call_arg (def_stmt, i);
1847 if (TREE_CODE (oldargs[i]) == SSA_NAME
1848 && VN_INFO (oldargs[i])->valnum != oldargs[i])
1850 gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
1851 valueized_anything = true;
1854 if (valueized_anything)
1856 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1857 ref);
1858 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1859 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1860 if (!res)
1862 *disambiguate_only = true;
1863 return NULL;
1868 if (*disambiguate_only)
1869 return (void *)-1;
1871 offset = ref->offset;
1872 maxsize = ref->max_size;
1874 /* If we cannot constrain the size of the reference we cannot
1875 test if anything kills it. */
1876 if (maxsize == -1)
1877 return (void *)-1;
1879 /* We can't deduce anything useful from clobbers. */
1880 if (gimple_clobber_p (def_stmt))
1881 return (void *)-1;
1883 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1884 from that definition.
1885 1) Memset. */
1886 if (is_gimple_reg_type (vr->type)
1887 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1888 && integer_zerop (gimple_call_arg (def_stmt, 1))
1889 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1890 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1892 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1893 tree base2;
1894 HOST_WIDE_INT offset2, size2, maxsize2;
1895 bool reverse;
1896 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1897 &reverse);
1898 size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
1899 if ((unsigned HOST_WIDE_INT)size2 / 8
1900 == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
1901 && maxsize2 != -1
1902 && operand_equal_p (base, base2, 0)
1903 && offset2 <= offset
1904 && offset2 + size2 >= offset + maxsize)
1906 tree val = build_zero_cst (vr->type);
1907 return vn_reference_lookup_or_insert_for_pieces
1908 (vuse, vr->set, vr->type, vr->operands, val);
1912 /* 2) Assignment from an empty CONSTRUCTOR. */
1913 else if (is_gimple_reg_type (vr->type)
1914 && gimple_assign_single_p (def_stmt)
1915 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1916 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1918 tree base2;
1919 HOST_WIDE_INT offset2, size2, maxsize2;
1920 bool reverse;
1921 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1922 &offset2, &size2, &maxsize2, &reverse);
1923 if (maxsize2 != -1
1924 && operand_equal_p (base, base2, 0)
1925 && offset2 <= offset
1926 && offset2 + size2 >= offset + maxsize)
1928 tree val = build_zero_cst (vr->type);
1929 return vn_reference_lookup_or_insert_for_pieces
1930 (vuse, vr->set, vr->type, vr->operands, val);
1934   /* 3) Assignment from a constant.  We can use fold's native encode/interpret
1935 routines to extract the assigned bits. */
1936 else if (ref->size == maxsize
1937 && is_gimple_reg_type (vr->type)
1938 && !contains_storage_order_barrier_p (vr->operands)
1939 && gimple_assign_single_p (def_stmt)
1940 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1941 && maxsize % BITS_PER_UNIT == 0
1942 && offset % BITS_PER_UNIT == 0
1943 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
1944 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
1945 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
1947 tree base2;
1948 HOST_WIDE_INT offset2, size2, maxsize2;
1949 bool reverse;
1950 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1951 &offset2, &size2, &maxsize2, &reverse);
1952 if (!reverse
1953 && maxsize2 != -1
1954 && maxsize2 == size2
1955 && size2 % BITS_PER_UNIT == 0
1956 && offset2 % BITS_PER_UNIT == 0
1957 && operand_equal_p (base, base2, 0)
1958 && offset2 <= offset
1959 && offset2 + size2 >= offset + maxsize)
1961 /* We support up to 512-bit values (for V8DFmode). */
1962 unsigned char buffer[64];
1963 int len;
1965 tree rhs = gimple_assign_rhs1 (def_stmt);
1966 if (TREE_CODE (rhs) == SSA_NAME)
1967 rhs = SSA_VAL (rhs);
1968 	  len = native_encode_expr (rhs,
1969 buffer, sizeof (buffer));
1970 if (len > 0)
1972 tree type = vr->type;
1973 /* Make sure to interpret in a type that has a range
1974 covering the whole access size. */
1975 if (INTEGRAL_TYPE_P (vr->type)
1976 && ref->size != TYPE_PRECISION (vr->type))
1977 type = build_nonstandard_integer_type (ref->size,
1978 TYPE_UNSIGNED (type));
1979 tree val = native_interpret_expr (type,
1980 buffer
1981 + ((offset - offset2)
1982 / BITS_PER_UNIT),
1983 ref->size / BITS_PER_UNIT);
1984 /* If we chop off bits because the type's precision doesn't
1985 match the memory access size, this is ok when optimizing
1986 reads but not when called from the DSE code during
1987 elimination. */
1988 if (val
1989 && type != vr->type)
1991 if (! int_fits_type_p (val, vr->type))
1992 val = NULL_TREE;
1993 else
1994 val = fold_convert (vr->type, val);
1997 if (val)
1998 return vn_reference_lookup_or_insert_for_pieces
1999 (vuse, vr->set, vr->type, vr->operands, val);
2004 /* 4) Assignment from an SSA name whose definition we may be able
2005 to access pieces from. */
2006 else if (ref->size == maxsize
2007 && is_gimple_reg_type (vr->type)
2008 && !contains_storage_order_barrier_p (vr->operands)
2009 && gimple_assign_single_p (def_stmt)
2010 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2012 tree base2;
2013 HOST_WIDE_INT offset2, size2, maxsize2;
2014 bool reverse;
2015 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2016 &offset2, &size2, &maxsize2,
2017 &reverse);
2018 if (!reverse
2019 && maxsize2 != -1
2020 && maxsize2 == size2
2021 && operand_equal_p (base, base2, 0)
2022 && offset2 <= offset
2023 && offset2 + size2 >= offset + maxsize
2024 /* ??? We can't handle bitfield precision extracts without
2025 either using an alternate type for the BIT_FIELD_REF and
2026 then doing a conversion or possibly adjusting the offset
2027 according to endianness. */
2028 && (! INTEGRAL_TYPE_P (vr->type)
2029 || ref->size == TYPE_PRECISION (vr->type))
2030 && ref->size % BITS_PER_UNIT == 0)
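/* Extract the read bits from the stored SSA name with a BIT_FIELD_REF
   of the read size at the offset relative to the store.  */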
2032 code_helper rcode = BIT_FIELD_REF;
2033 tree ops[3];
2034 ops[0] = SSA_VAL (gimple_assign_rhs1 (def_stmt));
2035 ops[1] = bitsize_int (ref->size);
2036 ops[2] = bitsize_int (offset - offset2);
2037 tree val = vn_nary_build_or_lookup (rcode, vr->type, ops);
2038 if (val
2039 && (TREE_CODE (val) != SSA_NAME
2040 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2042 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2043 (vuse, vr->set, vr->type, vr->operands, val);
2044 return res;
2049 /* 5) For aggregate copies translate the reference through them if
2050 the copy kills ref. */
2051 else if (vn_walk_kind == VN_WALKREWRITE
2052 && gimple_assign_single_p (def_stmt)
2053 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2054 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2055 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2057 tree base2;
2058 HOST_WIDE_INT maxsize2;
2059 int i, j, k;
2060 auto_vec<vn_reference_op_s> rhs;
2061 vn_reference_op_t vro;
2062 ao_ref r;
2064 if (!lhs_ref_ok)
2065 return (void *)-1;
2067 /* See if the assignment kills REF. */
2068 base2 = ao_ref_base (&lhs_ref);
2069 maxsize2 = lhs_ref.max_size;
2070 if (maxsize2 == -1
2071 || (base != base2
2072 && (TREE_CODE (base) != MEM_REF
2073 || TREE_CODE (base2) != MEM_REF
2074 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2075 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2076 TREE_OPERAND (base2, 1))))
2077 || !stmt_kills_ref_p (def_stmt, ref))
2078 return (void *)-1;
2080 /* Find the common base of ref and the lhs. lhs_ops already
2081 contains valueized operands for the lhs. */
2082 i = vr->operands.length () - 1;
2083 j = lhs_ops.length () - 1;
2084 while (j >= 0 && i >= 0
2085 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2087 i--;
2088 j--;
2091 /* ??? The innermost op should always be a MEM_REF and we already
2092 checked that the assignment to the lhs kills vr. Thus for
2093 aggregate copies using char[] types the vn_reference_op_eq
2094 may fail when comparing types for compatibility. But we really
2095 don't care here - further lookups with the rewritten operands
2096 will simply fail if we messed up types too badly. */
2097 HOST_WIDE_INT extra_off = 0;
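/* If both innermost remaining ops are MEM_REFs with known constant
   offsets, allow them to differ and record the difference as EXTRA_OFF
   to be applied to the RHS MEM_REF below.  */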
2098 if (j == 0 && i >= 0
2099 && lhs_ops[0].opcode == MEM_REF
2100 && lhs_ops[0].off != -1)
2102 if (lhs_ops[0].off == vr->operands[i].off)
2103 i--, j--;
2104 else if (vr->operands[i].opcode == MEM_REF
2105 && vr->operands[i].off != -1)
2107 extra_off = vr->operands[i].off - lhs_ops[0].off;
2108 i--, j--;
2112 /* i now points to the first additional op.
2113 ??? LHS may not be completely contained in VR, one or more
2114 VIEW_CONVERT_EXPRs could be in its way. We could at least
2115 try handling outermost VIEW_CONVERT_EXPRs. */
2116 if (j != -1)
2117 return (void *)-1;
2119 /* Punt if the additional ops contain a storage order barrier. */
2120 for (k = i; k >= 0; k--)
2122 vro = &vr->operands[k];
2123 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2124 return (void *)-1;
2127 /* Now re-write REF to be based on the rhs of the assignment. */
2128 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2130 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2131 if (extra_off != 0)
2133 if (rhs.length () < 2
2134 || rhs[0].opcode != MEM_REF
2135 || rhs[0].off == -1)
2136 return (void *)-1;
2137 rhs[0].off += extra_off;
2138 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
2139 build_int_cst (TREE_TYPE (rhs[0].op0),
2140 extra_off));
2143 /* We need to pre-pend vr->operands[0..i] to rhs. */
2144 vec<vn_reference_op_s> old = vr->operands;
2145 if (i + 1 + rhs.length () > vr->operands.length ())
2146 vr->operands.safe_grow (i + 1 + rhs.length ());
2147 else
2148 vr->operands.truncate (i + 1 + rhs.length ());
2149 FOR_EACH_VEC_ELT (rhs, j, vro)
2150 vr->operands[i + 1 + j] = *vro;
2151 vr->operands = valueize_refs (vr->operands);
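/* Growing VR->OPERANDS may have reallocated the vector; if it was the
   shared scratch vector, keep the global pointing at the new storage.  */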
2152 if (old == shared_lookup_references)
2153 shared_lookup_references = vr->operands;
2154 vr->hashcode = vn_reference_compute_hash (vr);
2156 /* Try folding the new reference to a constant. */
2157 tree val = fully_constant_vn_reference_p (vr);
2158 if (val)
2159 return vn_reference_lookup_or_insert_for_pieces
2160 (vuse, vr->set, vr->type, vr->operands, val);
2162 /* Adjust *ref from the new operands. */
2163 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2164 return (void *)-1;
2165 /* This can happen with bitfields. */
2166 if (ref->size != r.size)
2167 return (void *)-1;
2168 *ref = r;
2170 /* Do not update last seen VUSE after translating. */
2171 last_vuse_ptr = NULL;
2173 /* Keep looking for the adjusted *REF / VR pair. */
2174 return NULL;
2177 /* 6) For memcpy copies translate the reference through them if
2178 the copy kills ref. */
2179 else if (vn_walk_kind == VN_WALKREWRITE
2180 && is_gimple_reg_type (vr->type)
2181 /* ??? Handle BCOPY as well. */
2182 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2183 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2184 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2185 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2186 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2187 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2188 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2189 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
2191 tree lhs, rhs;
2192 ao_ref r;
2193 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
2194 vn_reference_op_s op;
2195 HOST_WIDE_INT at;
2197 /* Only handle non-variable, addressable refs. */
2198 if (ref->size != maxsize
2199 || offset % BITS_PER_UNIT != 0
2200 || ref->size % BITS_PER_UNIT != 0)
2201 return (void *)-1;
2203 /* Extract a pointer base and an offset for the destination. */
2204 lhs = gimple_call_arg (def_stmt, 0);
2205 lhs_offset = 0;
2206 if (TREE_CODE (lhs) == SSA_NAME)
2208 lhs = SSA_VAL (lhs);
2209 if (TREE_CODE (lhs) == SSA_NAME)
2211 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2212 if (gimple_assign_single_p (def_stmt)
2213 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2214 lhs = gimple_assign_rhs1 (def_stmt);
2217 if (TREE_CODE (lhs) == ADDR_EXPR)
2219 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2220 &lhs_offset);
2221 if (!tem)
2222 return (void *)-1;
2223 if (TREE_CODE (tem) == MEM_REF
2224 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2226 lhs = TREE_OPERAND (tem, 0);
2227 if (TREE_CODE (lhs) == SSA_NAME)
2228 lhs = SSA_VAL (lhs);
2229 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2231 else if (DECL_P (tem))
2232 lhs = build_fold_addr_expr (tem);
2233 else
2234 return (void *)-1;
2236 if (TREE_CODE (lhs) != SSA_NAME
2237 && TREE_CODE (lhs) != ADDR_EXPR)
2238 return (void *)-1;
2240 /* Extract a pointer base and an offset for the source. */
2241 rhs = gimple_call_arg (def_stmt, 1);
2242 rhs_offset = 0;
2243 if (TREE_CODE (rhs) == SSA_NAME)
2244 rhs = SSA_VAL (rhs);
2245 if (TREE_CODE (rhs) == ADDR_EXPR)
2247 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2248 &rhs_offset);
2249 if (!tem)
2250 return (void *)-1;
2251 if (TREE_CODE (tem) == MEM_REF
2252 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2254 rhs = TREE_OPERAND (tem, 0);
2255 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2257 else if (DECL_P (tem))
2258 rhs = build_fold_addr_expr (tem);
2259 else
2260 return (void *)-1;
2262 if (TREE_CODE (rhs) != SSA_NAME
2263 && TREE_CODE (rhs) != ADDR_EXPR)
2264 return (void *)-1;
2266 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2268 /* The bases of the destination and the references have to agree. */
2269 if ((TREE_CODE (base) != MEM_REF
2270 && !DECL_P (base))
2271 || (TREE_CODE (base) == MEM_REF
2272 && (TREE_OPERAND (base, 0) != lhs
2273 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
2274 || (DECL_P (base)
2275 && (TREE_CODE (lhs) != ADDR_EXPR
2276 || TREE_OPERAND (lhs, 0) != base)))
2277 return (void *)-1;
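/* Compute the byte offset of the access relative to the destination
   pointer base.  */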
2279 at = offset / BITS_PER_UNIT;
2280 if (TREE_CODE (base) == MEM_REF)
2281 at += tree_to_uhwi (TREE_OPERAND (base, 1));
2282 /* If the access is completely outside of the memcpy destination
2283 area there is no aliasing. */
2284 if (lhs_offset >= at + maxsize / BITS_PER_UNIT
2285 || lhs_offset + copy_size <= at)
2286 return NULL;
2287 /* And the access has to be contained within the memcpy destination. */
2288 if (lhs_offset > at
2289 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2290 return (void *)-1;
2292 /* Make room for 2 operands in the new reference. */
2293 if (vr->operands.length () < 2)
2295 vec<vn_reference_op_s> old = vr->operands;
2296 vr->operands.safe_grow_cleared (2);
2297 if (old == shared_lookup_references)
2298 shared_lookup_references = vr->operands;
2300 else
2301 vr->operands.truncate (2);
2303 /* The looked-through reference is a simple MEM_REF. */
2304 memset (&op, 0, sizeof (op));
2305 op.type = vr->type;
2306 op.opcode = MEM_REF;
2307 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2308 op.off = at - lhs_offset + rhs_offset;
2309 vr->operands[0] = op;
2310 op.type = TREE_TYPE (rhs);
2311 op.opcode = TREE_CODE (rhs);
2312 op.op0 = rhs;
2313 op.off = -1;
2314 vr->operands[1] = op;
2315 vr->hashcode = vn_reference_compute_hash (vr);
2317 /* Try folding the new reference to a constant. */
2318 tree val = fully_constant_vn_reference_p (vr);
2319 if (val)
2320 return vn_reference_lookup_or_insert_for_pieces
2321 (vuse, vr->set, vr->type, vr->operands, val);
2323 /* Adjust *ref from the new operands. */
2324 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2325 return (void *)-1;
2326 /* This can happen with bitfields. */
2327 if (ref->size != r.size)
2328 return (void *)-1;
2329 *ref = r;
2331 /* Do not update last seen VUSE after translating. */
2332 last_vuse_ptr = NULL;
2334 /* Keep looking for the adjusted *REF / VR pair. */
2335 return NULL;
2338 /* Bail out and stop walking. */
2339 return (void *)-1;
2342 /* Return a reference op vector from OP that can be used for
2343 vn_reference_lookup_pieces. The caller is responsible for releasing
2344 the vector. */
2346 vec<vn_reference_op_s>
2347 vn_reference_operands_for_lookup (tree op)
2349 bool valueized;
2350 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2353 /* Lookup a reference operation by its parts, in the current hash table.
2354 Returns the resulting value number if it exists in the hash table,
2355 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2356 vn_reference_t stored in the hashtable if something is found. */
2358 tree
2359 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2360 vec<vn_reference_op_s> operands,
2361 vn_reference_t *vnresult, vn_lookup_kind kind)
2363 struct vn_reference_s vr1;
2364 vn_reference_t tmp;
2365 tree cst;
2367 if (!vnresult)
2368 vnresult = &tmp;
2369 *vnresult = NULL;
2371 vr1.vuse = vuse_ssa_val (vuse);
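/* Work on a copy of the operands in the shared scratch vector so that
   valueization does not modify the caller's vector.  */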
2372 shared_lookup_references.truncate (0);
2373 shared_lookup_references.safe_grow (operands.length ());
2374 memcpy (shared_lookup_references.address (),
2375 operands.address (),
2376 sizeof (vn_reference_op_s)
2377 * operands.length ());
2378 vr1.operands = operands = shared_lookup_references
2379 = valueize_refs (shared_lookup_references);
2380 vr1.type = type;
2381 vr1.set = set;
2382 vr1.hashcode = vn_reference_compute_hash (&vr1);
2383 if ((cst = fully_constant_vn_reference_p (&vr1)))
2384 return cst;
2386 vn_reference_lookup_1 (&vr1, vnresult);
2387 if (!*vnresult
2388 && kind != VN_NOWALK
2389 && vr1.vuse)
2391 ao_ref r;
2392 vn_walk_kind = kind;
2393 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2394 *vnresult =
2395 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2396 vn_reference_lookup_2,
2397 vn_reference_lookup_3,
2398 vuse_ssa_val, &vr1);
2399 gcc_checking_assert (vr1.operands == shared_lookup_references);
2402 if (*vnresult)
2403 return (*vnresult)->result;
2405 return NULL_TREE;
2408 /* Lookup OP in the current hash table, and return the resulting value
2409 number if it exists in the hash table. Return NULL_TREE if it does
2410 not exist in the hash table or if the result field of the structure
2411 was NULL. VNRESULT will be filled in with the vn_reference_t
2412 stored in the hashtable if one exists. When TBAA_P is false assume
2413 we are looking up a store and treat it as having alias-set zero. */
2415 tree
2416 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2417 vn_reference_t *vnresult, bool tbaa_p)
2419 vec<vn_reference_op_s> operands;
2420 struct vn_reference_s vr1;
2421 tree cst;
2422 bool valueized_anything;
2424 if (vnresult)
2425 *vnresult = NULL;
2427 vr1.vuse = vuse_ssa_val (vuse);
2428 vr1.operands = operands
2429 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2430 vr1.type = TREE_TYPE (op);
2431 vr1.set = tbaa_p ? get_alias_set (op) : 0;
2432 vr1.hashcode = vn_reference_compute_hash (&vr1);
2433 if ((cst = fully_constant_vn_reference_p (&vr1)))
2434 return cst;
2436 if (kind != VN_NOWALK
2437 && vr1.vuse)
2439 vn_reference_t wvnresult;
2440 ao_ref r;
2441 /* Make sure to use a valueized reference if we valueized anything.
2442 Otherwise preserve the full reference for advanced TBAA. */
2443 if (!valueized_anything
2444 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2445 vr1.operands))
2446 ao_ref_init (&r, op);
2447 if (! tbaa_p)
2448 r.ref_alias_set = r.base_alias_set = 0;
2449 vn_walk_kind = kind;
2450 wvnresult =
2451 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2452 vn_reference_lookup_2,
2453 vn_reference_lookup_3,
2454 vuse_ssa_val, &vr1);
2455 gcc_checking_assert (vr1.operands == shared_lookup_references);
2456 if (wvnresult)
2458 if (vnresult)
2459 *vnresult = wvnresult;
2460 return wvnresult->result;
2463 return NULL_TREE;
2466 return vn_reference_lookup_1 (&vr1, vnresult);
2469 /* Lookup CALL in the current hash table and return the entry in
2470 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2472 void
2473 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2474 vn_reference_t vr)
2476 if (vnresult)
2477 *vnresult = NULL;
2479 tree vuse = gimple_vuse (call);
2481 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2482 vr->operands = valueize_shared_reference_ops_from_call (call);
2483 vr->type = gimple_expr_type (call);
2484 vr->set = 0;
2485 vr->hashcode = vn_reference_compute_hash (vr);
2486 vn_reference_lookup_1 (vr, vnresult);
2489 /* Insert OP into the current hash table with a value number of
2490 RESULT, and return the resulting reference structure we created. */
2492 static vn_reference_t
2493 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2495 vn_reference_s **slot;
2496 vn_reference_t vr1;
2497 bool tem;
2499 vr1 = current_info->references_pool->allocate ();
2500 if (TREE_CODE (result) == SSA_NAME)
2501 vr1->value_id = VN_INFO (result)->value_id;
2502 else
2503 vr1->value_id = get_or_alloc_constant_value_id (result);
2504 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2505 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2506 vr1->type = TREE_TYPE (op);
2507 vr1->set = get_alias_set (op);
2508 vr1->hashcode = vn_reference_compute_hash (vr1);
2509 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2510 vr1->result_vdef = vdef;
2512 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2513 INSERT);
2515 /* Because we lookup stores using vuses, and value number failures
2516 using the vdefs (see visit_reference_op_store for how and why),
2517 it's possible that on failure we may try to insert an already
2518 inserted store. This is not wrong, there is no ssa name for a
2519 store that we could use as a differentiator anyway. Thus, unlike
2520 the other lookup functions, you cannot gcc_assert (!*slot)
2521 here. */
2523 /* But free the old slot in case of a collision. */
2524 if (*slot)
2525 free_reference (*slot);
2527 *slot = vr1;
2528 return vr1;
2531 /* Insert a reference by its pieces into the current hash table with
2532 a value number of RESULT. Return the resulting reference
2533 structure we created. */
2535 vn_reference_t
2536 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2537 vec<vn_reference_op_s> operands,
2538 tree result, unsigned int value_id)
2541 vn_reference_s **slot;
2542 vn_reference_t vr1;
2544 vr1 = current_info->references_pool->allocate ();
2545 vr1->value_id = value_id;
2546 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2547 vr1->operands = valueize_refs (operands);
2548 vr1->type = type;
2549 vr1->set = set;
2550 vr1->hashcode = vn_reference_compute_hash (vr1);
2551 if (result && TREE_CODE (result) == SSA_NAME)
2552 result = SSA_VAL (result);
2553 vr1->result = result;
2555 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2556 INSERT);
2558 /* At this point we should have all the things inserted that we have
2559 seen before, and we should never try inserting something that
2560 already exists. */
2561 gcc_assert (!*slot);
2562 if (*slot)
2563 free_reference (*slot);
2565 *slot = vr1;
2566 return vr1;
2569 /* Compute and return the hash value for nary operation VBO1. */
2571 static hashval_t
2572 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2574 inchash::hash hstate;
2575 unsigned i;
2577 for (i = 0; i < vno1->length; ++i)
2578 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2579 vno1->op[i] = SSA_VAL (vno1->op[i]);
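/* Canonicalize the operand order of commutative operations and the
   direction of comparisons so that equivalent expressions hash the same.  */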
2581 if (((vno1->length == 2
2582 && commutative_tree_code (vno1->opcode))
2583 || (vno1->length == 3
2584 && commutative_ternary_tree_code (vno1->opcode)))
2585 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2586 std::swap (vno1->op[0], vno1->op[1]);
2587 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2588 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2590 std::swap (vno1->op[0], vno1->op[1]);
2591 vno1->opcode = swap_tree_comparison (vno1->opcode);
2594 hstate.add_int (vno1->opcode);
2595 for (i = 0; i < vno1->length; ++i)
2596 inchash::add_expr (vno1->op[i], hstate);
2598 return hstate.end ();
2601 /* Compare nary operations VNO1 and VNO2 and return true if they are
2602 equivalent. */
2604 bool
2605 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2607 unsigned i;
2609 if (vno1->hashcode != vno2->hashcode)
2610 return false;
2612 if (vno1->length != vno2->length)
2613 return false;
2615 if (vno1->opcode != vno2->opcode
2616 || !types_compatible_p (vno1->type, vno2->type))
2617 return false;
2619 for (i = 0; i < vno1->length; ++i)
2620 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2621 return false;
2623 return true;
2626 /* Initialize VNO from the pieces provided. */
2628 static void
2629 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2630 enum tree_code code, tree type, tree *ops)
2632 vno->opcode = code;
2633 vno->length = length;
2634 vno->type = type;
2635 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2638 /* Initialize VNO from OP. */
2640 static void
2641 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2643 unsigned i;
2645 vno->opcode = TREE_CODE (op);
2646 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2647 vno->type = TREE_TYPE (op);
2648 for (i = 0; i < vno->length; ++i)
2649 vno->op[i] = TREE_OPERAND (op, i);
2652 /* Return the number of operands for a vn_nary ops structure from STMT. */
2654 static unsigned int
2655 vn_nary_length_from_stmt (gimple *stmt)
2657 switch (gimple_assign_rhs_code (stmt))
2659 case REALPART_EXPR:
2660 case IMAGPART_EXPR:
2661 case VIEW_CONVERT_EXPR:
2662 return 1;
2664 case BIT_FIELD_REF:
2665 return 3;
2667 case CONSTRUCTOR:
2668 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2670 default:
2671 return gimple_num_ops (stmt) - 1;
2675 /* Initialize VNO from STMT. */
2677 static void
2678 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2680 unsigned i;
2682 vno->opcode = gimple_assign_rhs_code (stmt);
2683 vno->type = gimple_expr_type (stmt);
2684 switch (vno->opcode)
2686 case REALPART_EXPR:
2687 case IMAGPART_EXPR:
2688 case VIEW_CONVERT_EXPR:
2689 vno->length = 1;
2690 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2691 break;
2693 case BIT_FIELD_REF:
2694 vno->length = 3;
2695 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2696 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2697 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2698 break;
2700 case CONSTRUCTOR:
2701 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2702 for (i = 0; i < vno->length; ++i)
2703 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2704 break;
2706 default:
2707 gcc_checking_assert (!gimple_assign_single_p (stmt));
2708 vno->length = gimple_num_ops (stmt) - 1;
2709 for (i = 0; i < vno->length; ++i)
2710 vno->op[i] = gimple_op (stmt, i + 1);
2714 /* Compute the hashcode for VNO and look for it in the hash table;
2715 return the resulting value number if it exists in the hash table.
2716 Return NULL_TREE if it does not exist in the hash table or if the
2717 result field of the operation is NULL. VNRESULT will contain the
2718 vn_nary_op_t from the hashtable if it exists. */
2720 static tree
2721 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2723 vn_nary_op_s **slot;
2725 if (vnresult)
2726 *vnresult = NULL;
2728 vno->hashcode = vn_nary_op_compute_hash (vno);
2729 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2730 NO_INSERT);
2731 if (!slot && current_info == optimistic_info)
2732 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2733 NO_INSERT);
2734 if (!slot)
2735 return NULL_TREE;
2736 if (vnresult)
2737 *vnresult = *slot;
2738 return (*slot)->result;
2741 /* Lookup an n-ary operation by its pieces and return the resulting value
2742 number if it exists in the hash table. Return NULL_TREE if it does
2743 not exist in the hash table or if the result field of the operation
2744 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2745 if it exists. */
2747 tree
2748 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2749 tree type, tree *ops, vn_nary_op_t *vnresult)
2751 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2752 sizeof_vn_nary_op (length));
2753 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2754 return vn_nary_op_lookup_1 (vno1, vnresult);
2757 /* Lookup OP in the current hash table, and return the resulting value
2758 number if it exists in the hash table. Return NULL_TREE if it does
2759 not exist in the hash table or if the result field of the operation
2760 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2761 if it exists. */
2763 tree
2764 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2766 vn_nary_op_t vno1
2767 = XALLOCAVAR (struct vn_nary_op_s,
2768 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2769 init_vn_nary_op_from_op (vno1, op);
2770 return vn_nary_op_lookup_1 (vno1, vnresult);
2773 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2774 value number if it exists in the hash table. Return NULL_TREE if
2775 it does not exist in the hash table. VNRESULT will contain the
2776 vn_nary_op_t from the hashtable if it exists. */
2778 tree
2779 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2781 vn_nary_op_t vno1
2782 = XALLOCAVAR (struct vn_nary_op_s,
2783 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2784 init_vn_nary_op_from_stmt (vno1, stmt);
2785 return vn_nary_op_lookup_1 (vno1, vnresult);
2788 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2790 static vn_nary_op_t
2791 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2793 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2796 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2797 obstack. */
2799 static vn_nary_op_t
2800 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2802 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2803 &current_info->nary_obstack);
2805 vno1->value_id = value_id;
2806 vno1->length = length;
2807 vno1->result = result;
2809 return vno1;
2812 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2813 VNO->HASHCODE first. */
2815 static vn_nary_op_t
2816 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2817 bool compute_hash)
2819 vn_nary_op_s **slot;
2821 if (compute_hash)
2822 vno->hashcode = vn_nary_op_compute_hash (vno);
2824 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2825 /* While we do not want to insert things twice, it's awkward to
2826 avoid it in the case where visit_nary_op pattern-matches stuff
2827 and ends up simplifying the replacement to itself. We then
2828 get two inserts, one from visit_nary_op and one from
2829 vn_nary_build_or_lookup.
2830 So allow inserts with the same value number. */
2831 if (*slot && (*slot)->result == vno->result)
2832 return *slot;
2834 gcc_assert (!*slot);
2836 *slot = vno;
2837 return vno;
2840 /* Insert an n-ary operation into the current hash table using its
2841 pieces. Return the vn_nary_op_t structure we created and put in
2842 the hashtable. */
2844 vn_nary_op_t
2845 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2846 tree type, tree *ops,
2847 tree result, unsigned int value_id)
2849 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2850 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2851 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2854 /* Insert OP into the current hash table with a value number of
2855 RESULT. Return the vn_nary_op_t structure we created and put in
2856 the hashtable. */
2858 vn_nary_op_t
2859 vn_nary_op_insert (tree op, tree result)
2861 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2862 vn_nary_op_t vno1;
2864 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2865 init_vn_nary_op_from_op (vno1, op);
2866 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2869 /* Insert the rhs of STMT into the current hash table with a value number of
2870 RESULT. */
2872 static vn_nary_op_t
2873 vn_nary_op_insert_stmt (gimple *stmt, tree result)
2875 vn_nary_op_t vno1
2876 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2877 result, VN_INFO (result)->value_id);
2878 init_vn_nary_op_from_stmt (vno1, stmt);
2879 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2882 /* Compute a hashcode for PHI operation VP1 and return it. */
2884 static inline hashval_t
2885 vn_phi_compute_hash (vn_phi_t vp1)
2887 inchash::hash hstate (vp1->phiargs.length () > 2
2888 ? vp1->block->index : vp1->phiargs.length ());
2889 tree phi1op;
2890 tree type;
2891 edge e;
2892 edge_iterator ei;
2894 /* If all PHI arguments are constants we need to distinguish
2895 the PHI node via its type. */
2896 type = vp1->type;
2897 hstate.merge_hash (vn_hash_type (type));
2899 FOR_EACH_EDGE (e, ei, vp1->block->preds)
2901 /* Don't hash backedge values, they need to be handled as VN_TOP
2902 for optimistic value-numbering. */
2903 if (e->flags & EDGE_DFS_BACK)
2904 continue;
2906 phi1op = vp1->phiargs[e->dest_idx];
2907 if (phi1op == VN_TOP)
2908 continue;
2909 inchash::add_expr (phi1op, hstate);
2912 return hstate.end ();
2916 /* Return true if COND1 and COND2 represent the same condition, set
2917 *INVERTED_P if one needs to be inverted to make it the same as
2918 the other. */
2920 static bool
2921 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
2922 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
2924 enum tree_code code1 = gimple_cond_code (cond1);
2925 enum tree_code code2 = gimple_cond_code (cond2);
2927 *inverted_p = false;
2928 if (code1 == code2)
2930 else if (code1 == swap_tree_comparison (code2))
2931 std::swap (lhs2, rhs2);
2932 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
2933 *inverted_p = true;
2934 else if (code1 == invert_tree_comparison
2935 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
2937 std::swap (lhs2, rhs2);
2938 *inverted_p = true;
2940 else
2941 return false;
2943 return ((expressions_equal_p (lhs1, lhs2)
2944 && expressions_equal_p (rhs1, rhs2))
2945 || (commutative_tree_code (code1)
2946 && expressions_equal_p (lhs1, rhs2)
2947 && expressions_equal_p (rhs1, lhs2)));
2950 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2952 static int
2953 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2955 if (vp1->hashcode != vp2->hashcode)
2956 return false;
2958 if (vp1->block != vp2->block)
2960 if (vp1->phiargs.length () != vp2->phiargs.length ())
2961 return false;
2963 switch (vp1->phiargs.length ())
2965 case 1:
2966 /* Single-arg PHIs are just copies. */
2967 break;
2969 case 2:
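/* Two-argument PHIs in different blocks can still be equal when they
   are controlled by the same (possibly inverted) condition and receive
   equal values on the corresponding true and false edges.  */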
2971 /* Rule out backedges into the PHI. */
2972 if (vp1->block->loop_father->header == vp1->block
2973 || vp2->block->loop_father->header == vp2->block)
2974 return false;
2976 /* If the PHI nodes do not have compatible types
2977 they are not the same. */
2978 if (!types_compatible_p (vp1->type, vp2->type))
2979 return false;
2981 basic_block idom1
2982 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
2983 basic_block idom2
2984 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
2985 /* If the immediate dominators end in switch stmts, multiple
2986 values may end up in the same PHI arg via intermediate
2987 CFG merges. */
2988 if (EDGE_COUNT (idom1->succs) != 2
2989 || EDGE_COUNT (idom2->succs) != 2)
2990 return false;
2992 /* Verify the controlling stmt is the same. */
2993 gimple *last1 = last_stmt (idom1);
2994 gimple *last2 = last_stmt (idom2);
2995 if (gimple_code (last1) != GIMPLE_COND
2996 || gimple_code (last2) != GIMPLE_COND)
2997 return false;
2998 bool inverted_p;
2999 if (! cond_stmts_equal_p (as_a <gcond *> (last1),
3000 vp1->cclhs, vp1->ccrhs,
3001 as_a <gcond *> (last2),
3002 vp2->cclhs, vp2->ccrhs,
3003 &inverted_p))
3004 return false;
3006 /* Get at true/false controlled edges into the PHI. */
3007 edge te1, te2, fe1, fe2;
3008 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3009 &te1, &fe1)
3010 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3011 &te2, &fe2))
3012 return false;
3014 /* Swap edges if the second condition is the inverted of the
3015 first. */
3016 if (inverted_p)
3017 std::swap (te2, fe2);
3019 /* ??? Handle VN_TOP specially. */
3020 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3021 vp2->phiargs[te2->dest_idx])
3022 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3023 vp2->phiargs[fe2->dest_idx]))
3024 return false;
3026 return true;
3029 default:
3030 return false;
3034 /* If the PHI nodes do not have compatible types
3035 they are not the same. */
3036 if (!types_compatible_p (vp1->type, vp2->type))
3037 return false;
3039 /* Any phi in the same block will have its arguments in the
3040 same edge order, because of how we store phi nodes. */
3041 int i;
3042 tree phi1op;
3043 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
3045 tree phi2op = vp2->phiargs[i];
3046 if (phi1op == VN_TOP || phi2op == VN_TOP)
3047 continue;
3048 if (!expressions_equal_p (phi1op, phi2op))
3049 return false;
3052 return true;
3055 static vec<tree> shared_lookup_phiargs;
3057 /* Lookup PHI in the current hash table, and return the resulting
3058 value number if it exists in the hash table. Return NULL_TREE if
3059 it does not exist in the hash table. */
3061 static tree
3062 vn_phi_lookup (gimple *phi)
3064 vn_phi_s **slot;
3065 struct vn_phi_s vp1;
3066 edge e;
3067 edge_iterator ei;
3069 shared_lookup_phiargs.truncate (0);
3070 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
3072 /* Canonicalize the SSA_NAME's to their value number. */
3073 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3075 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3076 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3077 shared_lookup_phiargs[e->dest_idx] = def;
3079 vp1.type = TREE_TYPE (gimple_phi_result (phi));
3080 vp1.phiargs = shared_lookup_phiargs;
3081 vp1.block = gimple_bb (phi);
3082 /* Extract values of the controlling condition. */
3083 vp1.cclhs = NULL_TREE;
3084 vp1.ccrhs = NULL_TREE;
3085 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1.block);
3086 if (EDGE_COUNT (idom1->succs) == 2)
3087 if (gcond *last1 = dyn_cast <gcond *> (last_stmt (idom1)))
3089 vp1.cclhs = vn_valueize (gimple_cond_lhs (last1));
3090 vp1.ccrhs = vn_valueize (gimple_cond_rhs (last1));
3092 vp1.hashcode = vn_phi_compute_hash (&vp1);
3093 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3094 NO_INSERT);
3095 if (!slot && current_info == optimistic_info)
3096 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3097 NO_INSERT);
3098 if (!slot)
3099 return NULL_TREE;
3100 return (*slot)->result;
3103 /* Insert PHI into the current hash table with a value number of
3104 RESULT. */
3106 static vn_phi_t
3107 vn_phi_insert (gimple *phi, tree result)
3109 vn_phi_s **slot;
3110 vn_phi_t vp1 = current_info->phis_pool->allocate ();
3111 vec<tree> args = vNULL;
3112 edge e;
3113 edge_iterator ei;
3115 args.safe_grow (gimple_phi_num_args (phi));
3117 /* Canonicalize the SSA_NAME's to their value number. */
3118 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3120 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3121 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3122 args[e->dest_idx] = def;
3124 vp1->value_id = VN_INFO (result)->value_id;
3125 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3126 vp1->phiargs = args;
3127 vp1->block = gimple_bb (phi);
3128 /* Extract values of the controlling condition. */
3129 vp1->cclhs = NULL_TREE;
3130 vp1->ccrhs = NULL_TREE;
3131 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3132 if (EDGE_COUNT (idom1->succs) == 2)
3133 if (gcond *last1 = dyn_cast <gcond *> (last_stmt (idom1)))
3135 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3136 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3138 vp1->result = result;
3139 vp1->hashcode = vn_phi_compute_hash (vp1);
3141 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3143 /* Because we iterate over phi operations more than once, it's
3144 possible the slot might already exist here, hence no assert. */
3145 *slot = vp1;
3146 return vp1;
3150 /* Print set of components in strongly connected component SCC to OUT. */
3152 static void
3153 print_scc (FILE *out, vec<tree> scc)
3155 tree var;
3156 unsigned int i;
3158 fprintf (out, "SCC consists of %u:", scc.length ());
3159 FOR_EACH_VEC_ELT (scc, i, var)
3161 fprintf (out, " ");
3162 print_generic_expr (out, var);
3164 fprintf (out, "\n");
3167 /* Return true if BB1 is dominated by BB2 taking into account edges
3168 that are not executable. */
3170 static bool
3171 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3173 edge_iterator ei;
3174 edge e;
3176 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3177 return true;
3179 /* Before iterating we'd like to know if there exists a
3180 (executable) path from bb2 to bb1 at all; if not we can
3181 directly return false. For now simply iterate once. */
3183 /* Iterate to the single executable bb1 predecessor. */
3184 if (EDGE_COUNT (bb1->preds) > 1)
3186 edge prede = NULL;
3187 FOR_EACH_EDGE (e, ei, bb1->preds)
3188 if (e->flags & EDGE_EXECUTABLE)
3190 if (prede)
3192 prede = NULL;
3193 break;
3195 prede = e;
3197 if (prede)
3199 bb1 = prede->src;
3201 /* Re-do the dominance check with changed bb1. */
3202 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3203 return true;
3207 /* Iterate to the single executable bb2 successor. */
3208 edge succe = NULL;
3209 FOR_EACH_EDGE (e, ei, bb2->succs)
3210 if (e->flags & EDGE_EXECUTABLE)
3212 if (succe)
3214 succe = NULL;
3215 break;
3217 succe = e;
3219 if (succe)
3221 /* Verify the reached block is only reached through succe.
3222 If there is only one edge we can spare us the dominator
3223 check and iterate directly. */
3224 if (EDGE_COUNT (succe->dest->preds) > 1)
3226 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3227 if (e != succe
3228 && (e->flags & EDGE_EXECUTABLE))
3230 succe = NULL;
3231 break;
3234 if (succe)
3236 bb2 = succe->dest;
3238 /* Re-do the dominance check with changed bb2. */
3239 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3240 return true;
3244 /* We could now iterate updating bb1 / bb2. */
3245 return false;
3248 /* Set the value number of FROM to TO, return true if it has changed
3249 as a result. */
3251 static inline bool
3252 set_ssa_val_to (tree from, tree to)
3254 tree currval = SSA_VAL (from);
3255 HOST_WIDE_INT toff, coff;
3257 /* The only thing we allow as value numbers are ssa_names
3258 and invariants. So assert that here. We don't allow VN_TOP
3259 as visiting a stmt should produce a value-number other than
3260 that.
3261 ??? Still VN_TOP can happen for unreachable code, so force
3262 it to varying in that case. Not all code is prepared to
3263 get VN_TOP on valueization. */
3264 if (to == VN_TOP)
3266 if (dump_file && (dump_flags & TDF_DETAILS))
3267 fprintf (dump_file, "Forcing value number to varying on "
3268 "receiving VN_TOP\n");
3269 to = from;
3272 gcc_assert (to != NULL_TREE
3273 && ((TREE_CODE (to) == SSA_NAME
3274 && (to == from || SSA_VAL (to) == to))
3275 || is_gimple_min_invariant (to)));
3277 if (from != to)
3279 if (currval == from)
3281 if (dump_file && (dump_flags & TDF_DETAILS))
3283 fprintf (dump_file, "Not changing value number of ");
3284 print_generic_expr (dump_file, from);
3285 fprintf (dump_file, " from VARYING to ");
3286 print_generic_expr (dump_file, to);
3287 fprintf (dump_file, "\n");
3289 return false;
3291 else if (currval != VN_TOP
3292 && ! is_gimple_min_invariant (currval)
3293 && is_gimple_min_invariant (to))
3295 if (dump_file && (dump_flags & TDF_DETAILS))
3297 fprintf (dump_file, "Forcing VARYING instead of changing "
3298 "value number of ");
3299 print_generic_expr (dump_file, from);
3300 fprintf (dump_file, " from ");
3301 print_generic_expr (dump_file, currval);
3302 fprintf (dump_file, " (non-constant) to ");
3303 print_generic_expr (dump_file, to);
3304 fprintf (dump_file, " (constant)\n");
3306 to = from;
3308 else if (TREE_CODE (to) == SSA_NAME
3309 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3310 to = from;
3313 if (dump_file && (dump_flags & TDF_DETAILS))
3315 fprintf (dump_file, "Setting value number of ");
3316 print_generic_expr (dump_file, from);
3317 fprintf (dump_file, " to ");
3318 print_generic_expr (dump_file, to);
3321 if (currval != to
3322 && !operand_equal_p (currval, to, 0)
3323 /* ??? For addresses involving volatile objects or types operand_equal_p
3324 does not reliably detect ADDR_EXPRs as equal. We know we are only
3325 getting invariant gimple addresses here, so can use
3326 get_addr_base_and_unit_offset to do this comparison. */
3327 && !(TREE_CODE (currval) == ADDR_EXPR
3328 && TREE_CODE (to) == ADDR_EXPR
3329 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3330 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3331 && coff == toff))
3333 if (dump_file && (dump_flags & TDF_DETAILS))
3334 fprintf (dump_file, " (changed)\n");
3336 /* If we equate two SSA names we have to make the side-band info
3337 of the leader conservative (and remember whatever original value
3338 was present). */
3339 if (TREE_CODE (to) == SSA_NAME)
3341 if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3342 && SSA_NAME_RANGE_INFO (to))
3344 if (SSA_NAME_IS_DEFAULT_DEF (to)
3345 || dominated_by_p_w_unex
3346 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3347 gimple_bb (SSA_NAME_DEF_STMT (to))))
3348 /* Keep the info from the dominator. */
3350 else
3352 /* Save old info. */
3353 if (! VN_INFO (to)->info.range_info)
3355 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3356 VN_INFO (to)->range_info_anti_range_p
3357 = SSA_NAME_ANTI_RANGE_P (to);
3359 /* Rather than allocating memory and unioning the info
3360 just clear it. */
3361 if (dump_file && (dump_flags & TDF_DETAILS))
3363 fprintf (dump_file, "clearing range info of ");
3364 print_generic_expr (dump_file, to);
3365 fprintf (dump_file, "\n");
3367 SSA_NAME_RANGE_INFO (to) = NULL;
3370 else if (POINTER_TYPE_P (TREE_TYPE (to))
3371 && SSA_NAME_PTR_INFO (to))
3373 if (SSA_NAME_IS_DEFAULT_DEF (to)
3374 || dominated_by_p_w_unex
3375 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3376 gimple_bb (SSA_NAME_DEF_STMT (to))))
3377 /* Keep the info from the dominator. */
3379 else if (! SSA_NAME_PTR_INFO (from)
3380 /* Handle the case of trivially equivalent info. */
3381 || memcmp (SSA_NAME_PTR_INFO (to),
3382 SSA_NAME_PTR_INFO (from),
3383 sizeof (ptr_info_def)) != 0)
3385 /* Save old info. */
3386 if (! VN_INFO (to)->info.ptr_info)
3387 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3388 /* Rather than allocating memory and unioning the info
3389 just clear it. */
3390 if (dump_file && (dump_flags & TDF_DETAILS))
3392 fprintf (dump_file, "clearing points-to info of ");
3393 print_generic_expr (dump_file, to);
3394 fprintf (dump_file, "\n");
3396 SSA_NAME_PTR_INFO (to) = NULL;
3401 VN_INFO (from)->valnum = to;
3402 return true;
3404 if (dump_file && (dump_flags & TDF_DETAILS))
3405 fprintf (dump_file, "\n");
3406 return false;
3409 /* Mark as processed all the definitions in the defining stmt of USE, or
3410 the USE itself. */
3412 static void
3413 mark_use_processed (tree use)
3415 ssa_op_iter iter;
3416 def_operand_p defp;
3417 gimple *stmt = SSA_NAME_DEF_STMT (use);
3419 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3421 VN_INFO (use)->use_processed = true;
3422 return;
3425 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3427 tree def = DEF_FROM_PTR (defp);
3429 VN_INFO (def)->use_processed = true;
3433 /* Set all definitions in STMT to value number to themselves.
3434 Return true if a value number changed. */
3436 static bool
3437 defs_to_varying (gimple *stmt)
3439 bool changed = false;
3440 ssa_op_iter iter;
3441 def_operand_p defp;
3443 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3445 tree def = DEF_FROM_PTR (defp);
3446 changed |= set_ssa_val_to (def, def);
3448 return changed;
3451 /* Visit a copy between LHS and RHS, return true if the value number
3452 changed. */
3454 static bool
3455 visit_copy (tree lhs, tree rhs)
3457 /* Valueize. */
3458 rhs = SSA_VAL (rhs);
3460 return set_ssa_val_to (lhs, rhs);
3463 /* Lookup a value for OP in type WIDE_TYPE where the value in type of OP
3464 is the same. */
3466 static tree
3467 valueized_wider_op (tree wide_type, tree op)
3469 if (TREE_CODE (op) == SSA_NAME)
3470 op = SSA_VAL (op);
3472 /* Either we have the op widened available. */
3473 tree ops[3] = {};
3474 ops[0] = op;
3475 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
3476 wide_type, ops, NULL);
3477 if (tem)
3478 return tem;
3480 /* Or the op is truncated from some existing value. */
3481 if (TREE_CODE (op) == SSA_NAME)
3483 gimple *def = SSA_NAME_DEF_STMT (op);
3484 if (is_gimple_assign (def)
3485 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3487 tem = gimple_assign_rhs1 (def);
3488 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
3490 if (TREE_CODE (tem) == SSA_NAME)
3491 tem = SSA_VAL (tem);
3492 return tem;
3497 /* For constants simply extend it. */
3498 if (TREE_CODE (op) == INTEGER_CST)
3499 return wide_int_to_tree (wide_type, op);
3501 return NULL_TREE;
3504 /* Visit a nary operator RHS, value number it, and return true if the
3505 value number of LHS has changed as a result. */
3507 static bool
3508 visit_nary_op (tree lhs, gassign *stmt)
3510 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3511 if (result)
3512 return set_ssa_val_to (lhs, result);
3514 /* Do some special pattern matching for redundancies of operations
3515 in different types. */
3516 enum tree_code code = gimple_assign_rhs_code (stmt);
3517 tree type = TREE_TYPE (lhs);
3518 tree rhs1 = gimple_assign_rhs1 (stmt);
3519 switch (code)
3521 CASE_CONVERT:
3522 /* Match arithmetic done in a different type where we can easily
3523 substitute the result from some earlier sign-changed or widened
3524 operation. */
3525 if (INTEGRAL_TYPE_P (type)
3526 && TREE_CODE (rhs1) == SSA_NAME
3527 /* We only handle sign-changes or zero-extension -> & mask. */
3528 && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
3529 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
3530 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
3532 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
3533 if (def
3534 && (gimple_assign_rhs_code (def) == PLUS_EXPR
3535 || gimple_assign_rhs_code (def) == MINUS_EXPR
3536 || gimple_assign_rhs_code (def) == MULT_EXPR))
3538 tree ops[3] = {};
3539 /* Either we have the op widened available. */
3540 ops[0] = valueized_wider_op (type,
3541 gimple_assign_rhs1 (def));
3542 if (ops[0])
3543 ops[1] = valueized_wider_op (type,
3544 gimple_assign_rhs2 (def));
3545 if (ops[0] && ops[1])
3547 ops[0] = vn_nary_op_lookup_pieces
3548 (2, gimple_assign_rhs_code (def), type, ops, NULL);
3549 /* We have wider operation available. */
3550 if (ops[0])
3552 unsigned lhs_prec = TYPE_PRECISION (type);
3553 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
3554 if (lhs_prec == rhs_prec)
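/* Same precision: the conversion is only a sign change, so the wider
   result can be reused via a plain NOP_EXPR.  */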
3556 ops[1] = NULL_TREE;
3557 result = vn_nary_build_or_lookup (NOP_EXPR,
3558 type, ops);
3559 if (result)
3561 bool changed = set_ssa_val_to (lhs, result);
3562 vn_nary_op_insert_stmt (stmt, result);
3563 return changed;
3566 else
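/* Zero-extension: mask the wider result down to the narrow precision
   with a BIT_AND_EXPR.  */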
3568 ops[1] = wide_int_to_tree (type,
3569 wi::mask (rhs_prec, false,
3570 lhs_prec));
3571 result = vn_nary_build_or_lookup (BIT_AND_EXPR,
3572 TREE_TYPE (lhs),
3573 ops);
3574 if (result)
3576 bool changed = set_ssa_val_to (lhs, result);
3577 vn_nary_op_insert_stmt (stmt, result);
3578 return changed;
3585 default:;
3588 bool changed = set_ssa_val_to (lhs, lhs);
3589 vn_nary_op_insert_stmt (stmt, lhs);
3590 return changed;
3593 /* Visit a call STMT storing into LHS. Return true if the value number
3594 of the LHS has changed as a result. */
3596 static bool
3597 visit_reference_op_call (tree lhs, gcall *stmt)
3599 bool changed = false;
3600 struct vn_reference_s vr1;
3601 vn_reference_t vnresult = NULL;
3602 tree vdef = gimple_vdef (stmt);
3604 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3605 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3606 lhs = NULL_TREE;
3608 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3609 if (vnresult)
3611 if (vnresult->result_vdef && vdef)
3612 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3613 else if (vdef)
3614 /* If the call was discovered to be pure or const reflect
3615 that as far as possible. */
3616 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
3618 if (!vnresult->result && lhs)
3619 vnresult->result = lhs;
3621 if (vnresult->result && lhs)
3622 changed |= set_ssa_val_to (lhs, vnresult->result);
3624 else
3626 vn_reference_t vr2;
3627 vn_reference_s **slot;
3628 tree vdef_val = vdef;
3629 if (vdef)
3631 /* If we value numbered an indirect call's function to
3632 one not clobbering memory, value number its VDEF to its
3633 VUSE. */
3634 tree fn = gimple_call_fn (stmt);
3635 if (fn && TREE_CODE (fn) == SSA_NAME)
3637 fn = SSA_VAL (fn);
3638 if (TREE_CODE (fn) == ADDR_EXPR
3639 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3640 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
3641 & (ECF_CONST | ECF_PURE)))
3642 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
3644 changed |= set_ssa_val_to (vdef, vdef_val);
3646 if (lhs)
3647 changed |= set_ssa_val_to (lhs, lhs);
3648 vr2 = current_info->references_pool->allocate ();
3649 vr2->vuse = vr1.vuse;
3650 /* As we are not walking the virtual operand chain we know the
3651 shared_lookup_references are still original so we can re-use
3652 them here. */
3653 vr2->operands = vr1.operands.copy ();
3654 vr2->type = vr1.type;
3655 vr2->set = vr1.set;
3656 vr2->hashcode = vr1.hashcode;
3657 vr2->result = lhs;
3658 vr2->result_vdef = vdef_val;
3659 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3660 INSERT);
3661 gcc_assert (!*slot);
3662 *slot = vr2;
3665 return changed;
3668 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3669 and return true if the value number of the LHS has changed as a result. */
3671 static bool
3672 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3674 bool changed = false;
3675 tree last_vuse;
3676 tree result;
3678 last_vuse = gimple_vuse (stmt);
3679 last_vuse_ptr = &last_vuse;
3680 result = vn_reference_lookup (op, gimple_vuse (stmt),
3681 default_vn_walk_kind, NULL, true);
3682 last_vuse_ptr = NULL;
3684 /* We handle type-punning through unions by value-numbering based
3685 on offset and size of the access. Be prepared to handle a
3686 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
3687 if (result
3688 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3690 /* We will be setting the value number of lhs to the value number
3691 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3692 So first simplify and lookup this expression to see if it
3693 is already available. */
3694 code_helper rcode = VIEW_CONVERT_EXPR;
3695 tree ops[3] = { result };
3696 result = vn_nary_build_or_lookup (rcode, TREE_TYPE (op), ops);
3699 if (result)
3700 changed = set_ssa_val_to (lhs, result);
3701 else
3703 changed = set_ssa_val_to (lhs, lhs);
3704 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3707 return changed;
3711 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3712 and return true if the value number of the LHS has changed as a result. */
3714 static bool
3715 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3717 bool changed = false;
3718 vn_reference_t vnresult = NULL;
3719 tree assign;
3720 bool resultsame = false;
3721 tree vuse = gimple_vuse (stmt);
3722 tree vdef = gimple_vdef (stmt);
3724 if (TREE_CODE (op) == SSA_NAME)
3725 op = SSA_VAL (op);
3727 /* First we want to lookup using the *vuses* from the store and see
3728 if the last store to this location with the same address
3729 had the same value.
3731 The vuses represent the memory state before the store. If the
3732 memory state, address, and value of the store is the same as the
3733 last store to this location, then this store will produce the
3734 same memory state as that store.
3736 In this case the vdef versions for this store are value numbered to those
3737 vuse versions, since they represent the same memory state after
3738 this store.
3740 Otherwise, the vdefs for the store are used when inserting into
3741 the table, since the store generates a new memory state. */
3743 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
3744 if (vnresult
3745 && vnresult->result)
3747 tree result = vnresult->result;
3748 if (TREE_CODE (result) == SSA_NAME)
3749 result = SSA_VAL (result);
3750 resultsame = expressions_equal_p (result, op);
3751 if (resultsame)
3753 /* If the TBAA state isn't compatible for downstream reads
3754 we cannot value-number the VDEFs the same. */
3755 alias_set_type set = get_alias_set (lhs);
3756 if (vnresult->set != set
3757 && ! alias_set_subset_of (set, vnresult->set))
3758 resultsame = false;
3762 if (!resultsame)
3764 /* Only perform the following when being called from PRE
3765 which embeds tail merging. */
3766 if (default_vn_walk_kind == VN_WALK)
3768 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3769 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3770 if (vnresult)
3772 VN_INFO (vdef)->use_processed = true;
3773 return set_ssa_val_to (vdef, vnresult->result_vdef);
3777 if (dump_file && (dump_flags & TDF_DETAILS))
3779 fprintf (dump_file, "No store match\n");
3780 fprintf (dump_file, "Value numbering store ");
3781 print_generic_expr (dump_file, lhs);
3782 fprintf (dump_file, " to ");
3783 print_generic_expr (dump_file, op);
3784 fprintf (dump_file, "\n");
3786 /* Have to set value numbers before insert, since insert is
3787 going to valueize the references in-place. */
3788 if (vdef)
3789 changed |= set_ssa_val_to (vdef, vdef);
3791 /* Do not insert structure copies into the tables. */
3792 if (is_gimple_min_invariant (op)
3793 || is_gimple_reg (op))
3794 vn_reference_insert (lhs, op, vdef, NULL);
3796 /* Only perform the following when being called from PRE
3797 which embeds tail merging. */
3798 if (default_vn_walk_kind == VN_WALK)
3800 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3801 vn_reference_insert (assign, lhs, vuse, vdef);
3804 else
3806 /* We had a match, so value number the vdef to have the value
3807 number of the vuse it came from. */
3809 if (dump_file && (dump_flags & TDF_DETAILS))
3810 fprintf (dump_file, "Store matched earlier value, "
3811 "value numbering store vdefs to matching vuses.\n");
3813 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3816 return changed;
3819 /* Visit and value number PHI, return true if the value number
3820 changed. */
3822 static bool
3823 visit_phi (gimple *phi)
3825 bool changed = false;
3826 tree result;
3827 tree sameval = VN_TOP;
3828 bool allsame = true;
3829 unsigned n_executable = 0;
3831 /* TODO: We could check for this in init_sccvn, and replace this
3832 with a gcc_assert. */
3833 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3834 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3836 /* See if all non-TOP arguments have the same value. TOP is
3837 equivalent to everything, so we can ignore it. */
3838 edge_iterator ei;
3839 edge e;
3840 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3841 if (e->flags & EDGE_EXECUTABLE)
3843 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3845 ++n_executable;
3846 if (TREE_CODE (def) == SSA_NAME)
3847 def = SSA_VAL (def);
3848 if (def == VN_TOP)
3849 continue;
3850 if (sameval == VN_TOP)
3851 sameval = def;
3852 else if (!expressions_equal_p (def, sameval))
3854 allsame = false;
3855 break;
3859 /* If none of the edges was executable or all incoming values are
3860 undefined keep the value-number at VN_TOP. If only a single edge
3861 is executable use its value. */
3862 if (sameval == VN_TOP
3863 || n_executable == 1)
3864 return set_ssa_val_to (PHI_RESULT (phi), sameval);
3866 /* First see if it is equivalent to a phi node in this block. We prefer
3867 this as it allows IV elimination - see PRs 66502 and 67167. */
3868 result = vn_phi_lookup (phi);
3869 if (result)
3870 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3871 /* Otherwise all value numbered to the same value, the phi node has that
3872 value. */
3873 else if (allsame)
3874 changed = set_ssa_val_to (PHI_RESULT (phi), sameval);
3875 else
3877 vn_phi_insert (phi, PHI_RESULT (phi));
3878 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3881 return changed;
3884 /* Try to simplify RHS using equivalences and constant folding. */
3886 static tree
3887 try_to_simplify (gassign *stmt)
3889 enum tree_code code = gimple_assign_rhs_code (stmt);
3890 tree tem;
3892 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
3893 in this case, there is no point in doing extra work. */
3894 if (code == SSA_NAME)
3895 return NULL_TREE;
3897 /* First try constant folding based on our current lattice. */
3898 mprts_hook = vn_lookup_simplify_result;
3899 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3900 mprts_hook = NULL;
3901 if (tem
3902 && (TREE_CODE (tem) == SSA_NAME
3903 || is_gimple_min_invariant (tem)))
3904 return tem;
3906 return NULL_TREE;
3909 /* Visit and value number USE, return true if the value number
3910 changed. */
3912 static bool
3913 visit_use (tree use)
3915 bool changed = false;
3916 gimple *stmt = SSA_NAME_DEF_STMT (use);
3918 mark_use_processed (use);
3920 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3921 if (dump_file && (dump_flags & TDF_DETAILS)
3922 && !SSA_NAME_IS_DEFAULT_DEF (use))
3924 fprintf (dump_file, "Value numbering ");
3925 print_generic_expr (dump_file, use);
3926 fprintf (dump_file, " stmt = ");
3927 print_gimple_stmt (dump_file, stmt, 0);
3930 /* Handle uninitialized uses. */
3931 if (SSA_NAME_IS_DEFAULT_DEF (use))
3932 changed = set_ssa_val_to (use, use);
3933 else if (gimple_code (stmt) == GIMPLE_PHI)
3934 changed = visit_phi (stmt);
3935 else if (gimple_has_volatile_ops (stmt))
3936 changed = defs_to_varying (stmt);
3937 else if (gassign *ass = dyn_cast <gassign *> (stmt))
3939 enum tree_code code = gimple_assign_rhs_code (ass);
3940 tree lhs = gimple_assign_lhs (ass);
3941 tree rhs1 = gimple_assign_rhs1 (ass);
3942 tree simplified;
3944 /* Shortcut for copies. Simplifying copies is pointless,
3945 since we copy the expression and value they represent. */
3946 if (code == SSA_NAME
3947 && TREE_CODE (lhs) == SSA_NAME)
3949 changed = visit_copy (lhs, rhs1);
3950 goto done;
3952 simplified = try_to_simplify (ass);
3953 if (simplified)
3955 if (dump_file && (dump_flags & TDF_DETAILS))
3957 fprintf (dump_file, "RHS ");
3958 print_gimple_expr (dump_file, ass, 0);
3959 fprintf (dump_file, " simplified to ");
3960 print_generic_expr (dump_file, simplified);
3961 fprintf (dump_file, "\n");
3964 /* Setting value numbers to constants will occasionally
3965 screw up phi congruence because constants are not
3966 uniquely associated with a single ssa name that can be
3967 looked up. */
3968 if (simplified
3969 && is_gimple_min_invariant (simplified)
3970 && TREE_CODE (lhs) == SSA_NAME)
3972 changed = set_ssa_val_to (lhs, simplified);
3973 goto done;
3975 else if (simplified
3976 && TREE_CODE (simplified) == SSA_NAME
3977 && TREE_CODE (lhs) == SSA_NAME)
3979 changed = visit_copy (lhs, simplified);
3980 goto done;
3983 if ((TREE_CODE (lhs) == SSA_NAME
3984 /* We can substitute SSA_NAMEs that are live over
3985 abnormal edges with their constant value. */
3986 && !(gimple_assign_copy_p (ass)
3987 && is_gimple_min_invariant (rhs1))
3988 && !(simplified
3989 && is_gimple_min_invariant (simplified))
3990 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3991 /* Stores or copies from SSA_NAMEs that are live over
3992 abnormal edges are a problem. */
3993 || (code == SSA_NAME
3994 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3995 changed = defs_to_varying (ass);
3996 else if (REFERENCE_CLASS_P (lhs)
3997 || DECL_P (lhs))
3998 changed = visit_reference_op_store (lhs, rhs1, ass);
3999 else if (TREE_CODE (lhs) == SSA_NAME)
4001 if ((gimple_assign_copy_p (ass)
4002 && is_gimple_min_invariant (rhs1))
4003 || (simplified
4004 && is_gimple_min_invariant (simplified)))
4006 if (simplified)
4007 changed = set_ssa_val_to (lhs, simplified);
4008 else
4009 changed = set_ssa_val_to (lhs, rhs1);
4011 else
4013 /* Visit the original statement. */
4014 switch (vn_get_stmt_kind (ass))
4016 case VN_NARY:
4017 changed = visit_nary_op (lhs, ass);
4018 break;
4019 case VN_REFERENCE:
4020 changed = visit_reference_op_load (lhs, rhs1, ass);
4021 break;
4022 default:
4023 changed = defs_to_varying (ass);
4024 break;
4028 else
4029 changed = defs_to_varying (ass);
4031 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4033 tree lhs = gimple_call_lhs (call_stmt);
4034 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4036 /* Try constant folding based on our current lattice. */
4037 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4038 vn_valueize);
4039 if (simplified)
4041 if (dump_file && (dump_flags & TDF_DETAILS))
4043 fprintf (dump_file, "call ");
4044 print_gimple_expr (dump_file, call_stmt, 0);
4045 fprintf (dump_file, " simplified to ");
4046 print_generic_expr (dump_file, simplified);
4047 fprintf (dump_file, "\n");
4050 /* Setting value numbers to constants will occasionally
4051 screw up phi congruence because constants are not
4052 uniquely associated with a single ssa name that can be
4053 looked up. */
4054 if (simplified
4055 && is_gimple_min_invariant (simplified))
4057 changed = set_ssa_val_to (lhs, simplified);
4058 if (gimple_vdef (call_stmt))
4059 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4060 SSA_VAL (gimple_vuse (call_stmt)));
4061 goto done;
4063 else if (simplified
4064 && TREE_CODE (simplified) == SSA_NAME)
4066 changed = visit_copy (lhs, simplified);
4067 if (gimple_vdef (call_stmt))
4068 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4069 SSA_VAL (gimple_vuse (call_stmt)));
4070 goto done;
4072 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4074 changed = defs_to_varying (call_stmt);
4075 goto done;
4079 /* Pick up flags from a devirtualization target. */
4080 tree fn = gimple_call_fn (stmt);
4081 int extra_fnflags = 0;
4082 if (fn && TREE_CODE (fn) == SSA_NAME)
4084 fn = SSA_VAL (fn);
4085 if (TREE_CODE (fn) == ADDR_EXPR
4086 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4087 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4089 if (!gimple_call_internal_p (call_stmt)
4090 && (/* Calls to the same function with the same vuse
4091 and the same operands do not necessarily return the same
4092 value, unless they're pure or const. */
4093 ((gimple_call_flags (call_stmt) | extra_fnflags)
4094 & (ECF_PURE | ECF_CONST))
4095 /* If calls have a vdef, subsequent calls won't have
4096 the same incoming vuse. So, if 2 calls with vdef have the
4097 same vuse, we know they're not subsequent.
4098 We can give 2 calls to the same function with the same
4099 vuse and the same operands, which are therefore not
4100 subsequent, the same value number, because there is no code
4101 in the program that can compare the 2 values... */
4102 || (gimple_vdef (call_stmt)
4103 /* ... unless the call returns a pointer which does
4104 not alias with anything else, in which case the
4105 information that the values are distinct is encoded
4106 in the IL. */
4107 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4108 /* Only perform the following when being called from PRE
4109 which embeds tail merging. */
4110 && default_vn_walk_kind == VN_WALK)))
4111 changed = visit_reference_op_call (lhs, call_stmt);
4112 else
4113 changed = defs_to_varying (call_stmt);
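/* The vdef case above covers e.g. two calls to the same function in the
two arms of an if that see the same vuse: they can never both execute
on one path, so no code can observe their results differing, and giving
them the same value number is what lets PRE tail-merge the arms. */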
4115 else
4116 changed = defs_to_varying (stmt);
4117 done:
4118 return changed;
4121 /* Compare two operands by reverse postorder index. */
4123 static int
4124 compare_ops (const void *pa, const void *pb)
4126 const tree opa = *((const tree *)pa);
4127 const tree opb = *((const tree *)pb);
4128 gimple *opstmta = SSA_NAME_DEF_STMT (opa);
4129 gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
4130 basic_block bba;
4131 basic_block bbb;
4133 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
4134 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4135 else if (gimple_nop_p (opstmta))
4136 return -1;
4137 else if (gimple_nop_p (opstmtb))
4138 return 1;
4140 bba = gimple_bb (opstmta);
4141 bbb = gimple_bb (opstmtb);
4143 if (!bba && !bbb)
4144 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4145 else if (!bba)
4146 return -1;
4147 else if (!bbb)
4148 return 1;
4150 if (bba == bbb)
4152 if (gimple_code (opstmta) == GIMPLE_PHI
4153 && gimple_code (opstmtb) == GIMPLE_PHI)
4154 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4155 else if (gimple_code (opstmta) == GIMPLE_PHI)
4156 return -1;
4157 else if (gimple_code (opstmtb) == GIMPLE_PHI)
4158 return 1;
4159 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
4160 return gimple_uid (opstmta) - gimple_uid (opstmtb);
4161 else
4162 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4164 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
4167 /* Sort an array containing members of a strongly connected component
4168 SCC so that the members are ordered by RPO number.
4169 This means that when the sort is complete, iterating through the
4170 array will give you the members in RPO order. */
4172 static void
4173 sort_scc (vec<tree> scc)
4175 scc.qsort (compare_ops);
4178 /* Insert the no longer used nary ONARY into the hash table of INFO. */
4180 static void
4181 copy_nary (vn_nary_op_t onary, vn_tables_t info)
4183 size_t size = sizeof_vn_nary_op (onary->length);
4184 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
4185 &info->nary_obstack);
4186 memcpy (nary, onary, size);
4187 vn_nary_op_insert_into (nary, info->nary, false);
4190 /* Insert the no longer used phi OPHI into the hash table of INFO. */
4192 static void
4193 copy_phi (vn_phi_t ophi, vn_tables_t info)
4195 vn_phi_t phi = info->phis_pool->allocate ();
4196 vn_phi_s **slot;
4197 memcpy (phi, ophi, sizeof (*phi));
4198 ophi->phiargs.create (0);
4199 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
4200 gcc_assert (!*slot);
4201 *slot = phi;
4204 /* Insert the no longer used reference OREF into the hash table of INFO. */
4206 static void
4207 copy_reference (vn_reference_t oref, vn_tables_t info)
4209 vn_reference_t ref;
4210 vn_reference_s **slot;
4211 ref = info->references_pool->allocate ();
4212 memcpy (ref, oref, sizeof (*ref));
4213 oref->operands.create (0);
4214 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
4215 if (*slot)
4216 free_reference (*slot);
4217 *slot = ref;
4220 /* Process a strongly connected component in the SSA graph. */
4222 static void
4223 process_scc (vec<tree> scc)
4225 tree var;
4226 unsigned int i;
4227 unsigned int iterations = 0;
4228 bool changed = true;
4229 vn_nary_op_iterator_type hin;
4230 vn_phi_iterator_type hip;
4231 vn_reference_iterator_type hir;
4232 vn_nary_op_t nary;
4233 vn_phi_t phi;
4234 vn_reference_t ref;
4236 /* If the SCC has a single member, just visit it. */
4237 if (scc.length () == 1)
4239 tree use = scc[0];
4240 if (VN_INFO (use)->use_processed)
4241 return;
4242 /* We need to make sure it doesn't form a cycle itself, which can
4243 happen for self-referential PHI nodes. In that case we would
4244 end up inserting an expression with VN_TOP operands into the
4245 valid table which makes us derive bogus equivalences later.
4246 The cheapest way to check this is to assume it for all PHI nodes. */
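/* A self-referential PHI is e.g. i_1 = PHI <i_1(latch), 0(entry)> for a
variable that is never changed inside the loop. */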
4247 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
4248 /* Fallthru to iteration. */ ;
4249 else
4251 visit_use (use);
4252 return;
4256 if (dump_file && (dump_flags & TDF_DETAILS))
4257 print_scc (dump_file, scc);
4259 /* Iterate over the SCC with the optimistic table until it stops
4260 changing. */
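/* The iteration terminates because set_ssa_val_to is only expected to
move values down the lattice (VN_TOP -> constant or SSA name -> VARYING),
never back up. */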
4261 current_info = optimistic_info;
4262 while (changed)
4264 changed = false;
4265 iterations++;
4266 if (dump_file && (dump_flags & TDF_DETAILS))
4267 fprintf (dump_file, "Starting iteration %d\n", iterations);
4268 /* As we are value-numbering optimistically we have to
4269 clear the expression tables and the simplified expressions
4270 in each iteration until we converge. */
4271 optimistic_info->nary->empty ();
4272 optimistic_info->phis->empty ();
4273 optimistic_info->references->empty ();
4274 obstack_free (&optimistic_info->nary_obstack, NULL);
4275 gcc_obstack_init (&optimistic_info->nary_obstack);
4276 optimistic_info->phis_pool->release ();
4277 optimistic_info->references_pool->release ();
4278 FOR_EACH_VEC_ELT (scc, i, var)
4279 gcc_assert (!VN_INFO (var)->needs_insertion
4280 && VN_INFO (var)->expr == NULL);
4281 FOR_EACH_VEC_ELT (scc, i, var)
4282 changed |= visit_use (var);
4285 if (dump_file && (dump_flags & TDF_DETAILS))
4286 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4287 statistics_histogram_event (cfun, "SCC iterations", iterations);
4289 /* Finally, copy the contents of the no longer used optimistic
4290 table to the valid table. */
4291 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
4292 copy_nary (nary, valid_info);
4293 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
4294 copy_phi (phi, valid_info);
4295 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
4296 ref, vn_reference_t, hir)
4297 copy_reference (ref, valid_info);
4299 current_info = valid_info;
4303 /* Pop the components of the found SCC for NAME off the SCC stack
4304 and process them. If the SCC exceeds the size limit we give up
4305 on value-numbering it and drop its members to varying. */
4307 static void
4308 extract_and_process_scc_for_name (tree name)
4310 auto_vec<tree> scc;
4311 tree x;
4313 /* Found an SCC, pop the components off the SCC stack and
4314 process them. */
4317 x = sccstack.pop ();
4319 VN_INFO (x)->on_sccstack = false;
4320 scc.safe_push (x);
4321 } while (x != name);
4323 /* Drop all defs in the SCC to varying in case an SCC turns out to be
4324 incredibly large.
4325 ??? Just switch to a non-optimistic mode that avoids any iteration. */
4326 if (scc.length () > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4328 if (dump_file)
4330 print_scc (dump_file, scc);
4331 fprintf (dump_file, "WARNING: Giving up value-numbering SCC due to "
4332 "size %u exceeding %u\n", scc.length (),
4333 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4335 tree var;
4336 unsigned i;
4337 FOR_EACH_VEC_ELT (scc, i, var)
4339 gimple *def = SSA_NAME_DEF_STMT (var);
4340 mark_use_processed (var);
4341 if (SSA_NAME_IS_DEFAULT_DEF (var)
4342 || gimple_code (def) == GIMPLE_PHI)
4343 set_ssa_val_to (var, var);
4344 else
4345 defs_to_varying (def);
4347 return;
4350 if (scc.length () > 1)
4351 sort_scc (scc);
4353 process_scc (scc);
4356 /* Depth first search on NAME to discover and process SCC's in the SSA
4357 graph.
4358 Execution of this algorithm relies on the fact that the SCC's are
4359 popped off the stack in topological order.
4360 Overly large SCCs are not iterated; extract_and_process_scc_for_name
4361 gives up on them and drops their members to varying. */
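/* This is essentially Tarjan's SCC algorithm (dfsnum/low/on_sccstack),
made iterative with the explicit namevec/itervec stacks below so that
long use-def chains do not cause deep recursion. */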
4363 static void
4364 DFS (tree name)
4366 auto_vec<ssa_op_iter> itervec;
4367 auto_vec<tree> namevec;
4368 use_operand_p usep = NULL;
4369 gimple *defstmt;
4370 tree use;
4371 ssa_op_iter iter;
4373 start_over:
4374 /* SCC info */
4375 VN_INFO (name)->dfsnum = next_dfs_num++;
4376 VN_INFO (name)->visited = true;
4377 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4379 sccstack.safe_push (name);
4380 VN_INFO (name)->on_sccstack = true;
4381 defstmt = SSA_NAME_DEF_STMT (name);
4383 /* Recursively DFS on our operands, looking for SCC's. */
4384 if (!gimple_nop_p (defstmt))
4386 /* Push a new iterator. */
4387 if (gphi *phi = dyn_cast <gphi *> (defstmt))
4388 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4389 else
4390 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4392 else
4393 clear_and_done_ssa_iter (&iter);
4395 while (1)
4397 /* If we are done processing uses of a name, go up the stack
4398 of iterators and process SCCs as we found them. */
4399 if (op_iter_done (&iter))
4401 /* See if we found an SCC. */
4402 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4403 extract_and_process_scc_for_name (name);
4405 /* Check if we are done. */
4406 if (namevec.is_empty ())
4407 return;
4409 /* Restore the last use walker and continue walking there. */
4410 use = name;
4411 name = namevec.pop ();
4412 memcpy (&iter, &itervec.last (),
4413 sizeof (ssa_op_iter));
4414 itervec.pop ();
4415 goto continue_walking;
4418 use = USE_FROM_PTR (usep);
4420 /* Since we handle phi nodes, we will sometimes get
4421 invariants in the use expression. */
4422 if (TREE_CODE (use) == SSA_NAME)
4424 if (! (VN_INFO (use)->visited))
4426 /* Recurse by pushing the current use walking state on
4427 the stack and starting over. */
4428 itervec.safe_push (iter);
4429 namevec.safe_push (name);
4430 name = use;
4431 goto start_over;
4433 continue_walking:
4434 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4435 VN_INFO (use)->low);
4437 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4438 && VN_INFO (use)->on_sccstack)
4440 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4441 VN_INFO (name)->low);
4445 usep = op_iter_next_use (&iter);
4449 /* Allocate a value number table. */
4451 static void
4452 allocate_vn_table (vn_tables_t table)
4454 table->phis = new vn_phi_table_type (23);
4455 table->nary = new vn_nary_op_table_type (23);
4456 table->references = new vn_reference_table_type (23);
4458 gcc_obstack_init (&table->nary_obstack);
4459 table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4460 table->references_pool = new object_allocator<vn_reference_s>
4461 ("VN references");
4464 /* Free a value number table. */
4466 static void
4467 free_vn_table (vn_tables_t table)
4469 delete table->phis;
4470 table->phis = NULL;
4471 delete table->nary;
4472 table->nary = NULL;
4473 delete table->references;
4474 table->references = NULL;
4475 obstack_free (&table->nary_obstack, NULL);
4476 delete table->phis_pool;
4477 delete table->references_pool;
4480 static void
4481 init_scc_vn (void)
4483 int j;
4484 int *rpo_numbers_temp;
4486 calculate_dominance_info (CDI_DOMINATORS);
4487 mark_dfs_back_edges ();
4489 sccstack.create (0);
4490 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4492 constant_value_ids = BITMAP_ALLOC (NULL);
4494 next_dfs_num = 1;
4495 next_value_id = 1;
4497 vn_ssa_aux_table.create (num_ssa_names + 1);
4498 /* create doesn't actually grow the vector to the right size, it just
4499 preallocates the space to do so; safe_grow_cleared does the growing. */
4500 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4501 gcc_obstack_init (&vn_ssa_aux_obstack);
4503 shared_lookup_phiargs.create (0);
4504 shared_lookup_references.create (0);
4505 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4506 rpo_numbers_temp =
4507 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4508 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4510 /* rpo_numbers_temp is an array in RPO order: rpo[i] = bb means that
4511 the i'th block in RPO order is bb. We want to map bb indices to RPO
4512 numbers, so we need to invert this array. */
4513 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4514 rpo_numbers[rpo_numbers_temp[j]] = j;
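/* E.g. if rpo_numbers_temp is {2, 5, 3} then after this loop
rpo_numbers[2] == 0, rpo_numbers[5] == 1 and rpo_numbers[3] == 2. */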
4516 XDELETE (rpo_numbers_temp);
4518 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
4520 renumber_gimple_stmt_uids ();
4522 /* Create the valid and optimistic value numbering tables. */
4523 valid_info = XCNEW (struct vn_tables_s);
4524 allocate_vn_table (valid_info);
4525 optimistic_info = XCNEW (struct vn_tables_s);
4526 allocate_vn_table (optimistic_info);
4527 current_info = valid_info;
4529 /* Create the VN_INFO structures, and initialize value numbers to
4530 TOP or VARYING for parameters. */
4531 size_t i;
4532 tree name;
4534 FOR_EACH_SSA_NAME (i, name, cfun)
4536 VN_INFO_GET (name)->valnum = VN_TOP;
4537 VN_INFO (name)->needs_insertion = false;
4538 VN_INFO (name)->expr = NULL;
4539 VN_INFO (name)->value_id = 0;
4541 if (!SSA_NAME_IS_DEFAULT_DEF (name))
4542 continue;
4544 switch (TREE_CODE (SSA_NAME_VAR (name)))
4546 case VAR_DECL:
4547 /* Undefined vars keep TOP. */
4548 break;
4550 case PARM_DECL:
4551 /* Parameters are VARYING but we can record a condition
4552 if we know it is a non-NULL pointer. */
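/* For instance, a pointer parameter of a function declared with
__attribute__((nonnull)) gets "parm != 0 == true" recorded here, so a
later test of that parameter can be folded by the nary lookup. */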
4553 VN_INFO (name)->visited = true;
4554 VN_INFO (name)->valnum = name;
4555 if (POINTER_TYPE_P (TREE_TYPE (name))
4556 && nonnull_arg_p (SSA_NAME_VAR (name)))
4558 tree ops[2];
4559 ops[0] = name;
4560 ops[1] = build_int_cst (TREE_TYPE (name), 0);
4561 vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4562 boolean_true_node, 0);
4563 if (dump_file && (dump_flags & TDF_DETAILS))
4565 fprintf (dump_file, "Recording ");
4566 print_generic_expr (dump_file, name, TDF_SLIM);
4567 fprintf (dump_file, " != 0\n");
4570 break;
4572 case RESULT_DECL:
4573 /* If the result is passed by invisible reference, the default
4574 def is initialized; otherwise it's uninitialized. */
4575 if (DECL_BY_REFERENCE (SSA_NAME_VAR (name)))
4577 VN_INFO (name)->visited = true;
4578 VN_INFO (name)->valnum = name;
4580 break;
4582 default:
4583 gcc_unreachable ();
4588 /* Restore SSA info that has been reset on value leaders. */
4590 void
4591 scc_vn_restore_ssa_info (void)
4593 unsigned i;
4594 tree name;
4596 FOR_EACH_SSA_NAME (i, name, cfun)
4598 if (has_VN_INFO (name))
4600 if (VN_INFO (name)->needs_insertion)
4602 else if (POINTER_TYPE_P (TREE_TYPE (name))
4603 && VN_INFO (name)->info.ptr_info)
4604 SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4605 else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4606 && VN_INFO (name)->info.range_info)
4608 SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4609 SSA_NAME_ANTI_RANGE_P (name)
4610 = VN_INFO (name)->range_info_anti_range_p;
4616 void
4617 free_scc_vn (void)
4619 size_t i;
4620 tree name;
4622 delete constant_to_value_id;
4623 constant_to_value_id = NULL;
4624 BITMAP_FREE (constant_value_ids);
4625 shared_lookup_phiargs.release ();
4626 shared_lookup_references.release ();
4627 XDELETEVEC (rpo_numbers);
4629 FOR_EACH_SSA_NAME (i, name, cfun)
4631 if (has_VN_INFO (name)
4632 && VN_INFO (name)->needs_insertion)
4633 release_ssa_name (name);
4635 obstack_free (&vn_ssa_aux_obstack, NULL);
4636 vn_ssa_aux_table.release ();
4638 sccstack.release ();
4639 free_vn_table (valid_info);
4640 XDELETE (valid_info);
4641 free_vn_table (optimistic_info);
4642 XDELETE (optimistic_info);
4644 BITMAP_FREE (const_parms);
4647 /* Set *ID according to RESULT. */
4649 static void
4650 set_value_id_for_result (tree result, unsigned int *id)
4652 if (result && TREE_CODE (result) == SSA_NAME)
4653 *id = VN_INFO (result)->value_id;
4654 else if (result && is_gimple_min_invariant (result))
4655 *id = get_or_alloc_constant_value_id (result);
4656 else
4657 *id = get_next_value_id ();
4660 /* Set the value ids in the valid hash tables. */
4662 static void
4663 set_hashtable_value_ids (void)
4665 vn_nary_op_iterator_type hin;
4666 vn_phi_iterator_type hip;
4667 vn_reference_iterator_type hir;
4668 vn_nary_op_t vno;
4669 vn_reference_t vr;
4670 vn_phi_t vp;
4672 /* Now set the value ids of the things we had put in the hash
4673 table. */
4675 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4676 set_value_id_for_result (vno->result, &vno->value_id);
4678 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4679 set_value_id_for_result (vp->result, &vp->value_id);
4681 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4682 hir)
4683 set_value_id_for_result (vr->result, &vr->value_id);
4686 class sccvn_dom_walker : public dom_walker
4688 public:
4689 sccvn_dom_walker ()
4690 : dom_walker (CDI_DOMINATORS, true), cond_stack (0) {}
4692 virtual edge before_dom_children (basic_block);
4693 virtual void after_dom_children (basic_block);
4695 void record_cond (basic_block,
4696 enum tree_code code, tree lhs, tree rhs, bool value);
4697 void record_conds (basic_block,
4698 enum tree_code code, tree lhs, tree rhs, bool value);
4700 auto_vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4701 cond_stack;
4704 /* Record a temporary condition for the BB and its dominated blocks. */
4706 void
4707 sccvn_dom_walker::record_cond (basic_block bb,
4708 enum tree_code code, tree lhs, tree rhs,
4709 bool value)
4711 tree ops[2] = { lhs, rhs };
4712 vn_nary_op_t old = NULL;
4713 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4714 current_info->nary->remove_elt_with_hash (old, old->hashcode);
4715 vn_nary_op_t cond
4716 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4717 value
4718 ? boolean_true_node
4719 : boolean_false_node, 0);
4720 if (dump_file && (dump_flags & TDF_DETAILS))
4722 fprintf (dump_file, "Recording temporarily ");
4723 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4724 fprintf (dump_file, " %s ", get_tree_code_name (code));
4725 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4726 fprintf (dump_file, " == %s%s\n",
4727 value ? "true" : "false",
4728 old ? " (old entry saved)" : "");
4730 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4733 /* Record temporary conditions for the BB and its dominated blocks
4734 according to LHS CODE RHS == VALUE and its dominated conditions. */
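/* E.g. for a taken edge testing a_1 < b_2 this also records
a_1 <= b_2 == true, a_1 != b_2 == true and a_1 == b_2 == false. */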
4736 void
4737 sccvn_dom_walker::record_conds (basic_block bb,
4738 enum tree_code code, tree lhs, tree rhs,
4739 bool value)
4741 /* Record the original condition. */
4742 record_cond (bb, code, lhs, rhs, value);
4744 if (!value)
4745 return;
4747 /* Record dominated conditions if the condition is true. Note that
4748 the inversion is already recorded. */
4749 switch (code)
4751 case LT_EXPR:
4752 case GT_EXPR:
4753 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4754 record_cond (bb, NE_EXPR, lhs, rhs, true);
4755 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4756 break;
4758 case EQ_EXPR:
4759 record_cond (bb, LE_EXPR, lhs, rhs, true);
4760 record_cond (bb, GE_EXPR, lhs, rhs, true);
4761 record_cond (bb, LT_EXPR, lhs, rhs, false);
4762 record_cond (bb, GT_EXPR, lhs, rhs, false);
4763 break;
4765 default:
4766 break;
4770 /* Restore expressions and values derived from conditionals. */
4772 void
4773 sccvn_dom_walker::after_dom_children (basic_block bb)
4775 while (!cond_stack.is_empty ()
4776 && cond_stack.last ().first == bb)
4778 vn_nary_op_t cond = cond_stack.last ().second.first;
4779 vn_nary_op_t old = cond_stack.last ().second.second;
4780 current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4781 if (old)
4782 vn_nary_op_insert_into (old, current_info->nary, false);
4783 cond_stack.pop ();
4787 /* Value number all statements in BB. */
4789 edge
4790 sccvn_dom_walker::before_dom_children (basic_block bb)
4792 edge e;
4793 edge_iterator ei;
4795 if (dump_file && (dump_flags & TDF_DETAILS))
4796 fprintf (dump_file, "Visiting BB %d\n", bb->index);
4798 /* If we have a single predecessor, record the equivalence from a
4799 possible condition on the predecessor edge. */
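/* E.g. when BB is only reached as the true arm of "if (x_1 == 7)",
temporarily recording x_1 == 7 (and its dominated conditions) lets the
nary lookups done while value numbering this block fold further tests
involving x_1. */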
4800 edge pred_e = NULL;
4801 FOR_EACH_EDGE (e, ei, bb->preds)
4803 /* Ignore simple backedges into this block so that we can still record
4804 conditions in loop headers. */
4805 if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
4806 continue;
4807 if (! pred_e)
4808 pred_e = e;
4809 else
4811 pred_e = NULL;
4812 break;
4815 if (pred_e)
4817 /* Check if there are multiple executable successor edges in
4818 the source block. Otherwise there is no additional info
4819 to be recorded. */
4820 edge e2;
4821 FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4822 if (e2 != pred_e
4823 && e2->flags & EDGE_EXECUTABLE)
4824 break;
4825 if (e2 && (e2->flags & EDGE_EXECUTABLE))
4827 gimple *stmt = last_stmt (pred_e->src);
4828 if (stmt
4829 && gimple_code (stmt) == GIMPLE_COND)
4831 enum tree_code code = gimple_cond_code (stmt);
4832 tree lhs = gimple_cond_lhs (stmt);
4833 tree rhs = gimple_cond_rhs (stmt);
4834 record_conds (bb, code, lhs, rhs,
4835 (pred_e->flags & EDGE_TRUE_VALUE) != 0);
4836 code = invert_tree_comparison (code, HONOR_NANS (lhs));
4837 if (code != ERROR_MARK)
4838 record_conds (bb, code, lhs, rhs,
4839 (pred_e->flags & EDGE_TRUE_VALUE) == 0);
4844 /* Value-number all defs in the basic-block. */
4845 for (gphi_iterator gsi = gsi_start_phis (bb);
4846 !gsi_end_p (gsi); gsi_next (&gsi))
4848 gphi *phi = gsi.phi ();
4849 tree res = PHI_RESULT (phi);
4850 if (!VN_INFO (res)->visited)
4851 DFS (res);
4853 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4854 !gsi_end_p (gsi); gsi_next (&gsi))
4856 ssa_op_iter i;
4857 tree op;
4858 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4859 if (!VN_INFO (op)->visited)
4860 DFS (op);
4863 /* Finally look at the last stmt. */
4864 gimple *stmt = last_stmt (bb);
4865 if (!stmt)
4866 return NULL;
4868 enum gimple_code code = gimple_code (stmt);
4869 if (code != GIMPLE_COND
4870 && code != GIMPLE_SWITCH
4871 && code != GIMPLE_GOTO)
4872 return NULL;
4874 if (dump_file && (dump_flags & TDF_DETAILS))
4876 fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
4877 print_gimple_stmt (dump_file, stmt, 0);
4880 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4881 if value-numbering can prove they are not reachable. Handling
4882 computed gotos is also possible. */
4883 tree val;
4884 switch (code)
4886 case GIMPLE_COND:
4888 tree lhs = vn_valueize (gimple_cond_lhs (stmt));
4889 tree rhs = vn_valueize (gimple_cond_rhs (stmt));
4890 val = gimple_simplify (gimple_cond_code (stmt),
4891 boolean_type_node, lhs, rhs,
4892 NULL, vn_valueize);
4893 /* If that didn't simplify to a constant see if we have recorded
4894 temporary expressions from taken edges. */
4895 if (!val || TREE_CODE (val) != INTEGER_CST)
4897 tree ops[2];
4898 ops[0] = lhs;
4899 ops[1] = rhs;
4900 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
4901 boolean_type_node, ops, NULL);
4903 break;
4905 case GIMPLE_SWITCH:
4906 val = gimple_switch_index (as_a <gswitch *> (stmt));
4907 break;
4908 case GIMPLE_GOTO:
4909 val = gimple_goto_dest (stmt);
4910 break;
4911 default:
4912 gcc_unreachable ();
4914 if (!val)
4915 return NULL;
4917 edge taken = find_taken_edge (bb, vn_valueize (val));
4918 if (!taken)
4919 return NULL;
4921 if (dump_file && (dump_flags & TDF_DETAILS))
4922 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4923 "not executable\n", bb->index, bb->index, taken->dest->index);
4925 return taken;
4928 /* Do SCCVN. DEFAULT_VN_WALK_KIND_ specifies how we use the
4929 alias oracle walking during the VN process. */
4932 void
4933 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4935 size_t i;
4937 default_vn_walk_kind = default_vn_walk_kind_;
4939 init_scc_vn ();
4941 /* Collect pointers we know point to readonly memory. */
4942 const_parms = BITMAP_ALLOC (NULL);
4943 tree fnspec = lookup_attribute ("fn spec",
4944 TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
4945 if (fnspec)
4947 fnspec = TREE_VALUE (TREE_VALUE (fnspec));
4948 i = 1;
4949 for (tree arg = DECL_ARGUMENTS (cfun->decl);
4950 arg; arg = DECL_CHAIN (arg), ++i)
4952 if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
4953 break;
4954 if (TREE_STRING_POINTER (fnspec)[i] == 'R'
4955 || TREE_STRING_POINTER (fnspec)[i] == 'r')
4957 tree name = ssa_default_def (cfun, arg);
4958 if (name)
4959 bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
4964 /* Walk all blocks in dominator order, value-numbering the SSA defs
4965 of stmts and deciding whether outgoing edges are not executable. */
4966 sccvn_dom_walker walker;
4967 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4969 /* Initialize the value ids and prune out remaining VN_TOPs
4970 from dead code. */
4971 tree name;
4973 FOR_EACH_SSA_NAME (i, name, cfun)
4975 vn_ssa_aux_t info = VN_INFO (name);
4976 if (!info->visited)
4977 info->valnum = name;
4978 if (info->valnum == name
4979 || info->valnum == VN_TOP)
4980 info->value_id = get_next_value_id ();
4981 else if (is_gimple_min_invariant (info->valnum))
4982 info->value_id = get_or_alloc_constant_value_id (info->valnum);
4985 /* Propagate. */
4986 FOR_EACH_SSA_NAME (i, name, cfun)
4988 vn_ssa_aux_t info = VN_INFO (name);
4989 if (TREE_CODE (info->valnum) == SSA_NAME
4990 && info->valnum != name
4991 && info->value_id != VN_INFO (info->valnum)->value_id)
4992 info->value_id = VN_INFO (info->valnum)->value_id;
4995 set_hashtable_value_ids ();
4997 if (dump_file && (dump_flags & TDF_DETAILS))
4999 fprintf (dump_file, "Value numbers:\n");
5000 FOR_EACH_SSA_NAME (i, name, cfun)
5002 if (VN_INFO (name)->visited
5003 && SSA_VAL (name) != name)
5005 print_generic_expr (dump_file, name);
5006 fprintf (dump_file, " = ");
5007 print_generic_expr (dump_file, SSA_VAL (name));
5008 fprintf (dump_file, "\n");
5014 /* Return the maximum value id we have ever seen. */
5016 unsigned int
5017 get_max_value_id (void)
5019 return next_value_id;
5022 /* Return the next unique value id. */
5024 unsigned int
5025 get_next_value_id (void)
5027 return next_value_id++;
5031 /* Compare two expressions E1 and E2 and return true if they are equal. */
5033 bool
5034 expressions_equal_p (tree e1, tree e2)
5036 /* The obvious case. */
5037 if (e1 == e2)
5038 return true;
5040 /* If either one is VN_TOP consider them equal. */
5041 if (e1 == VN_TOP || e2 == VN_TOP)
5042 return true;
5044 /* If only one of them is null, they cannot be equal. */
5045 if (!e1 || !e2)
5046 return false;
5048 /* Now perform the actual comparison. */
5049 if (TREE_CODE (e1) == TREE_CODE (e2)
5050 && operand_equal_p (e1, e2, OEP_PURE_SAME))
5051 return true;
5053 return false;
5057 /* Return true if the nary operation NARY may trap. This is a copy
5058 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
5060 bool
5061 vn_nary_may_trap (vn_nary_op_t nary)
5063 tree type;
5064 tree rhs2 = NULL_TREE;
5065 bool honor_nans = false;
5066 bool honor_snans = false;
5067 bool fp_operation = false;
5068 bool honor_trapv = false;
5069 bool handled, ret;
5070 unsigned i;
5072 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5073 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5074 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5076 type = nary->type;
5077 fp_operation = FLOAT_TYPE_P (type);
5078 if (fp_operation)
5080 honor_nans = flag_trapping_math && !flag_finite_math_only;
5081 honor_snans = flag_signaling_nans != 0;
5083 else if (INTEGRAL_TYPE_P (type)
5084 && TYPE_OVERFLOW_TRAPS (type))
5085 honor_trapv = true;
5087 if (nary->length >= 2)
5088 rhs2 = nary->op[1];
5089 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5090 honor_trapv,
5091 honor_nans, honor_snans, rhs2,
5092 &handled);
5093 if (handled
5094 && ret)
5095 return true;
5097 for (i = 0; i < nary->length; ++i)
5098 if (tree_could_trap_p (nary->op[i]))
5099 return true;
5101 return false;