gcc/tree-ssa-sccvn.c
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2015 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "rtl.h"
28 #include "ssa.h"
29 #include "alias.h"
30 #include "fold-const.h"
31 #include "stor-layout.h"
32 #include "cfganal.h"
33 #include "gimple-pretty-print.h"
34 #include "tree-inline.h"
35 #include "internal-fn.h"
36 #include "gimple-fold.h"
37 #include "tree-eh.h"
38 #include "gimplify.h"
39 #include "flags.h"
40 #include "insn-config.h"
41 #include "expmed.h"
42 #include "dojump.h"
43 #include "explow.h"
44 #include "calls.h"
45 #include "emit-rtl.h"
46 #include "varasm.h"
47 #include "stmt.h"
48 #include "expr.h"
49 #include "tree-dfa.h"
50 #include "tree-ssa.h"
51 #include "dumpfile.h"
52 #include "alloc-pool.h"
53 #include "cfgloop.h"
54 #include "params.h"
55 #include "tree-ssa-propagate.h"
56 #include "tree-ssa-sccvn.h"
57 #include "tree-cfg.h"
58 #include "domwalk.h"
59 #include "cgraph.h"
60 #include "gimple-iterator.h"
61 #include "gimple-match.h"
63 /* This algorithm is based on the SCC algorithm presented by Keith
64    Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
65 (http://citeseer.ist.psu.edu/41805.html). In
66 straight line code, it is equivalent to a regular hash based value
67 numbering that is performed in reverse postorder.
69 For code with cycles, there are two alternatives, both of which
70 require keeping the hashtables separate from the actual list of
71 value numbers for SSA names.
73 1. Iterate value numbering in an RPO walk of the blocks, removing
74 all the entries from the hashtable after each iteration (but
75 keeping the SSA name->value number mapping between iterations).
76 Iterate until it does not change.
78 2. Perform value numbering as part of an SCC walk on the SSA graph,
79 iterating only the cycles in the SSA graph until they do not change
80 (using a separate, optimistic hashtable for value numbering the SCC
81 operands).
83 The second is not just faster in practice (because most SSA graph
84 cycles do not involve all the variables in the graph), it also has
85 some nice properties.
87 One of these nice properties is that when we pop an SCC off the
88 stack, we are guaranteed to have processed all the operands coming from
89 *outside of that SCC*, so we do not need to do anything special to
90 ensure they have value numbers.
92 Another nice property is that the SCC walk is done as part of a DFS
93 of the SSA graph, which makes it easy to perform combining and
94 simplifying operations at the same time.
96 The code below is deliberately written in a way that makes it easy
97 to separate the SCC walk from the other work it does.
99 In order to propagate constants through the code, we track which
100 expressions contain constants, and use those while folding. In
101 theory, we could also track expressions whose value numbers are
102 replaced, in case we end up folding based on expression
103 identities.
105 In order to value number memory, we assign value numbers to vuses.
106    This enables us to note that, for example, stores of the same
107    value to the same address from the same starting memory state are
108    equivalent.
109 TODO:
111 1. We can iterate only the changing portions of the SCC's, but
112 I have not seen an SCC big enough for this to be a win.
113 2. If you differentiate between phi nodes for loops and phi nodes
114 for if-then-else, you can properly consider phi nodes in different
115 blocks for equivalence.
116 3. We could value number vuses in more cases, particularly, whole
117    structure copies.  */
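/* Illustrative sketch, not part of GCC: a minimal, self-contained toy
   showing the basic hash-based value numbering idea described above,
   restricted to straight-line code.  Two statements receive the same
   value number iff their opcode and the value numbers of their operands
   match.  All names below (stmt, key, lookup_or_insert, ...) are made up
   for the example; the block is guarded by #if 0 so it is never compiled
   as part of this file.  */
#if 0
#include <stdio.h>

#define MAX_STMTS 64

struct stmt { char op; int src1, src2; };   /* operands: stmt index or leaf */
struct key  { char op; int vn1, vn2; };     /* hash-table key */

static struct key table[MAX_STMTS];
static int table_vn[MAX_STMTS];
static int n_entries;

/* Return the value number recorded for K, or record NEXT_VN for it.  */
static int
lookup_or_insert (struct key k, int next_vn)
{
  for (int i = 0; i < n_entries; ++i)
    if (table[i].op == k.op && table[i].vn1 == k.vn1 && table[i].vn2 == k.vn2)
      return table_vn[i];
  table[n_entries] = k;
  table_vn[n_entries] = next_vn;
  return table_vn[n_entries++];
}

int
main (void)
{
  /* t0 = a + b; t1 = a + b; t2 = t0 * c; t3 = t1 * c;
     negative operands -1/-2/-3 stand for the incoming leaves a, b, c.  */
  struct stmt stmts[4] = {
    { '+', -1, -2 }, { '+', -1, -2 }, { '*', 0, -3 }, { '*', 1, -3 }
  };
  int vn[4];

  for (int i = 0; i < 4; ++i)
    {
      struct key k;
      k.op = stmts[i].op;
      /* Valueize the operands first: leaves are their own value, other
	 operands take the value number of their defining statement.  */
      k.vn1 = stmts[i].src1 < 0 ? stmts[i].src1 : vn[stmts[i].src1];
      k.vn2 = stmts[i].src2 < 0 ? stmts[i].src2 : vn[stmts[i].src2];
      vn[i] = lookup_or_insert (k, i);
      printf ("t%d -> value number %d\n", i, vn[i]);
    }
  /* Prints the same value number for t0/t1 and for t2/t3.  */
  return 0;
}
#endif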
121 static tree *last_vuse_ptr;
122 static vn_lookup_kind vn_walk_kind;
123 static vn_lookup_kind default_vn_walk_kind;
124 bitmap const_parms;
126 /* vn_nary_op hashtable helpers. */
128 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
130 typedef vn_nary_op_s *compare_type;
131 static inline hashval_t hash (const vn_nary_op_s *);
132 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
135 /* Return the computed hashcode for nary operation P1. */
137 inline hashval_t
138 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
140 return vno1->hashcode;
143 /* Compare nary operations P1 and P2 and return true if they are
144 equivalent. */
146 inline bool
147 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
149 return vn_nary_op_eq (vno1, vno2);
152 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
153 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
156 /* vn_phi hashtable helpers. */
158 static int
159 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
161 struct vn_phi_hasher : pointer_hash <vn_phi_s>
163 static inline hashval_t hash (const vn_phi_s *);
164 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
165 static inline void remove (vn_phi_s *);
168 /* Return the computed hashcode for phi operation P1. */
170 inline hashval_t
171 vn_phi_hasher::hash (const vn_phi_s *vp1)
173 return vp1->hashcode;
176 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
178 inline bool
179 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
181 return vn_phi_eq (vp1, vp2);
184 /* Free a phi operation structure VP. */
186 inline void
187 vn_phi_hasher::remove (vn_phi_s *phi)
189 phi->phiargs.release ();
192 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
193 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
196 /* Compare two reference operands P1 and P2 for equality. Return true if
197 they are equal, and false otherwise. */
199 static int
200 vn_reference_op_eq (const void *p1, const void *p2)
202 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
203 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
205 return (vro1->opcode == vro2->opcode
206 /* We do not care for differences in type qualification. */
207 && (vro1->type == vro2->type
208 || (vro1->type && vro2->type
209 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
210 TYPE_MAIN_VARIANT (vro2->type))))
211 && expressions_equal_p (vro1->op0, vro2->op0)
212 && expressions_equal_p (vro1->op1, vro2->op1)
213 && expressions_equal_p (vro1->op2, vro2->op2));
216 /* Free a reference operation structure VP. */
218 static inline void
219 free_reference (vn_reference_s *vr)
221 vr->operands.release ();
225 /* vn_reference hashtable helpers. */
227 struct vn_reference_hasher : pointer_hash <vn_reference_s>
229 static inline hashval_t hash (const vn_reference_s *);
230 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
231 static inline void remove (vn_reference_s *);
234 /* Return the hashcode for a given reference operation P1. */
236 inline hashval_t
237 vn_reference_hasher::hash (const vn_reference_s *vr1)
239 return vr1->hashcode;
242 inline bool
243 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
245 return vn_reference_eq (v, c);
248 inline void
249 vn_reference_hasher::remove (vn_reference_s *v)
251 free_reference (v);
254 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
255 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
258 /* The set of hashtables and alloc_pool's for their items. */
260 typedef struct vn_tables_s
262 vn_nary_op_table_type *nary;
263 vn_phi_table_type *phis;
264 vn_reference_table_type *references;
265 struct obstack nary_obstack;
266 object_allocator<vn_phi_s> *phis_pool;
267 object_allocator<vn_reference_s> *references_pool;
268 } *vn_tables_t;
271 /* vn_constant hashtable helpers. */
273 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
275 static inline hashval_t hash (const vn_constant_s *);
276 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
279 /* Hash table hash function for vn_constant_t. */
281 inline hashval_t
282 vn_constant_hasher::hash (const vn_constant_s *vc1)
284 return vc1->hashcode;
287 /* Hash table equality function for vn_constant_t. */
289 inline bool
290 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
292 if (vc1->hashcode != vc2->hashcode)
293 return false;
295 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
298 static hash_table<vn_constant_hasher> *constant_to_value_id;
299 static bitmap constant_value_ids;
302 /* Valid hashtables storing information we have proven to be
303 correct. */
305 static vn_tables_t valid_info;
307 /* Optimistic hashtables storing information we are making assumptions about
308 during iterations. */
310 static vn_tables_t optimistic_info;
312 /* Pointer to the set of hashtables that is currently being used.
313 Should always point to either the optimistic_info, or the
314 valid_info. */
316 static vn_tables_t current_info;
319 /* Reverse post order index for each basic block. */
321 static int *rpo_numbers;
323 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
325 /* Return the SSA value of the VUSE x, supporting released VDEFs
326 during elimination which will value-number the VDEF to the
327 associated VUSE (but not substitute in the whole lattice). */
329 static inline tree
330 vuse_ssa_val (tree x)
332 if (!x)
333 return NULL_TREE;
337 x = SSA_VAL (x);
339 while (SSA_NAME_IN_FREE_LIST (x));
341 return x;
344 /* This represents the top of the VN lattice, which is the universal
345 value. */
347 tree VN_TOP;
349 /* Unique counter for our value ids. */
351 static unsigned int next_value_id;
353 /* Next DFS number and the stack for strongly connected component
354 detection. */
356 static unsigned int next_dfs_num;
357 static vec<tree> sccstack;
361 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
362 are allocated on an obstack for locality reasons, and to free them
363 without looping over the vec. */
365 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
366 static struct obstack vn_ssa_aux_obstack;
368 /* Return the value numbering information for a given SSA name. */
370 vn_ssa_aux_t
371 VN_INFO (tree name)
373 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
374 gcc_checking_assert (res);
375 return res;
378 /* Set the value numbering info for a given SSA name to a given
379 value. */
381 static inline void
382 VN_INFO_SET (tree name, vn_ssa_aux_t value)
384 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
387 /* Initialize the value numbering info for a given SSA name.
388 This should be called just once for every SSA name. */
390 vn_ssa_aux_t
391 VN_INFO_GET (tree name)
393 vn_ssa_aux_t newinfo;
395 gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
396 || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
397 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
398 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
399 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
400 vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
401 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
402 return newinfo;
406 /* Return the vn_kind the expression computed by the stmt should be
407 associated with. */
409 enum vn_kind
410 vn_get_stmt_kind (gimple *stmt)
412 switch (gimple_code (stmt))
414 case GIMPLE_CALL:
415 return VN_REFERENCE;
416 case GIMPLE_PHI:
417 return VN_PHI;
418 case GIMPLE_ASSIGN:
420 enum tree_code code = gimple_assign_rhs_code (stmt);
421 tree rhs1 = gimple_assign_rhs1 (stmt);
422 switch (get_gimple_rhs_class (code))
424 case GIMPLE_UNARY_RHS:
425 case GIMPLE_BINARY_RHS:
426 case GIMPLE_TERNARY_RHS:
427 return VN_NARY;
428 case GIMPLE_SINGLE_RHS:
429 switch (TREE_CODE_CLASS (code))
431 case tcc_reference:
432 /* VOP-less references can go through unary case. */
433 if ((code == REALPART_EXPR
434 || code == IMAGPART_EXPR
435 || code == VIEW_CONVERT_EXPR
436 || code == BIT_FIELD_REF)
437 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
438 return VN_NARY;
440 /* Fallthrough. */
441 case tcc_declaration:
442 return VN_REFERENCE;
444 case tcc_constant:
445 return VN_CONSTANT;
447 default:
448 if (code == ADDR_EXPR)
449 return (is_gimple_min_invariant (rhs1)
450 ? VN_CONSTANT : VN_REFERENCE);
451 else if (code == CONSTRUCTOR)
452 return VN_NARY;
453 return VN_NONE;
455 default:
456 return VN_NONE;
459 default:
460 return VN_NONE;
464 /* Lookup a value id for CONSTANT and return it. If it does not
465    exist, return 0.  */
467 unsigned int
468 get_constant_value_id (tree constant)
470 vn_constant_s **slot;
471 struct vn_constant_s vc;
473 vc.hashcode = vn_hash_constant_with_type (constant);
474 vc.constant = constant;
475 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
476 if (slot)
477 return (*slot)->value_id;
478 return 0;
481 /* Lookup a value id for CONSTANT, and if it does not exist, create a
482 new one and return it. If it does exist, return it. */
484 unsigned int
485 get_or_alloc_constant_value_id (tree constant)
487 vn_constant_s **slot;
488 struct vn_constant_s vc;
489 vn_constant_t vcp;
491 vc.hashcode = vn_hash_constant_with_type (constant);
492 vc.constant = constant;
493 slot = constant_to_value_id->find_slot (&vc, INSERT);
494 if (*slot)
495 return (*slot)->value_id;
497 vcp = XNEW (struct vn_constant_s);
498 vcp->hashcode = vc.hashcode;
499 vcp->constant = constant;
500 vcp->value_id = get_next_value_id ();
501 *slot = vcp;
502 bitmap_set_bit (constant_value_ids, vcp->value_id);
503 return vcp->value_id;
506 /* Return true if V is a value id for a constant. */
508 bool
509 value_id_constant_p (unsigned int v)
511 return bitmap_bit_p (constant_value_ids, v);
514 /* Compute the hash for a reference operand VRO1. */
516 static void
517 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
519 hstate.add_int (vro1->opcode);
520 if (vro1->op0)
521 inchash::add_expr (vro1->op0, hstate);
522 if (vro1->op1)
523 inchash::add_expr (vro1->op1, hstate);
524 if (vro1->op2)
525 inchash::add_expr (vro1->op2, hstate);
528 /* Compute a hash for the reference operation VR1 and return it. */
530 static hashval_t
531 vn_reference_compute_hash (const vn_reference_t vr1)
533 inchash::hash hstate;
534 hashval_t result;
535 int i;
536 vn_reference_op_t vro;
537 HOST_WIDE_INT off = -1;
538 bool deref = false;
540 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
542 if (vro->opcode == MEM_REF)
543 deref = true;
544 else if (vro->opcode != ADDR_EXPR)
545 deref = false;
546 if (vro->off != -1)
548 if (off == -1)
549 off = 0;
550 off += vro->off;
552 else
554 if (off != -1
555 && off != 0)
556 hstate.add_int (off);
557 off = -1;
558 if (deref
559 && vro->opcode == ADDR_EXPR)
561 if (vro->op0)
563 tree op = TREE_OPERAND (vro->op0, 0);
564 hstate.add_int (TREE_CODE (op));
565 inchash::add_expr (op, hstate);
568 else
569 vn_reference_op_compute_hash (vro, hstate);
572 result = hstate.end ();
573 /* ??? We would ICE later if we hash instead of adding that in. */
574 if (vr1->vuse)
575 result += SSA_NAME_VERSION (vr1->vuse);
577 return result;
580 /* Return true if reference operations VR1 and VR2 are equivalent. This
581 means they have the same set of operands and vuses. */
583 bool
584 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
586 unsigned i, j;
588 /* Early out if this is not a hash collision. */
589 if (vr1->hashcode != vr2->hashcode)
590 return false;
592 /* The VOP needs to be the same. */
593 if (vr1->vuse != vr2->vuse)
594 return false;
596 /* If the operands are the same we are done. */
597 if (vr1->operands == vr2->operands)
598 return true;
600 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
601 return false;
603 if (INTEGRAL_TYPE_P (vr1->type)
604 && INTEGRAL_TYPE_P (vr2->type))
606 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
607 return false;
609 else if (INTEGRAL_TYPE_P (vr1->type)
610 && (TYPE_PRECISION (vr1->type)
611 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
612 return false;
613 else if (INTEGRAL_TYPE_P (vr2->type)
614 && (TYPE_PRECISION (vr2->type)
615 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
616 return false;
618 i = 0;
619 j = 0;
622 HOST_WIDE_INT off1 = 0, off2 = 0;
623 vn_reference_op_t vro1, vro2;
624 vn_reference_op_s tem1, tem2;
625 bool deref1 = false, deref2 = false;
626 for (; vr1->operands.iterate (i, &vro1); i++)
628 if (vro1->opcode == MEM_REF)
629 deref1 = true;
630 if (vro1->off == -1)
631 break;
632 off1 += vro1->off;
634 for (; vr2->operands.iterate (j, &vro2); j++)
636 if (vro2->opcode == MEM_REF)
637 deref2 = true;
638 if (vro2->off == -1)
639 break;
640 off2 += vro2->off;
642 if (off1 != off2)
643 return false;
644 if (deref1 && vro1->opcode == ADDR_EXPR)
646 memset (&tem1, 0, sizeof (tem1));
647 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
648 tem1.type = TREE_TYPE (tem1.op0);
649 tem1.opcode = TREE_CODE (tem1.op0);
650 vro1 = &tem1;
651 deref1 = false;
653 if (deref2 && vro2->opcode == ADDR_EXPR)
655 memset (&tem2, 0, sizeof (tem2));
656 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
657 tem2.type = TREE_TYPE (tem2.op0);
658 tem2.opcode = TREE_CODE (tem2.op0);
659 vro2 = &tem2;
660 deref2 = false;
662 if (deref1 != deref2)
663 return false;
664 if (!vn_reference_op_eq (vro1, vro2))
665 return false;
666 ++j;
667 ++i;
669 while (vr1->operands.length () != i
670 || vr2->operands.length () != j);
672 return true;
675 /* Copy the operations present in load/store REF into RESULT, a vector of
676 vn_reference_op_s's. */
678 static void
679 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
681 if (TREE_CODE (ref) == TARGET_MEM_REF)
683 vn_reference_op_s temp;
685 result->reserve (3);
687 memset (&temp, 0, sizeof (temp));
688 temp.type = TREE_TYPE (ref);
689 temp.opcode = TREE_CODE (ref);
690 temp.op0 = TMR_INDEX (ref);
691 temp.op1 = TMR_STEP (ref);
692 temp.op2 = TMR_OFFSET (ref);
693 temp.off = -1;
694 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
695 temp.base = MR_DEPENDENCE_BASE (ref);
696 result->quick_push (temp);
698 memset (&temp, 0, sizeof (temp));
699 temp.type = NULL_TREE;
700 temp.opcode = ERROR_MARK;
701 temp.op0 = TMR_INDEX2 (ref);
702 temp.off = -1;
703 result->quick_push (temp);
705 memset (&temp, 0, sizeof (temp));
706 temp.type = NULL_TREE;
707 temp.opcode = TREE_CODE (TMR_BASE (ref));
708 temp.op0 = TMR_BASE (ref);
709 temp.off = -1;
710 result->quick_push (temp);
711 return;
714 /* For non-calls, store the information that makes up the address. */
715 tree orig = ref;
716 while (ref)
718 vn_reference_op_s temp;
720 memset (&temp, 0, sizeof (temp));
721 temp.type = TREE_TYPE (ref);
722 temp.opcode = TREE_CODE (ref);
723 temp.off = -1;
725 switch (temp.opcode)
727 case MODIFY_EXPR:
728 temp.op0 = TREE_OPERAND (ref, 1);
729 break;
730 case WITH_SIZE_EXPR:
731 temp.op0 = TREE_OPERAND (ref, 1);
732 temp.off = 0;
733 break;
734 case MEM_REF:
735 /* The base address gets its own vn_reference_op_s structure. */
736 temp.op0 = TREE_OPERAND (ref, 1);
737 if (tree_fits_shwi_p (TREE_OPERAND (ref, 1)))
738 temp.off = tree_to_shwi (TREE_OPERAND (ref, 1));
739 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
740 temp.base = MR_DEPENDENCE_BASE (ref);
741 break;
742 case BIT_FIELD_REF:
743 /* Record bits and position. */
744 temp.op0 = TREE_OPERAND (ref, 1);
745 temp.op1 = TREE_OPERAND (ref, 2);
746 if (tree_fits_shwi_p (TREE_OPERAND (ref, 2)))
748 HOST_WIDE_INT off = tree_to_shwi (TREE_OPERAND (ref, 2));
749 if (off % BITS_PER_UNIT == 0)
750 temp.off = off / BITS_PER_UNIT;
752 break;
753 case COMPONENT_REF:
754 	  /* The field decl is enough to unambiguously specify the field;
755 	     a matching type is not necessary, and a mismatching type
756 is always a spurious difference. */
757 temp.type = NULL_TREE;
758 temp.op0 = TREE_OPERAND (ref, 1);
759 temp.op1 = TREE_OPERAND (ref, 2);
761 tree this_offset = component_ref_field_offset (ref);
762 if (this_offset
763 && TREE_CODE (this_offset) == INTEGER_CST)
765 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
766 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
768 offset_int off
769 = (wi::to_offset (this_offset)
770 + wi::lrshift (wi::to_offset (bit_offset),
771 LOG2_BITS_PER_UNIT));
772 if (wi::fits_shwi_p (off)
773 		      /* Prohibit value-numbering zero offset components
774 of addresses the same before the pass folding
775 __builtin_object_size had a chance to run
776 (checking cfun->after_inlining does the
777 trick here). */
778 && (TREE_CODE (orig) != ADDR_EXPR
779 || off != 0
780 || cfun->after_inlining))
781 temp.off = off.to_shwi ();
785 break;
786 case ARRAY_RANGE_REF:
787 case ARRAY_REF:
788 /* Record index as operand. */
789 temp.op0 = TREE_OPERAND (ref, 1);
790 /* Always record lower bounds and element size. */
791 temp.op1 = array_ref_low_bound (ref);
792 temp.op2 = array_ref_element_size (ref);
793 if (TREE_CODE (temp.op0) == INTEGER_CST
794 && TREE_CODE (temp.op1) == INTEGER_CST
795 && TREE_CODE (temp.op2) == INTEGER_CST)
797 offset_int off = ((wi::to_offset (temp.op0)
798 - wi::to_offset (temp.op1))
799 * wi::to_offset (temp.op2));
800 if (wi::fits_shwi_p (off))
801 temp.off = off.to_shwi();
803 break;
804 case VAR_DECL:
805 if (DECL_HARD_REGISTER (ref))
807 temp.op0 = ref;
808 break;
810 /* Fallthru. */
811 case PARM_DECL:
812 case CONST_DECL:
813 case RESULT_DECL:
814 /* Canonicalize decls to MEM[&decl] which is what we end up with
815 when valueizing MEM[ptr] with ptr = &decl. */
816 temp.opcode = MEM_REF;
817 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
818 temp.off = 0;
819 result->safe_push (temp);
820 temp.opcode = ADDR_EXPR;
821 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
822 temp.type = TREE_TYPE (temp.op0);
823 temp.off = -1;
824 break;
825 case STRING_CST:
826 case INTEGER_CST:
827 case COMPLEX_CST:
828 case VECTOR_CST:
829 case REAL_CST:
830 case FIXED_CST:
831 case CONSTRUCTOR:
832 case SSA_NAME:
833 temp.op0 = ref;
834 break;
835 case ADDR_EXPR:
836 if (is_gimple_min_invariant (ref))
838 temp.op0 = ref;
839 break;
841 break;
842 /* These are only interesting for their operands, their
843 existence, and their type. They will never be the last
844 	   ref in the chain of references (i.e. they require an
845 operand), so we don't have to put anything
846 for op* as it will be handled by the iteration */
847 case REALPART_EXPR:
848 case VIEW_CONVERT_EXPR:
849 temp.off = 0;
850 break;
851 case IMAGPART_EXPR:
852 /* This is only interesting for its constant offset. */
853 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
854 break;
855 default:
856 gcc_unreachable ();
858 result->safe_push (temp);
860 if (REFERENCE_CLASS_P (ref)
861 || TREE_CODE (ref) == MODIFY_EXPR
862 || TREE_CODE (ref) == WITH_SIZE_EXPR
863 || (TREE_CODE (ref) == ADDR_EXPR
864 && !is_gimple_min_invariant (ref)))
865 ref = TREE_OPERAND (ref, 0);
866 else
867 ref = NULL_TREE;
871 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
872 operands in *OPS, the reference alias set SET and the reference type TYPE.
873 Return true if something useful was produced. */
875 bool
876 ao_ref_init_from_vn_reference (ao_ref *ref,
877 alias_set_type set, tree type,
878 vec<vn_reference_op_s> ops)
880 vn_reference_op_t op;
881 unsigned i;
882 tree base = NULL_TREE;
883 tree *op0_p = &base;
884 offset_int offset = 0;
885 offset_int max_size;
886 offset_int size = -1;
887 tree size_tree = NULL_TREE;
888 alias_set_type base_alias_set = -1;
890 /* First get the final access size from just the outermost expression. */
891 op = &ops[0];
892 if (op->opcode == COMPONENT_REF)
893 size_tree = DECL_SIZE (op->op0);
894 else if (op->opcode == BIT_FIELD_REF)
895 size_tree = op->op0;
896 else
898 machine_mode mode = TYPE_MODE (type);
899 if (mode == BLKmode)
900 size_tree = TYPE_SIZE (type);
901 else
902 size = int (GET_MODE_BITSIZE (mode));
904 if (size_tree != NULL_TREE
905 && TREE_CODE (size_tree) == INTEGER_CST)
906 size = wi::to_offset (size_tree);
908 /* Initially, maxsize is the same as the accessed element size.
909 In the following it will only grow (or become -1). */
910 max_size = size;
912 /* Compute cumulative bit-offset for nested component-refs and array-refs,
913 and find the ultimate containing object. */
914 FOR_EACH_VEC_ELT (ops, i, op)
916 switch (op->opcode)
918 /* These may be in the reference ops, but we cannot do anything
919 sensible with them here. */
920 case ADDR_EXPR:
921 /* Apart from ADDR_EXPR arguments to MEM_REF. */
922 if (base != NULL_TREE
923 && TREE_CODE (base) == MEM_REF
924 && op->op0
925 && DECL_P (TREE_OPERAND (op->op0, 0)))
927 vn_reference_op_t pop = &ops[i-1];
928 base = TREE_OPERAND (op->op0, 0);
929 if (pop->off == -1)
931 max_size = -1;
932 offset = 0;
934 else
935 offset += pop->off * BITS_PER_UNIT;
936 op0_p = NULL;
937 break;
939 /* Fallthru. */
940 case CALL_EXPR:
941 return false;
943 /* Record the base objects. */
944 case MEM_REF:
945 base_alias_set = get_deref_alias_set (op->op0);
946 *op0_p = build2 (MEM_REF, op->type,
947 NULL_TREE, op->op0);
948 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
949 MR_DEPENDENCE_BASE (*op0_p) = op->base;
950 op0_p = &TREE_OPERAND (*op0_p, 0);
951 break;
953 case VAR_DECL:
954 case PARM_DECL:
955 case RESULT_DECL:
956 case SSA_NAME:
957 *op0_p = op->op0;
958 op0_p = NULL;
959 break;
961 /* And now the usual component-reference style ops. */
962 case BIT_FIELD_REF:
963 offset += wi::to_offset (op->op1);
964 break;
966 case COMPONENT_REF:
968 tree field = op->op0;
969 /* We do not have a complete COMPONENT_REF tree here so we
970 cannot use component_ref_field_offset. Do the interesting
971 parts manually. */
972 tree this_offset = DECL_FIELD_OFFSET (field);
974 if (op->op1 || TREE_CODE (this_offset) != INTEGER_CST)
975 max_size = -1;
976 else
978 offset_int woffset = wi::lshift (wi::to_offset (this_offset),
979 LOG2_BITS_PER_UNIT);
980 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
981 offset += woffset;
983 break;
986 case ARRAY_RANGE_REF:
987 case ARRAY_REF:
988 /* We recorded the lower bound and the element size. */
989 if (TREE_CODE (op->op0) != INTEGER_CST
990 || TREE_CODE (op->op1) != INTEGER_CST
991 || TREE_CODE (op->op2) != INTEGER_CST)
992 max_size = -1;
993 else
995 offset_int woffset
996 = wi::sext (wi::to_offset (op->op0) - wi::to_offset (op->op1),
997 TYPE_PRECISION (TREE_TYPE (op->op0)));
998 woffset *= wi::to_offset (op->op2);
999 woffset = wi::lshift (woffset, LOG2_BITS_PER_UNIT);
1000 offset += woffset;
1002 break;
1004 case REALPART_EXPR:
1005 break;
1007 case IMAGPART_EXPR:
1008 offset += size;
1009 break;
1011 case VIEW_CONVERT_EXPR:
1012 break;
1014 case STRING_CST:
1015 case INTEGER_CST:
1016 case COMPLEX_CST:
1017 case VECTOR_CST:
1018 case REAL_CST:
1019 case CONSTRUCTOR:
1020 case CONST_DECL:
1021 return false;
1023 default:
1024 return false;
1028 if (base == NULL_TREE)
1029 return false;
1031 ref->ref = NULL_TREE;
1032 ref->base = base;
1033 ref->ref_alias_set = set;
1034 if (base_alias_set != -1)
1035 ref->base_alias_set = base_alias_set;
1036 else
1037 ref->base_alias_set = get_alias_set (base);
1038 /* We discount volatiles from value-numbering elsewhere. */
1039 ref->volatile_p = false;
1041 if (!wi::fits_shwi_p (size) || wi::neg_p (size))
1043 ref->offset = 0;
1044 ref->size = -1;
1045 ref->max_size = -1;
1046 return true;
1049 ref->size = size.to_shwi ();
1051 if (!wi::fits_shwi_p (offset))
1053 ref->offset = 0;
1054 ref->max_size = -1;
1055 return true;
1058 ref->offset = offset.to_shwi ();
1060 if (!wi::fits_shwi_p (max_size) || wi::neg_p (max_size))
1061 ref->max_size = -1;
1062 else
1063 ref->max_size = max_size.to_shwi ();
1065 return true;
1068 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1069 vn_reference_op_s's. */
1071 static void
1072 copy_reference_ops_from_call (gcall *call,
1073 vec<vn_reference_op_s> *result)
1075 vn_reference_op_s temp;
1076 unsigned i;
1077 tree lhs = gimple_call_lhs (call);
1078 int lr;
1080   /* If two calls have a different non-SSA lhs, vdef value numbers should be
1081      different.  By adding the lhs to the vector here, we ensure that the
1082 hashcode is different, guaranteeing a different value number. */
1083 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1085 memset (&temp, 0, sizeof (temp));
1086 temp.opcode = MODIFY_EXPR;
1087 temp.type = TREE_TYPE (lhs);
1088 temp.op0 = lhs;
1089 temp.off = -1;
1090 result->safe_push (temp);
1093 /* Copy the type, opcode, function, static chain and EH region, if any. */
1094 memset (&temp, 0, sizeof (temp));
1095 temp.type = gimple_call_return_type (call);
1096 temp.opcode = CALL_EXPR;
1097 temp.op0 = gimple_call_fn (call);
1098 temp.op1 = gimple_call_chain (call);
1099 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1100 temp.op2 = size_int (lr);
1101 temp.off = -1;
1102 if (gimple_call_with_bounds_p (call))
1103 temp.with_bounds = 1;
1104 result->safe_push (temp);
1106 /* Copy the call arguments. As they can be references as well,
1107 just chain them together. */
1108 for (i = 0; i < gimple_call_num_args (call); ++i)
1110 tree callarg = gimple_call_arg (call, i);
1111 copy_reference_ops_from_ref (callarg, result);
1115 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1116 *I_P to point to the last element of the replacement. */
1117 static bool
1118 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1119 unsigned int *i_p)
1121 unsigned int i = *i_p;
1122 vn_reference_op_t op = &(*ops)[i];
1123 vn_reference_op_t mem_op = &(*ops)[i - 1];
1124 tree addr_base;
1125 HOST_WIDE_INT addr_offset = 0;
1127 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1128 from .foo.bar to the preceding MEM_REF offset and replace the
1129 address with &OBJ. */
1130 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1131 &addr_offset);
1132 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1133 if (addr_base != TREE_OPERAND (op->op0, 0))
1135 offset_int off = offset_int::from (mem_op->op0, SIGNED);
1136 off += addr_offset;
1137 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1138 op->op0 = build_fold_addr_expr (addr_base);
1139 if (tree_fits_shwi_p (mem_op->op0))
1140 mem_op->off = tree_to_shwi (mem_op->op0);
1141 else
1142 mem_op->off = -1;
1143 return true;
1145 return false;
1148 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1149 *I_P to point to the last element of the replacement. */
1150 static bool
1151 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1152 unsigned int *i_p)
1154 unsigned int i = *i_p;
1155 vn_reference_op_t op = &(*ops)[i];
1156 vn_reference_op_t mem_op = &(*ops)[i - 1];
1157 gimple *def_stmt;
1158 enum tree_code code;
1159 offset_int off;
1161 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1162 if (!is_gimple_assign (def_stmt))
1163 return false;
1165 code = gimple_assign_rhs_code (def_stmt);
1166 if (code != ADDR_EXPR
1167 && code != POINTER_PLUS_EXPR)
1168 return false;
1170 off = offset_int::from (mem_op->op0, SIGNED);
1172 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1173 from .foo.bar to the preceding MEM_REF offset and replace the
1174 address with &OBJ. */
1175 if (code == ADDR_EXPR)
1177 tree addr, addr_base;
1178 HOST_WIDE_INT addr_offset;
1180 addr = gimple_assign_rhs1 (def_stmt);
1181 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1182 &addr_offset);
1183       /* If that didn't work because the address isn't invariant, propagate
1184 the reference tree from the address operation in case the current
1185 dereference isn't offsetted. */
1186 if (!addr_base
1187 && *i_p == ops->length () - 1
1188 && off == 0
1189 /* This makes us disable this transform for PRE where the
1190 	     reference ops might also be used for code insertion, which
1191 is invalid. */
1192 && default_vn_walk_kind == VN_WALKREWRITE)
1194 auto_vec<vn_reference_op_s, 32> tem;
1195 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1196 ops->pop ();
1197 ops->pop ();
1198 ops->safe_splice (tem);
1199 --*i_p;
1200 return true;
1202 if (!addr_base
1203 || TREE_CODE (addr_base) != MEM_REF)
1204 return false;
1206 off += addr_offset;
1207 off += mem_ref_offset (addr_base);
1208 op->op0 = TREE_OPERAND (addr_base, 0);
1210 else
1212 tree ptr, ptroff;
1213 ptr = gimple_assign_rhs1 (def_stmt);
1214 ptroff = gimple_assign_rhs2 (def_stmt);
1215 if (TREE_CODE (ptr) != SSA_NAME
1216 || TREE_CODE (ptroff) != INTEGER_CST)
1217 return false;
1219 off += wi::to_offset (ptroff);
1220 op->op0 = ptr;
1223 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1224 if (tree_fits_shwi_p (mem_op->op0))
1225 mem_op->off = tree_to_shwi (mem_op->op0);
1226 else
1227 mem_op->off = -1;
1228 if (TREE_CODE (op->op0) == SSA_NAME)
1229 op->op0 = SSA_VAL (op->op0);
1230 if (TREE_CODE (op->op0) != SSA_NAME)
1231 op->opcode = TREE_CODE (op->op0);
1233 /* And recurse. */
1234 if (TREE_CODE (op->op0) == SSA_NAME)
1235 vn_reference_maybe_forwprop_address (ops, i_p);
1236 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1237 vn_reference_fold_indirect (ops, i_p);
1238 return true;
1241 /* Optimize the reference REF to a constant if possible or return
1242 NULL_TREE if not. */
1244 tree
1245 fully_constant_vn_reference_p (vn_reference_t ref)
1247 vec<vn_reference_op_s> operands = ref->operands;
1248 vn_reference_op_t op;
1250 /* Try to simplify the translated expression if it is
1251 a call to a builtin function with at most two arguments. */
1252 op = &operands[0];
1253 if (op->opcode == CALL_EXPR
1254 && TREE_CODE (op->op0) == ADDR_EXPR
1255 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1256 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1257 && operands.length () >= 2
1258 && operands.length () <= 3)
1260 vn_reference_op_t arg0, arg1 = NULL;
1261 bool anyconst = false;
1262 arg0 = &operands[1];
1263 if (operands.length () > 2)
1264 arg1 = &operands[2];
1265 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1266 || (arg0->opcode == ADDR_EXPR
1267 && is_gimple_min_invariant (arg0->op0)))
1268 anyconst = true;
1269 if (arg1
1270 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1271 || (arg1->opcode == ADDR_EXPR
1272 && is_gimple_min_invariant (arg1->op0))))
1273 anyconst = true;
1274 if (anyconst)
1276 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1277 arg1 ? 2 : 1,
1278 arg0->op0,
1279 arg1 ? arg1->op0 : NULL);
1280 if (folded
1281 && TREE_CODE (folded) == NOP_EXPR)
1282 folded = TREE_OPERAND (folded, 0);
1283 if (folded
1284 && is_gimple_min_invariant (folded))
1285 return folded;
1289 /* Simplify reads from constants or constant initializers. */
1290 else if (BITS_PER_UNIT == 8
1291 && is_gimple_reg_type (ref->type)
1292 && (!INTEGRAL_TYPE_P (ref->type)
1293 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1295 HOST_WIDE_INT off = 0;
1296 HOST_WIDE_INT size;
1297 if (INTEGRAL_TYPE_P (ref->type))
1298 size = TYPE_PRECISION (ref->type);
1299 else
1300 size = tree_to_shwi (TYPE_SIZE (ref->type));
1301 if (size % BITS_PER_UNIT != 0
1302 || size > MAX_BITSIZE_MODE_ANY_MODE)
1303 return NULL_TREE;
1304 size /= BITS_PER_UNIT;
1305 unsigned i;
1306 for (i = 0; i < operands.length (); ++i)
1308 if (operands[i].off == -1)
1309 return NULL_TREE;
1310 off += operands[i].off;
1311 if (operands[i].opcode == MEM_REF)
1313 ++i;
1314 break;
1317 vn_reference_op_t base = &operands[--i];
1318 tree ctor = error_mark_node;
1319 tree decl = NULL_TREE;
1320 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1321 ctor = base->op0;
1322 else if (base->opcode == MEM_REF
1323 && base[1].opcode == ADDR_EXPR
1324 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1325 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
1327 decl = TREE_OPERAND (base[1].op0, 0);
1328 ctor = ctor_for_folding (decl);
1330 if (ctor == NULL_TREE)
1331 return build_zero_cst (ref->type);
1332 else if (ctor != error_mark_node)
1334 if (decl)
1336 tree res = fold_ctor_reference (ref->type, ctor,
1337 off * BITS_PER_UNIT,
1338 size * BITS_PER_UNIT, decl);
1339 if (res)
1341 STRIP_USELESS_TYPE_CONVERSION (res);
1342 if (is_gimple_min_invariant (res))
1343 return res;
1346 else
1348 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1349 if (native_encode_expr (ctor, buf, size, off) > 0)
1350 return native_interpret_expr (ref->type, buf, size);
1355 return NULL_TREE;
1358 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1359 structures into their value numbers. This is done in-place, and
1360 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1361 whether any operands were valueized. */
1363 static vec<vn_reference_op_s>
1364 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1366 vn_reference_op_t vro;
1367 unsigned int i;
1369 *valueized_anything = false;
1371 FOR_EACH_VEC_ELT (orig, i, vro)
1373 if (vro->opcode == SSA_NAME
1374 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1376 tree tem = SSA_VAL (vro->op0);
1377 if (tem != vro->op0)
1379 *valueized_anything = true;
1380 vro->op0 = tem;
1382 /* If it transforms from an SSA_NAME to a constant, update
1383 the opcode. */
1384 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1385 vro->opcode = TREE_CODE (vro->op0);
1387 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1389 tree tem = SSA_VAL (vro->op1);
1390 if (tem != vro->op1)
1392 *valueized_anything = true;
1393 vro->op1 = tem;
1396 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1398 tree tem = SSA_VAL (vro->op2);
1399 if (tem != vro->op2)
1401 *valueized_anything = true;
1402 vro->op2 = tem;
1405 /* If it transforms from an SSA_NAME to an address, fold with
1406 a preceding indirect reference. */
1407 if (i > 0
1408 && vro->op0
1409 && TREE_CODE (vro->op0) == ADDR_EXPR
1410 && orig[i - 1].opcode == MEM_REF)
1412 if (vn_reference_fold_indirect (&orig, &i))
1413 *valueized_anything = true;
1415 else if (i > 0
1416 && vro->opcode == SSA_NAME
1417 && orig[i - 1].opcode == MEM_REF)
1419 if (vn_reference_maybe_forwprop_address (&orig, &i))
1420 *valueized_anything = true;
1422 /* If it transforms a non-constant ARRAY_REF into a constant
1423 one, adjust the constant offset. */
1424 else if (vro->opcode == ARRAY_REF
1425 && vro->off == -1
1426 && TREE_CODE (vro->op0) == INTEGER_CST
1427 && TREE_CODE (vro->op1) == INTEGER_CST
1428 && TREE_CODE (vro->op2) == INTEGER_CST)
1430 offset_int off = ((wi::to_offset (vro->op0)
1431 - wi::to_offset (vro->op1))
1432 * wi::to_offset (vro->op2));
1433 if (wi::fits_shwi_p (off))
1434 vro->off = off.to_shwi ();
1438 return orig;
1441 static vec<vn_reference_op_s>
1442 valueize_refs (vec<vn_reference_op_s> orig)
1444 bool tem;
1445 return valueize_refs_1 (orig, &tem);
1448 static vec<vn_reference_op_s> shared_lookup_references;
1450 /* Create a vector of vn_reference_op_s structures from REF, a
1451 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1452 this function. *VALUEIZED_ANYTHING will specify whether any
1453 operands were valueized. */
1455 static vec<vn_reference_op_s>
1456 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1458 if (!ref)
1459 return vNULL;
1460 shared_lookup_references.truncate (0);
1461 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1462 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1463 valueized_anything);
1464 return shared_lookup_references;
1467 /* Create a vector of vn_reference_op_s structures from CALL, a
1468 call statement. The vector is shared among all callers of
1469 this function. */
1471 static vec<vn_reference_op_s>
1472 valueize_shared_reference_ops_from_call (gcall *call)
1474 if (!call)
1475 return vNULL;
1476 shared_lookup_references.truncate (0);
1477 copy_reference_ops_from_call (call, &shared_lookup_references);
1478 shared_lookup_references = valueize_refs (shared_lookup_references);
1479 return shared_lookup_references;
1482 /* Lookup an SCCVN reference operation VR in the current hash table.
1483 Returns the resulting value number if it exists in the hash table,
1484 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1485 vn_reference_t stored in the hashtable if something is found. */
1487 static tree
1488 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1490 vn_reference_s **slot;
1491 hashval_t hash;
1493 hash = vr->hashcode;
1494 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1495 if (!slot && current_info == optimistic_info)
1496 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1497 if (slot)
1499 if (vnresult)
1500 *vnresult = (vn_reference_t)*slot;
1501 return ((vn_reference_t)*slot)->result;
1504 return NULL_TREE;
1507 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1508 with the current VUSE and performs the expression lookup. */
1510 static void *
1511 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1512 unsigned int cnt, void *vr_)
1514 vn_reference_t vr = (vn_reference_t)vr_;
1515 vn_reference_s **slot;
1516 hashval_t hash;
1518 /* This bounds the stmt walks we perform on reference lookups
1519 to O(1) instead of O(N) where N is the number of dominating
1520 stores. */
1521 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1522 return (void *)-1;
1524 if (last_vuse_ptr)
1525 *last_vuse_ptr = vuse;
1527 /* Fixup vuse and hash. */
1528 if (vr->vuse)
1529 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1530 vr->vuse = vuse_ssa_val (vuse);
1531 if (vr->vuse)
1532 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1534 hash = vr->hashcode;
1535 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1536 if (!slot && current_info == optimistic_info)
1537 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1538 if (slot)
1539 return *slot;
1541 return NULL;
1544 /* Lookup an existing or insert a new vn_reference entry into the
1545 value table for the VUSE, SET, TYPE, OPERANDS reference which
1546    has the value VALUE, which is either a constant or an SSA name.  */
1548 static vn_reference_t
1549 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1550 alias_set_type set,
1551 tree type,
1552 vec<vn_reference_op_s,
1553 va_heap> operands,
1554 tree value)
1556 vn_reference_s vr1;
1557 vn_reference_t result;
1558 unsigned value_id;
1559 vr1.vuse = vuse;
1560 vr1.operands = operands;
1561 vr1.type = type;
1562 vr1.set = set;
1563 vr1.hashcode = vn_reference_compute_hash (&vr1);
1564 if (vn_reference_lookup_1 (&vr1, &result))
1565 return result;
1566 if (TREE_CODE (value) == SSA_NAME)
1567 value_id = VN_INFO (value)->value_id;
1568 else
1569 value_id = get_or_alloc_constant_value_id (value);
1570 return vn_reference_insert_pieces (vuse, set, type,
1571 operands.copy (), value, value_id);
1574 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1575    from the statement defining VUSE and, if not successful, tries to
1576 translate *REFP and VR_ through an aggregate copy at the definition
1577 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1578 of *REF and *VR. If only disambiguation was performed then
1579 *DISAMBIGUATE_ONLY is set to true. */
1581 static void *
1582 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1583 bool *disambiguate_only)
1585 vn_reference_t vr = (vn_reference_t)vr_;
1586 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1587 tree base = ao_ref_base (ref);
1588 HOST_WIDE_INT offset, maxsize;
1589 static vec<vn_reference_op_s>
1590 lhs_ops = vNULL;
1591 ao_ref lhs_ref;
1592 bool lhs_ref_ok = false;
1594   /* If the reference is based on a parameter that was determined to be
1595      pointing to readonly memory, it doesn't change.  */
1596 if (TREE_CODE (base) == MEM_REF
1597 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1598 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1599 && bitmap_bit_p (const_parms,
1600 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1602 *disambiguate_only = true;
1603 return NULL;
1606   /* First try to disambiguate after value-replacing in the definition's LHS.  */
1607 if (is_gimple_assign (def_stmt))
1609 tree lhs = gimple_assign_lhs (def_stmt);
1610 bool valueized_anything = false;
1611 /* Avoid re-allocation overhead. */
1612 lhs_ops.truncate (0);
1613 copy_reference_ops_from_ref (lhs, &lhs_ops);
1614 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1615 if (valueized_anything)
1617 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1618 get_alias_set (lhs),
1619 TREE_TYPE (lhs), lhs_ops);
1620 if (lhs_ref_ok
1621 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1623 *disambiguate_only = true;
1624 return NULL;
1627 else
1629 ao_ref_init (&lhs_ref, lhs);
1630 lhs_ref_ok = true;
1633 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1634 && gimple_call_num_args (def_stmt) <= 4)
1636       /* For builtin calls, valueize their arguments and call the
1637 alias oracle again. Valueization may improve points-to
1638 info of pointers and constify size and position arguments.
1639 Originally this was motivated by PR61034 which has
1640 conditional calls to free falsely clobbering ref because
1641 of imprecise points-to info of the argument. */
1642 tree oldargs[4];
1643 bool valueized_anything = false;
1644 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1646 oldargs[i] = gimple_call_arg (def_stmt, i);
1647 if (TREE_CODE (oldargs[i]) == SSA_NAME
1648 && VN_INFO (oldargs[i])->valnum != oldargs[i])
1650 gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
1651 valueized_anything = true;
1654 if (valueized_anything)
1656 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1657 ref);
1658 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1659 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1660 if (!res)
1662 *disambiguate_only = true;
1663 return NULL;
1668 if (*disambiguate_only)
1669 return (void *)-1;
1671 offset = ref->offset;
1672 maxsize = ref->max_size;
1674 /* If we cannot constrain the size of the reference we cannot
1675 test if anything kills it. */
1676 if (maxsize == -1)
1677 return (void *)-1;
1679 /* We can't deduce anything useful from clobbers. */
1680 if (gimple_clobber_p (def_stmt))
1681 return (void *)-1;
1683 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1684 from that definition.
1685 1) Memset. */
1686 if (is_gimple_reg_type (vr->type)
1687 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1688 && integer_zerop (gimple_call_arg (def_stmt, 1))
1689 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1690 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1692 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1693 tree base2;
1694 HOST_WIDE_INT offset2, size2, maxsize2;
1695 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
1696 size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
1697 if ((unsigned HOST_WIDE_INT)size2 / 8
1698 == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
1699 && maxsize2 != -1
1700 && operand_equal_p (base, base2, 0)
1701 && offset2 <= offset
1702 && offset2 + size2 >= offset + maxsize)
1704 tree val = build_zero_cst (vr->type);
1705 return vn_reference_lookup_or_insert_for_pieces
1706 (vuse, vr->set, vr->type, vr->operands, val);
1710 /* 2) Assignment from an empty CONSTRUCTOR. */
1711 else if (is_gimple_reg_type (vr->type)
1712 && gimple_assign_single_p (def_stmt)
1713 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1714 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1716 tree base2;
1717 HOST_WIDE_INT offset2, size2, maxsize2;
1718 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1719 &offset2, &size2, &maxsize2);
1720 if (maxsize2 != -1
1721 && operand_equal_p (base, base2, 0)
1722 && offset2 <= offset
1723 && offset2 + size2 >= offset + maxsize)
1725 tree val = build_zero_cst (vr->type);
1726 return vn_reference_lookup_or_insert_for_pieces
1727 (vuse, vr->set, vr->type, vr->operands, val);
1731   /* 3) Assignment from a constant.  We can use fold's native encode/interpret
1732 routines to extract the assigned bits. */
1733 else if (vn_walk_kind == VN_WALKREWRITE
1734 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1735 && ref->size == maxsize
1736 && maxsize % BITS_PER_UNIT == 0
1737 && offset % BITS_PER_UNIT == 0
1738 && is_gimple_reg_type (vr->type)
1739 && gimple_assign_single_p (def_stmt)
1740 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
1742 tree base2;
1743 HOST_WIDE_INT offset2, size2, maxsize2;
1744 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1745 &offset2, &size2, &maxsize2);
1746 if (maxsize2 != -1
1747 && maxsize2 == size2
1748 && size2 % BITS_PER_UNIT == 0
1749 && offset2 % BITS_PER_UNIT == 0
1750 && operand_equal_p (base, base2, 0)
1751 && offset2 <= offset
1752 && offset2 + size2 >= offset + maxsize)
1754 /* We support up to 512-bit values (for V8DFmode). */
1755 unsigned char buffer[64];
1756 int len;
1758 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1759 buffer, sizeof (buffer));
1760 if (len > 0)
1762 tree val = native_interpret_expr (vr->type,
1763 buffer
1764 + ((offset - offset2)
1765 / BITS_PER_UNIT),
1766 ref->size / BITS_PER_UNIT);
1767 if (val)
1768 return vn_reference_lookup_or_insert_for_pieces
1769 (vuse, vr->set, vr->type, vr->operands, val);
1774   /* 4) Assignment from an SSA name whose definition we may be able
1775 to access pieces from. */
1776 else if (ref->size == maxsize
1777 && is_gimple_reg_type (vr->type)
1778 && gimple_assign_single_p (def_stmt)
1779 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1781 tree rhs1 = gimple_assign_rhs1 (def_stmt);
1782 gimple *def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
1783 if (is_gimple_assign (def_stmt2)
1784 && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1785 || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1786 && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1788 tree base2;
1789 HOST_WIDE_INT offset2, size2, maxsize2, off;
1790 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1791 &offset2, &size2, &maxsize2);
1792 off = offset - offset2;
1793 if (maxsize2 != -1
1794 && maxsize2 == size2
1795 && operand_equal_p (base, base2, 0)
1796 && offset2 <= offset
1797 && offset2 + size2 >= offset + maxsize)
1799 tree val = NULL_TREE;
1800 HOST_WIDE_INT elsz
1801 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
1802 if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
1804 if (off == 0)
1805 val = gimple_assign_rhs1 (def_stmt2);
1806 else if (off == elsz)
1807 val = gimple_assign_rhs2 (def_stmt2);
1809 else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1810 && off % elsz == 0)
1812 tree ctor = gimple_assign_rhs1 (def_stmt2);
1813 unsigned i = off / elsz;
1814 if (i < CONSTRUCTOR_NELTS (ctor))
1816 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
1817 if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
1819 if (TREE_CODE (TREE_TYPE (elt->value))
1820 != VECTOR_TYPE)
1821 val = elt->value;
1825 if (val)
1826 return vn_reference_lookup_or_insert_for_pieces
1827 (vuse, vr->set, vr->type, vr->operands, val);
1832 /* 5) For aggregate copies translate the reference through them if
1833 the copy kills ref. */
1834 else if (vn_walk_kind == VN_WALKREWRITE
1835 && gimple_assign_single_p (def_stmt)
1836 && (DECL_P (gimple_assign_rhs1 (def_stmt))
1837 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
1838 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1840 tree base2;
1841 HOST_WIDE_INT maxsize2;
1842 int i, j;
1843 auto_vec<vn_reference_op_s> rhs;
1844 vn_reference_op_t vro;
1845 ao_ref r;
1847 if (!lhs_ref_ok)
1848 return (void *)-1;
1850 /* See if the assignment kills REF. */
1851 base2 = ao_ref_base (&lhs_ref);
1852 maxsize2 = lhs_ref.max_size;
1853 if (maxsize2 == -1
1854 || (base != base2
1855 && (TREE_CODE (base) != MEM_REF
1856 || TREE_CODE (base2) != MEM_REF
1857 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
1858 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
1859 TREE_OPERAND (base2, 1))))
1860 || !stmt_kills_ref_p (def_stmt, ref))
1861 return (void *)-1;
1863 /* Find the common base of ref and the lhs. lhs_ops already
1864 contains valueized operands for the lhs. */
1865 i = vr->operands.length () - 1;
1866 j = lhs_ops.length () - 1;
1867 while (j >= 0 && i >= 0
1868 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
1870 i--;
1871 j--;
1874 /* ??? The innermost op should always be a MEM_REF and we already
1875 checked that the assignment to the lhs kills vr. Thus for
1876 aggregate copies using char[] types the vn_reference_op_eq
1877 may fail when comparing types for compatibility. But we really
1878 don't care here - further lookups with the rewritten operands
1879 will simply fail if we messed up types too badly. */
1880 HOST_WIDE_INT extra_off = 0;
1881 if (j == 0 && i >= 0
1882 && lhs_ops[0].opcode == MEM_REF
1883 && lhs_ops[0].off != -1)
1885 if (lhs_ops[0].off == vr->operands[i].off)
1886 i--, j--;
1887 else if (vr->operands[i].opcode == MEM_REF
1888 && vr->operands[i].off != -1)
1890 extra_off = vr->operands[i].off - lhs_ops[0].off;
1891 i--, j--;
1895 /* i now points to the first additional op.
1896 ??? LHS may not be completely contained in VR, one or more
1897 VIEW_CONVERT_EXPRs could be in its way. We could at least
1898 try handling outermost VIEW_CONVERT_EXPRs. */
1899 if (j != -1)
1900 return (void *)-1;
1902 /* Now re-write REF to be based on the rhs of the assignment. */
1903 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1905 /* Apply an extra offset to the inner MEM_REF of the RHS. */
1906 if (extra_off != 0)
1908 if (rhs.length () < 2
1909 || rhs[0].opcode != MEM_REF
1910 || rhs[0].off == -1)
1911 return (void *)-1;
1912 rhs[0].off += extra_off;
1913 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
1914 build_int_cst (TREE_TYPE (rhs[0].op0),
1915 extra_off));
1918 /* We need to pre-pend vr->operands[0..i] to rhs. */
1919 vec<vn_reference_op_s> old = vr->operands;
1920 if (i + 1 + rhs.length () > vr->operands.length ())
1922 vr->operands.safe_grow (i + 1 + rhs.length ());
1923 if (old == shared_lookup_references)
1924 shared_lookup_references = vr->operands;
1926 else
1927 vr->operands.truncate (i + 1 + rhs.length ());
1928 FOR_EACH_VEC_ELT (rhs, j, vro)
1929 vr->operands[i + 1 + j] = *vro;
1930 vr->operands = valueize_refs (vr->operands);
1931 if (old == shared_lookup_references)
1932 shared_lookup_references = vr->operands;
1933 vr->hashcode = vn_reference_compute_hash (vr);
1935 /* Try folding the new reference to a constant. */
1936 tree val = fully_constant_vn_reference_p (vr);
1937 if (val)
1938 return vn_reference_lookup_or_insert_for_pieces
1939 (vuse, vr->set, vr->type, vr->operands, val);
1941 /* Adjust *ref from the new operands. */
1942 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1943 return (void *)-1;
1944 /* This can happen with bitfields. */
1945 if (ref->size != r.size)
1946 return (void *)-1;
1947 *ref = r;
1949 /* Do not update last seen VUSE after translating. */
1950 last_vuse_ptr = NULL;
1952 /* Keep looking for the adjusted *REF / VR pair. */
1953 return NULL;
1956 /* 6) For memcpy copies translate the reference through them if
1957 the copy kills ref. */
1958 else if (vn_walk_kind == VN_WALKREWRITE
1959 && is_gimple_reg_type (vr->type)
1960 /* ??? Handle BCOPY as well. */
1961 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
1962 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
1963 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
1964 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
1965 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
1966 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
1967 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
1968 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
1970 tree lhs, rhs;
1971 ao_ref r;
1972 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
1973 vn_reference_op_s op;
1974 HOST_WIDE_INT at;
1977 /* Only handle non-variable, addressable refs. */
1978 if (ref->size != maxsize
1979 || offset % BITS_PER_UNIT != 0
1980 || ref->size % BITS_PER_UNIT != 0)
1981 return (void *)-1;
1983 /* Extract a pointer base and an offset for the destination. */
1984 lhs = gimple_call_arg (def_stmt, 0);
1985 lhs_offset = 0;
1986 if (TREE_CODE (lhs) == SSA_NAME)
1988 lhs = SSA_VAL (lhs);
1989 if (TREE_CODE (lhs) == SSA_NAME)
1991 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
1992 if (gimple_assign_single_p (def_stmt)
1993 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
1994 lhs = gimple_assign_rhs1 (def_stmt);
1997 if (TREE_CODE (lhs) == ADDR_EXPR)
1999 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2000 &lhs_offset);
2001 if (!tem)
2002 return (void *)-1;
2003 if (TREE_CODE (tem) == MEM_REF
2004 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2006 lhs = TREE_OPERAND (tem, 0);
2007 if (TREE_CODE (lhs) == SSA_NAME)
2008 lhs = SSA_VAL (lhs);
2009 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2011 else if (DECL_P (tem))
2012 lhs = build_fold_addr_expr (tem);
2013 else
2014 return (void *)-1;
2016 if (TREE_CODE (lhs) != SSA_NAME
2017 && TREE_CODE (lhs) != ADDR_EXPR)
2018 return (void *)-1;
2020 /* Extract a pointer base and an offset for the source. */
2021 rhs = gimple_call_arg (def_stmt, 1);
2022 rhs_offset = 0;
2023 if (TREE_CODE (rhs) == SSA_NAME)
2024 rhs = SSA_VAL (rhs);
2025 if (TREE_CODE (rhs) == ADDR_EXPR)
2027 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2028 &rhs_offset);
2029 if (!tem)
2030 return (void *)-1;
2031 if (TREE_CODE (tem) == MEM_REF
2032 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2034 rhs = TREE_OPERAND (tem, 0);
2035 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2037 else if (DECL_P (tem))
2038 rhs = build_fold_addr_expr (tem);
2039 else
2040 return (void *)-1;
2042 if (TREE_CODE (rhs) != SSA_NAME
2043 && TREE_CODE (rhs) != ADDR_EXPR)
2044 return (void *)-1;
2046 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2048 /* The bases of the destination and the references have to agree. */
2049 if ((TREE_CODE (base) != MEM_REF
2050 && !DECL_P (base))
2051 || (TREE_CODE (base) == MEM_REF
2052 && (TREE_OPERAND (base, 0) != lhs
2053 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
2054 || (DECL_P (base)
2055 && (TREE_CODE (lhs) != ADDR_EXPR
2056 || TREE_OPERAND (lhs, 0) != base)))
2057 return (void *)-1;
2059 at = offset / BITS_PER_UNIT;
2060 if (TREE_CODE (base) == MEM_REF)
2061 at += tree_to_uhwi (TREE_OPERAND (base, 1));
2062 /* If the access is completely outside of the memcpy destination
2063 area there is no aliasing. */
2064 if (lhs_offset >= at + maxsize / BITS_PER_UNIT
2065 || lhs_offset + copy_size <= at)
2066 return NULL;
2067 /* And the access has to be contained within the memcpy destination. */
2068 if (lhs_offset > at
2069 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2070 return (void *)-1;
2072 /* Make room for 2 operands in the new reference. */
2073 if (vr->operands.length () < 2)
2075 vec<vn_reference_op_s> old = vr->operands;
2076 vr->operands.safe_grow_cleared (2);
2077 if (old == shared_lookup_references
2078 && vr->operands != old)
2079 shared_lookup_references = vr->operands;
2081 else
2082 vr->operands.truncate (2);
2084 /* The looked-through reference is a simple MEM_REF. */
2085 memset (&op, 0, sizeof (op));
2086 op.type = vr->type;
2087 op.opcode = MEM_REF;
2088 op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
2089 op.off = at - lhs_offset + rhs_offset;
2090 vr->operands[0] = op;
2091 op.type = TREE_TYPE (rhs);
2092 op.opcode = TREE_CODE (rhs);
2093 op.op0 = rhs;
2094 op.off = -1;
2095 vr->operands[1] = op;
2096 vr->hashcode = vn_reference_compute_hash (vr);
2098 /* Adjust *ref from the new operands. */
2099 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2100 return (void *)-1;
2101 /* This can happen with bitfields. */
2102 if (ref->size != r.size)
2103 return (void *)-1;
2104 *ref = r;
2106 /* Do not update last seen VUSE after translating. */
2107 last_vuse_ptr = NULL;
2109 /* Keep looking for the adjusted *REF / VR pair. */
2110 return NULL;
2113 /* Bail out and stop walking. */
2114 return (void *)-1;
 2117 /* Lookup a reference operation by its parts, in the current hash table.
2118 Returns the resulting value number if it exists in the hash table,
2119 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2120 vn_reference_t stored in the hashtable if something is found. */
2122 tree
2123 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2124 vec<vn_reference_op_s> operands,
2125 vn_reference_t *vnresult, vn_lookup_kind kind)
2127 struct vn_reference_s vr1;
2128 vn_reference_t tmp;
2129 tree cst;
2131 if (!vnresult)
2132 vnresult = &tmp;
2133 *vnresult = NULL;
2135 vr1.vuse = vuse_ssa_val (vuse);
2136 shared_lookup_references.truncate (0);
2137 shared_lookup_references.safe_grow (operands.length ());
2138 memcpy (shared_lookup_references.address (),
2139 operands.address (),
2140 sizeof (vn_reference_op_s)
2141 * operands.length ());
2142 vr1.operands = operands = shared_lookup_references
2143 = valueize_refs (shared_lookup_references);
2144 vr1.type = type;
2145 vr1.set = set;
2146 vr1.hashcode = vn_reference_compute_hash (&vr1);
2147 if ((cst = fully_constant_vn_reference_p (&vr1)))
2148 return cst;
2150 vn_reference_lookup_1 (&vr1, vnresult);
2151 if (!*vnresult
2152 && kind != VN_NOWALK
2153 && vr1.vuse)
2155 ao_ref r;
2156 vn_walk_kind = kind;
2157 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2158 *vnresult =
2159 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2160 vn_reference_lookup_2,
2161 vn_reference_lookup_3,
2162 vuse_ssa_val, &vr1);
2163 gcc_checking_assert (vr1.operands == shared_lookup_references);
2166 if (*vnresult)
2167 return (*vnresult)->result;
2169 return NULL_TREE;
2172 /* Lookup OP in the current hash table, and return the resulting value
2173 number if it exists in the hash table. Return NULL_TREE if it does
2174 not exist in the hash table or if the result field of the structure
 2175    was NULL.  VNRESULT will be filled in with the actual
2176 stored in the hashtable if one exists. */
2178 tree
2179 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2180 vn_reference_t *vnresult)
2182 vec<vn_reference_op_s> operands;
2183 struct vn_reference_s vr1;
2184 tree cst;
2185 bool valuezied_anything;
2187 if (vnresult)
2188 *vnresult = NULL;
2190 vr1.vuse = vuse_ssa_val (vuse);
2191 vr1.operands = operands
2192 = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
2193 vr1.type = TREE_TYPE (op);
2194 vr1.set = get_alias_set (op);
2195 vr1.hashcode = vn_reference_compute_hash (&vr1);
2196 if ((cst = fully_constant_vn_reference_p (&vr1)))
2197 return cst;
2199 if (kind != VN_NOWALK
2200 && vr1.vuse)
2202 vn_reference_t wvnresult;
2203 ao_ref r;
2204 /* Make sure to use a valueized reference if we valueized anything.
2205 Otherwise preserve the full reference for advanced TBAA. */
2206 if (!valuezied_anything
2207 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2208 vr1.operands))
2209 ao_ref_init (&r, op);
2210 vn_walk_kind = kind;
2211 wvnresult =
2212 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2213 vn_reference_lookup_2,
2214 vn_reference_lookup_3,
2215 vuse_ssa_val, &vr1);
2216 gcc_checking_assert (vr1.operands == shared_lookup_references);
2217 if (wvnresult)
2219 if (vnresult)
2220 *vnresult = wvnresult;
2221 return wvnresult->result;
2224 return NULL_TREE;
2227 return vn_reference_lookup_1 (&vr1, vnresult);
2230 /* Lookup CALL in the current hash table and return the entry in
2231 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2233 void
2234 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2235 vn_reference_t vr)
2237 if (vnresult)
2238 *vnresult = NULL;
2240 tree vuse = gimple_vuse (call);
2242 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2243 vr->operands = valueize_shared_reference_ops_from_call (call);
2244 vr->type = gimple_expr_type (call);
2245 vr->set = 0;
2246 vr->hashcode = vn_reference_compute_hash (vr);
2247 vn_reference_lookup_1 (vr, vnresult);
2250 /* Insert OP into the current hash table with a value number of
2251 RESULT, and return the resulting reference structure we created. */
2253 static vn_reference_t
2254 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2256 vn_reference_s **slot;
2257 vn_reference_t vr1;
2258 bool tem;
2260 vr1 = current_info->references_pool->allocate ();
2261 if (TREE_CODE (result) == SSA_NAME)
2262 vr1->value_id = VN_INFO (result)->value_id;
2263 else
2264 vr1->value_id = get_or_alloc_constant_value_id (result);
2265 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2266 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2267 vr1->type = TREE_TYPE (op);
2268 vr1->set = get_alias_set (op);
2269 vr1->hashcode = vn_reference_compute_hash (vr1);
2270 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2271 vr1->result_vdef = vdef;
2273 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2274 INSERT);
2276 /* Because we lookup stores using vuses, and value number failures
2277 using the vdefs (see visit_reference_op_store for how and why),
2278 it's possible that on failure we may try to insert an already
 2279      inserted store.  This is not wrong; there is no ssa name for a
2280 store that we could use as a differentiator anyway. Thus, unlike
2281 the other lookup functions, you cannot gcc_assert (!*slot)
2282 here. */
2284 /* But free the old slot in case of a collision. */
2285 if (*slot)
2286 free_reference (*slot);
2288 *slot = vr1;
2289 return vr1;
 2292 /* Insert a reference by its pieces into the current hash table with
2293 a value number of RESULT. Return the resulting reference
2294 structure we created. */
2296 vn_reference_t
2297 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2298 vec<vn_reference_op_s> operands,
2299 tree result, unsigned int value_id)
2302 vn_reference_s **slot;
2303 vn_reference_t vr1;
2305 vr1 = current_info->references_pool->allocate ();
2306 vr1->value_id = value_id;
2307 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2308 vr1->operands = valueize_refs (operands);
2309 vr1->type = type;
2310 vr1->set = set;
2311 vr1->hashcode = vn_reference_compute_hash (vr1);
2312 if (result && TREE_CODE (result) == SSA_NAME)
2313 result = SSA_VAL (result);
2314 vr1->result = result;
2316 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2317 INSERT);
2319 /* At this point we should have all the things inserted that we have
2320 seen before, and we should never try inserting something that
2321 already exists. */
2322 gcc_assert (!*slot);
2323 if (*slot)
2324 free_reference (*slot);
2326 *slot = vr1;
2327 return vr1;
 2330 /* Compute and return the hash value for nary operation VNO1.  */
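/* Note: operands of commutative (and commutative ternary) operations are
   put into a canonical order before hashing, and comparison operands may be
   swapped together with swapping the comparison code, so that e.g.
   a_1 + b_2 and b_2 + a_1, or a_1 < b_2 and b_2 > a_1, end up with the same
   hash and compare equal (illustrative SSA names).  */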
2332 static hashval_t
2333 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2335 inchash::hash hstate;
2336 unsigned i;
2338 for (i = 0; i < vno1->length; ++i)
2339 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2340 vno1->op[i] = SSA_VAL (vno1->op[i]);
2342 if (((vno1->length == 2
2343 && commutative_tree_code (vno1->opcode))
2344 || (vno1->length == 3
2345 && commutative_ternary_tree_code (vno1->opcode)))
2346 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2347 std::swap (vno1->op[0], vno1->op[1]);
2348 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2349 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2351 std::swap (vno1->op[0], vno1->op[1]);
2352 vno1->opcode = swap_tree_comparison (vno1->opcode);
2355 hstate.add_int (vno1->opcode);
2356 for (i = 0; i < vno1->length; ++i)
2357 inchash::add_expr (vno1->op[i], hstate);
2359 return hstate.end ();
2362 /* Compare nary operations VNO1 and VNO2 and return true if they are
2363 equivalent. */
2365 bool
2366 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2368 unsigned i;
2370 if (vno1->hashcode != vno2->hashcode)
2371 return false;
2373 if (vno1->length != vno2->length)
2374 return false;
2376 if (vno1->opcode != vno2->opcode
2377 || !types_compatible_p (vno1->type, vno2->type))
2378 return false;
2380 for (i = 0; i < vno1->length; ++i)
2381 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2382 return false;
2384 return true;
2387 /* Initialize VNO from the pieces provided. */
2389 static void
2390 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2391 enum tree_code code, tree type, tree *ops)
2393 vno->opcode = code;
2394 vno->length = length;
2395 vno->type = type;
2396 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2399 /* Initialize VNO from OP. */
2401 static void
2402 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2404 unsigned i;
2406 vno->opcode = TREE_CODE (op);
2407 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2408 vno->type = TREE_TYPE (op);
2409 for (i = 0; i < vno->length; ++i)
2410 vno->op[i] = TREE_OPERAND (op, i);
2413 /* Return the number of operands for a vn_nary ops structure from STMT. */
2415 static unsigned int
2416 vn_nary_length_from_stmt (gimple *stmt)
2418 switch (gimple_assign_rhs_code (stmt))
2420 case REALPART_EXPR:
2421 case IMAGPART_EXPR:
2422 case VIEW_CONVERT_EXPR:
2423 return 1;
2425 case BIT_FIELD_REF:
2426 return 3;
2428 case CONSTRUCTOR:
2429 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2431 default:
2432 return gimple_num_ops (stmt) - 1;
2436 /* Initialize VNO from STMT. */
2438 static void
2439 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2441 unsigned i;
2443 vno->opcode = gimple_assign_rhs_code (stmt);
2444 vno->type = gimple_expr_type (stmt);
2445 switch (vno->opcode)
2447 case REALPART_EXPR:
2448 case IMAGPART_EXPR:
2449 case VIEW_CONVERT_EXPR:
2450 vno->length = 1;
2451 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2452 break;
2454 case BIT_FIELD_REF:
2455 vno->length = 3;
2456 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2457 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2458 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2459 break;
2461 case CONSTRUCTOR:
2462 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2463 for (i = 0; i < vno->length; ++i)
2464 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2465 break;
2467 default:
2468 gcc_checking_assert (!gimple_assign_single_p (stmt));
2469 vno->length = gimple_num_ops (stmt) - 1;
2470 for (i = 0; i < vno->length; ++i)
2471 vno->op[i] = gimple_op (stmt, i + 1);
2475 /* Compute the hashcode for VNO and look for it in the hash table;
2476 return the resulting value number if it exists in the hash table.
2477 Return NULL_TREE if it does not exist in the hash table or if the
2478 result field of the operation is NULL. VNRESULT will contain the
2479 vn_nary_op_t from the hashtable if it exists. */
2481 static tree
2482 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2484 vn_nary_op_s **slot;
2486 if (vnresult)
2487 *vnresult = NULL;
2489 vno->hashcode = vn_nary_op_compute_hash (vno);
2490 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2491 NO_INSERT);
2492 if (!slot && current_info == optimistic_info)
2493 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2494 NO_INSERT);
2495 if (!slot)
2496 return NULL_TREE;
2497 if (vnresult)
2498 *vnresult = *slot;
2499 return (*slot)->result;
 2502 /* Lookup an n-ary operation by its pieces and return the resulting value
2503 number if it exists in the hash table. Return NULL_TREE if it does
2504 not exist in the hash table or if the result field of the operation
2505 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2506 if it exists. */
2508 tree
2509 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2510 tree type, tree *ops, vn_nary_op_t *vnresult)
2512 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2513 sizeof_vn_nary_op (length));
2514 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2515 return vn_nary_op_lookup_1 (vno1, vnresult);
2518 /* Lookup OP in the current hash table, and return the resulting value
2519 number if it exists in the hash table. Return NULL_TREE if it does
2520 not exist in the hash table or if the result field of the operation
2521 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2522 if it exists. */
2524 tree
2525 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2527 vn_nary_op_t vno1
2528 = XALLOCAVAR (struct vn_nary_op_s,
2529 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2530 init_vn_nary_op_from_op (vno1, op);
2531 return vn_nary_op_lookup_1 (vno1, vnresult);
2534 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2535 value number if it exists in the hash table. Return NULL_TREE if
2536 it does not exist in the hash table. VNRESULT will contain the
2537 vn_nary_op_t from the hashtable if it exists. */
2539 tree
2540 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2542 vn_nary_op_t vno1
2543 = XALLOCAVAR (struct vn_nary_op_s,
2544 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2545 init_vn_nary_op_from_stmt (vno1, stmt);
2546 return vn_nary_op_lookup_1 (vno1, vnresult);
2549 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
2551 static tree
2552 vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops)
2554 if (!rcode.is_tree_code ())
2555 return NULL_TREE;
2556 vn_nary_op_t vnresult = NULL;
2557 return vn_nary_op_lookup_pieces (TREE_CODE_LENGTH ((tree_code) rcode),
2558 (tree_code) rcode, type, ops, &vnresult);
2561 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2563 static vn_nary_op_t
2564 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2566 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2569 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2570 obstack. */
2572 static vn_nary_op_t
2573 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2575 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2576 &current_info->nary_obstack);
2578 vno1->value_id = value_id;
2579 vno1->length = length;
2580 vno1->result = result;
2582 return vno1;
2585 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2586 VNO->HASHCODE first. */
2588 static vn_nary_op_t
2589 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2590 bool compute_hash)
2592 vn_nary_op_s **slot;
2594 if (compute_hash)
2595 vno->hashcode = vn_nary_op_compute_hash (vno);
2597 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2598 gcc_assert (!*slot);
2600 *slot = vno;
2601 return vno;
 2604 /* Insert an n-ary operation into the current hash table using its
2605 pieces. Return the vn_nary_op_t structure we created and put in
2606 the hashtable. */
2608 vn_nary_op_t
2609 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2610 tree type, tree *ops,
2611 tree result, unsigned int value_id)
2613 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2614 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2615 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2618 /* Insert OP into the current hash table with a value number of
2619 RESULT. Return the vn_nary_op_t structure we created and put in
2620 the hashtable. */
2622 vn_nary_op_t
2623 vn_nary_op_insert (tree op, tree result)
2625 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2626 vn_nary_op_t vno1;
2628 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2629 init_vn_nary_op_from_op (vno1, op);
2630 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2633 /* Insert the rhs of STMT into the current hash table with a value number of
2634 RESULT. */
2636 static vn_nary_op_t
2637 vn_nary_op_insert_stmt (gimple *stmt, tree result)
2639 vn_nary_op_t vno1
2640 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2641 result, VN_INFO (result)->value_id);
2642 init_vn_nary_op_from_stmt (vno1, stmt);
2643 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2646 /* Compute a hashcode for PHI operation VP1 and return it. */
2648 static inline hashval_t
2649 vn_phi_compute_hash (vn_phi_t vp1)
2651 inchash::hash hstate (vp1->block->index);
2652 tree phi1op;
2653 tree type;
2654 edge e;
2655 edge_iterator ei;
2657 /* If all PHI arguments are constants we need to distinguish
2658 the PHI node via its type. */
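  /* E.g. two PHIs <1(2), 2(3)> in the same block, one of type int and one
     of type long, must not be unified; hashing the type here (and checking
     it in vn_phi_eq) keeps them apart (illustrative example).  */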
2659 type = vp1->type;
2660 hstate.merge_hash (vn_hash_type (type));
2662 FOR_EACH_EDGE (e, ei, vp1->block->preds)
 2664       /* Don't hash backedge values; they need to be handled as VN_TOP
2665 for optimistic value-numbering. */
2666 if (e->flags & EDGE_DFS_BACK)
2667 continue;
2669 phi1op = vp1->phiargs[e->dest_idx];
2670 if (phi1op == VN_TOP)
2671 continue;
2672 inchash::add_expr (phi1op, hstate);
2675 return hstate.end ();
2678 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2680 static int
2681 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2683 if (vp1->hashcode != vp2->hashcode)
2684 return false;
2686 if (vp1->block == vp2->block)
2688 int i;
2689 tree phi1op;
2691 /* If the PHI nodes do not have compatible types
2692 they are not the same. */
2693 if (!types_compatible_p (vp1->type, vp2->type))
2694 return false;
 2696       /* Any phi in the same block will have its arguments in the
2697 same edge order, because of how we store phi nodes. */
2698 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2700 tree phi2op = vp2->phiargs[i];
2701 if (phi1op == VN_TOP || phi2op == VN_TOP)
2702 continue;
2703 if (!expressions_equal_p (phi1op, phi2op))
2704 return false;
2706 return true;
2708 return false;
2711 static vec<tree> shared_lookup_phiargs;
2713 /* Lookup PHI in the current hash table, and return the resulting
2714 value number if it exists in the hash table. Return NULL_TREE if
2715 it does not exist in the hash table. */
2717 static tree
2718 vn_phi_lookup (gimple *phi)
2720 vn_phi_s **slot;
2721 struct vn_phi_s vp1;
2722 edge e;
2723 edge_iterator ei;
2725 shared_lookup_phiargs.truncate (0);
2726 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
2728 /* Canonicalize the SSA_NAME's to their value number. */
2729 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
2731 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
2732 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2733 shared_lookup_phiargs[e->dest_idx] = def;
2735 vp1.type = TREE_TYPE (gimple_phi_result (phi));
2736 vp1.phiargs = shared_lookup_phiargs;
2737 vp1.block = gimple_bb (phi);
2738 vp1.hashcode = vn_phi_compute_hash (&vp1);
2739 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2740 NO_INSERT);
2741 if (!slot && current_info == optimistic_info)
2742 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2743 NO_INSERT);
2744 if (!slot)
2745 return NULL_TREE;
2746 return (*slot)->result;
2749 /* Insert PHI into the current hash table with a value number of
2750 RESULT. */
2752 static vn_phi_t
2753 vn_phi_insert (gimple *phi, tree result)
2755 vn_phi_s **slot;
2756 vn_phi_t vp1 = current_info->phis_pool->allocate ();
2757 vec<tree> args = vNULL;
2758 edge e;
2759 edge_iterator ei;
2761 args.safe_grow (gimple_phi_num_args (phi));
2763 /* Canonicalize the SSA_NAME's to their value number. */
2764 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
2766 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
2767 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2768 args[e->dest_idx] = def;
2770 vp1->value_id = VN_INFO (result)->value_id;
2771 vp1->type = TREE_TYPE (gimple_phi_result (phi));
2772 vp1->phiargs = args;
2773 vp1->block = gimple_bb (phi);
2774 vp1->result = result;
2775 vp1->hashcode = vn_phi_compute_hash (vp1);
2777 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
2779 /* Because we iterate over phi operations more than once, it's
 2780      possible the slot might already exist here, hence no assert.  */
2781 *slot = vp1;
2782 return vp1;
2786 /* Print set of components in strongly connected component SCC to OUT. */
2788 static void
2789 print_scc (FILE *out, vec<tree> scc)
2791 tree var;
2792 unsigned int i;
2794 fprintf (out, "SCC consists of:");
2795 FOR_EACH_VEC_ELT (scc, i, var)
2797 fprintf (out, " ");
2798 print_generic_expr (out, var, 0);
2800 fprintf (out, "\n");
2803 /* Set the value number of FROM to TO, return true if it has changed
2804 as a result. */
2806 static inline bool
2807 set_ssa_val_to (tree from, tree to)
2809 tree currval = SSA_VAL (from);
2810 HOST_WIDE_INT toff, coff;
 2812   /* The only things we allow as value numbers are ssa_names
2813 and invariants. So assert that here. We don't allow VN_TOP
2814 as visiting a stmt should produce a value-number other than
2815 that.
2816 ??? Still VN_TOP can happen for unreachable code, so force
2817 it to varying in that case. Not all code is prepared to
2818 get VN_TOP on valueization. */
2819 if (to == VN_TOP)
2821 if (dump_file && (dump_flags & TDF_DETAILS))
2822 fprintf (dump_file, "Forcing value number to varying on "
2823 "receiving VN_TOP\n");
2824 to = from;
2827 gcc_assert (to != NULL_TREE
2828 && ((TREE_CODE (to) == SSA_NAME
2829 && (to == from || SSA_VAL (to) == to))
2830 || is_gimple_min_invariant (to)));
2832 if (from != to)
2834 if (currval == from)
2836 if (dump_file && (dump_flags & TDF_DETAILS))
2838 fprintf (dump_file, "Not changing value number of ");
2839 print_generic_expr (dump_file, from, 0);
2840 fprintf (dump_file, " from VARYING to ");
2841 print_generic_expr (dump_file, to, 0);
2842 fprintf (dump_file, "\n");
2844 return false;
2846 else if (TREE_CODE (to) == SSA_NAME
2847 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2848 to = from;
2851 if (dump_file && (dump_flags & TDF_DETAILS))
2853 fprintf (dump_file, "Setting value number of ");
2854 print_generic_expr (dump_file, from, 0);
2855 fprintf (dump_file, " to ");
2856 print_generic_expr (dump_file, to, 0);
2859 if (currval != to
2860 && !operand_equal_p (currval, to, 0)
2861 /* ??? For addresses involving volatile objects or types operand_equal_p
2862 does not reliably detect ADDR_EXPRs as equal. We know we are only
2863 getting invariant gimple addresses here, so can use
2864 get_addr_base_and_unit_offset to do this comparison. */
2865 && !(TREE_CODE (currval) == ADDR_EXPR
2866 && TREE_CODE (to) == ADDR_EXPR
2867 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
2868 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
2869 && coff == toff))
2871 VN_INFO (from)->valnum = to;
2872 if (dump_file && (dump_flags & TDF_DETAILS))
2873 fprintf (dump_file, " (changed)\n");
2874 return true;
2876 if (dump_file && (dump_flags & TDF_DETAILS))
2877 fprintf (dump_file, "\n");
2878 return false;
2881 /* Mark as processed all the definitions in the defining stmt of USE, or
2882 the USE itself. */
2884 static void
2885 mark_use_processed (tree use)
2887 ssa_op_iter iter;
2888 def_operand_p defp;
2889 gimple *stmt = SSA_NAME_DEF_STMT (use);
2891 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
2893 VN_INFO (use)->use_processed = true;
2894 return;
2897 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2899 tree def = DEF_FROM_PTR (defp);
2901 VN_INFO (def)->use_processed = true;
 2905 /* Value number all definitions in STMT to themselves.
2906 Return true if a value number changed. */
2908 static bool
2909 defs_to_varying (gimple *stmt)
2911 bool changed = false;
2912 ssa_op_iter iter;
2913 def_operand_p defp;
2915 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2917 tree def = DEF_FROM_PTR (defp);
2918 changed |= set_ssa_val_to (def, def);
2920 return changed;
2923 /* Visit a copy between LHS and RHS, return true if the value number
2924 changed. */
2926 static bool
2927 visit_copy (tree lhs, tree rhs)
2929 /* Valueize. */
2930 rhs = SSA_VAL (rhs);
2932 return set_ssa_val_to (lhs, rhs);
2935 /* Visit a nary operator RHS, value number it, and return true if the
2936 value number of LHS has changed as a result. */
2938 static bool
2939 visit_nary_op (tree lhs, gimple *stmt)
2941 bool changed = false;
2942 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2944 if (result)
2945 changed = set_ssa_val_to (lhs, result);
2946 else
2948 changed = set_ssa_val_to (lhs, lhs);
2949 vn_nary_op_insert_stmt (stmt, lhs);
2952 return changed;
2955 /* Visit a call STMT storing into LHS. Return true if the value number
2956 of the LHS has changed as a result. */
2958 static bool
2959 visit_reference_op_call (tree lhs, gcall *stmt)
2961 bool changed = false;
2962 struct vn_reference_s vr1;
2963 vn_reference_t vnresult = NULL;
2964 tree vdef = gimple_vdef (stmt);
2966 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
2967 if (lhs && TREE_CODE (lhs) != SSA_NAME)
2968 lhs = NULL_TREE;
2970 vn_reference_lookup_call (stmt, &vnresult, &vr1);
2971 if (vnresult)
2973 if (vnresult->result_vdef && vdef)
2974 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
2976 if (!vnresult->result && lhs)
2977 vnresult->result = lhs;
2979 if (vnresult->result && lhs)
2980 changed |= set_ssa_val_to (lhs, vnresult->result);
2982 else
2984 vn_reference_t vr2;
2985 vn_reference_s **slot;
2986 if (vdef)
2987 changed |= set_ssa_val_to (vdef, vdef);
2988 if (lhs)
2989 changed |= set_ssa_val_to (lhs, lhs);
2990 vr2 = current_info->references_pool->allocate ();
2991 vr2->vuse = vr1.vuse;
2992 /* As we are not walking the virtual operand chain we know the
2993 shared_lookup_references are still original so we can re-use
2994 them here. */
2995 vr2->operands = vr1.operands.copy ();
2996 vr2->type = vr1.type;
2997 vr2->set = vr1.set;
2998 vr2->hashcode = vr1.hashcode;
2999 vr2->result = lhs;
3000 vr2->result_vdef = vdef;
3001 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3002 INSERT);
3003 gcc_assert (!*slot);
3004 *slot = vr2;
3007 return changed;
3010 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3011 and return true if the value number of the LHS has changed as a result. */
3013 static bool
3014 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3016 bool changed = false;
3017 tree last_vuse;
3018 tree result;
3020 last_vuse = gimple_vuse (stmt);
3021 last_vuse_ptr = &last_vuse;
3022 result = vn_reference_lookup (op, gimple_vuse (stmt),
3023 default_vn_walk_kind, NULL);
3024 last_vuse_ptr = NULL;
3026 /* We handle type-punning through unions by value-numbering based
3027 on offset and size of the access. Be prepared to handle a
3028 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
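  /* E.g. (illustrative) for  union { int i; float f; } u;  a load of u.f
     following a store to u.i can be found by offset and size but yields a
     value of type int; the code below wraps it in VIEW_CONVERT_EXPR <float>,
     or re-uses an existing SSA name for that expression if one exists.  */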
3029 if (result
3030 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3032 /* We will be setting the value number of lhs to the value number
3033 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3034 So first simplify and lookup this expression to see if it
3035 is already available. */
3036 gimple_seq stmts = NULL;
3037 mprts_hook = vn_lookup_simplify_result;
3038 tree val = gimple_simplify (VIEW_CONVERT_EXPR, TREE_TYPE (op),
3039 result, &stmts, vn_valueize);
3040 mprts_hook = NULL;
3041 if (!val)
3043 val = vn_nary_op_lookup_pieces (1, VIEW_CONVERT_EXPR,
3044 TREE_TYPE (op), &result, NULL);
3045 if (!val)
3047 val = make_ssa_name (TREE_TYPE (op));
3048 gimple *new_stmt = gimple_build_assign (val, VIEW_CONVERT_EXPR,
3049 build1 (VIEW_CONVERT_EXPR,
3050 TREE_TYPE (op),
3051 result));
3052 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
3055 if (gimple_seq_empty_p (stmts))
3056 /* The expression is already available. */
3057 result = val;
3058 else
3060 gcc_assert (gimple_seq_singleton_p (stmts));
3061 /* The expression is not yet available, value-number lhs to
3062 the new SSA_NAME we created. */
3063 result = val;
3064 /* Initialize value-number information properly. */
3065 VN_INFO_GET (result)->valnum = result;
3066 VN_INFO (result)->value_id = get_next_value_id ();
3067 VN_INFO (result)->expr = stmts;
3068 VN_INFO (result)->needs_insertion = true;
3069 /* As all "inserted" statements are singleton SCCs, insert
3070 to the valid table. This is strictly needed to
3071 avoid re-generating new value SSA_NAMEs for the same
3072 expression during SCC iteration over and over (the
3073 optimistic table gets cleared after each iteration).
3074 We do not need to insert into the optimistic table, as
3075 lookups there will fall back to the valid table. */
3076 if (current_info == optimistic_info)
3078 current_info = valid_info;
3079 vn_nary_op_insert_stmt (gimple_seq_first_stmt (stmts), result);
3080 current_info = optimistic_info;
3082 else
3083 vn_nary_op_insert_stmt (gimple_seq_first_stmt (stmts), result);
3084 if (dump_file && (dump_flags & TDF_DETAILS))
3086 fprintf (dump_file, "Inserting name ");
3087 print_generic_expr (dump_file, result, 0);
3088 fprintf (dump_file, " for expression ");
3089 print_gimple_expr (dump_file, gimple_seq_first_stmt (stmts),
3090 0, TDF_SLIM);
3091 fprintf (dump_file, "\n");
3096 if (result)
3097 changed = set_ssa_val_to (lhs, result);
3098 else
3100 changed = set_ssa_val_to (lhs, lhs);
3101 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3104 return changed;
3108 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3109 and return true if the value number of the LHS has changed as a result. */
3111 static bool
3112 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3114 bool changed = false;
3115 vn_reference_t vnresult = NULL;
3116 tree result, assign;
3117 bool resultsame = false;
3118 tree vuse = gimple_vuse (stmt);
3119 tree vdef = gimple_vdef (stmt);
3121 if (TREE_CODE (op) == SSA_NAME)
3122 op = SSA_VAL (op);
3124 /* First we want to lookup using the *vuses* from the store and see
 3125    if the last store to this location with the same address
3126 had the same value.
3128 The vuses represent the memory state before the store. If the
 3129    memory state, address, and value of the store are the same as the
3130 last store to this location, then this store will produce the
3131 same memory state as that store.
3133 In this case the vdef versions for this store are value numbered to those
3134 vuse versions, since they represent the same memory state after
3135 this store.
3137 Otherwise, the vdefs for the store are used when inserting into
3138 the table, since the store generates a new memory state. */
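  /* E.g. (illustrative GIMPLE):

	 # .MEM_3 = VDEF <.MEM_2>
	 *p_1 = x_4;
	 ...
	 # .MEM_7 = VDEF <.MEM_6>
	 *p_1 = x_4;

     if the lookup below shows that the incoming memory state .MEM_6 already
     holds x_4 at *p_1, then .MEM_7 is value numbered to the value of .MEM_6,
     recording that the second store does not change the memory state.  */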
3140 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);
3142 if (result)
3144 if (TREE_CODE (result) == SSA_NAME)
3145 result = SSA_VAL (result);
3146 resultsame = expressions_equal_p (result, op);
3149 if ((!result || !resultsame)
3150 /* Only perform the following when being called from PRE
3151 which embeds tail merging. */
3152 && default_vn_walk_kind == VN_WALK)
3154 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3155 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
3156 if (vnresult)
3158 VN_INFO (vdef)->use_processed = true;
3159 return set_ssa_val_to (vdef, vnresult->result_vdef);
3163 if (!result || !resultsame)
3165 if (dump_file && (dump_flags & TDF_DETAILS))
3167 fprintf (dump_file, "No store match\n");
3168 fprintf (dump_file, "Value numbering store ");
3169 print_generic_expr (dump_file, lhs, 0);
3170 fprintf (dump_file, " to ");
3171 print_generic_expr (dump_file, op, 0);
3172 fprintf (dump_file, "\n");
3174 /* Have to set value numbers before insert, since insert is
3175 going to valueize the references in-place. */
3176 if (vdef)
3178 changed |= set_ssa_val_to (vdef, vdef);
3181 /* Do not insert structure copies into the tables. */
3182 if (is_gimple_min_invariant (op)
3183 || is_gimple_reg (op))
3184 vn_reference_insert (lhs, op, vdef, NULL);
3186 /* Only perform the following when being called from PRE
3187 which embeds tail merging. */
3188 if (default_vn_walk_kind == VN_WALK)
3190 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3191 vn_reference_insert (assign, lhs, vuse, vdef);
3194 else
3196 /* We had a match, so value number the vdef to have the value
3197 number of the vuse it came from. */
3199 if (dump_file && (dump_flags & TDF_DETAILS))
 3200 	fprintf (dump_file, "Store matched earlier value, "
3201 "value numbering store vdefs to matching vuses.\n");
3203 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3206 return changed;
3209 /* Visit and value number PHI, return true if the value number
3210 changed. */
3212 static bool
3213 visit_phi (gimple *phi)
3215 bool changed = false;
3216 tree result;
3217 tree sameval = VN_TOP;
3218 bool allsame = true;
3220 /* TODO: We could check for this in init_sccvn, and replace this
3221 with a gcc_assert. */
3222 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3223 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3225 /* See if all non-TOP arguments have the same value. TOP is
3226 equivalent to everything, so we can ignore it. */
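  /* E.g. for  x_3 = PHI <y_1(2), x_2(3)>  where x_2 is currently value
     numbered to y_1, and where any argument from a not-yet-visited
     definition valueizes to VN_TOP and is skipped, all remaining arguments
     agree and x_3 can be value numbered to y_1 (illustrative SSA names).  */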
3227 edge_iterator ei;
3228 edge e;
3229 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3230 if (e->flags & EDGE_EXECUTABLE)
3232 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3234 if (TREE_CODE (def) == SSA_NAME)
3235 def = SSA_VAL (def);
3236 if (def == VN_TOP)
3237 continue;
3238 if (sameval == VN_TOP)
3239 sameval = def;
3240 else if (!expressions_equal_p (def, sameval))
3242 allsame = false;
3243 break;
3247 /* If none of the edges was executable or all incoming values are
 3248      undefined, keep the value-number at VN_TOP.  */
3249 if (sameval == VN_TOP)
3250 return set_ssa_val_to (PHI_RESULT (phi), VN_TOP);
3252 /* First see if it is equivalent to a phi node in this block. We prefer
3253 this as it allows IV elimination - see PRs 66502 and 67167. */
3254 result = vn_phi_lookup (phi);
3255 if (result)
3256 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3257 /* Otherwise all value numbered to the same value, the phi node has that
3258 value. */
3259 else if (allsame)
3260 changed = set_ssa_val_to (PHI_RESULT (phi), sameval);
3261 else
3263 vn_phi_insert (phi, PHI_RESULT (phi));
3264 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3267 return changed;
3270 /* Try to simplify RHS using equivalences and constant folding. */
3272 static tree
3273 try_to_simplify (gassign *stmt)
3275 enum tree_code code = gimple_assign_rhs_code (stmt);
3276 tree tem;
 3278   /* For stores we can end up simplifying an SSA_NAME rhs.  Just return
 3279      in this case; there is no point in doing extra work.  */
3280 if (code == SSA_NAME)
3281 return NULL_TREE;
3283 /* First try constant folding based on our current lattice. */
3284 mprts_hook = vn_lookup_simplify_result;
3285 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3286 mprts_hook = NULL;
3287 if (tem
3288 && (TREE_CODE (tem) == SSA_NAME
3289 || is_gimple_min_invariant (tem)))
3290 return tem;
3292 return NULL_TREE;
3295 /* Visit and value number USE, return true if the value number
3296 changed. */
3298 static bool
3299 visit_use (tree use)
3301 bool changed = false;
3302 gimple *stmt = SSA_NAME_DEF_STMT (use);
3304 mark_use_processed (use);
3306 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3307 if (dump_file && (dump_flags & TDF_DETAILS)
3308 && !SSA_NAME_IS_DEFAULT_DEF (use))
3310 fprintf (dump_file, "Value numbering ");
3311 print_generic_expr (dump_file, use, 0);
3312 fprintf (dump_file, " stmt = ");
3313 print_gimple_stmt (dump_file, stmt, 0, 0);
3316 /* Handle uninitialized uses. */
3317 if (SSA_NAME_IS_DEFAULT_DEF (use))
3318 changed = set_ssa_val_to (use, use);
3319 else
3321 if (gimple_code (stmt) == GIMPLE_PHI)
3322 changed = visit_phi (stmt);
3323 else if (gimple_has_volatile_ops (stmt))
3324 changed = defs_to_varying (stmt);
3325 else if (is_gimple_assign (stmt))
3327 enum tree_code code = gimple_assign_rhs_code (stmt);
3328 tree lhs = gimple_assign_lhs (stmt);
3329 tree rhs1 = gimple_assign_rhs1 (stmt);
3330 tree simplified;
3332 /* Shortcut for copies. Simplifying copies is pointless,
3333 since we copy the expression and value they represent. */
3334 if (code == SSA_NAME
3335 && TREE_CODE (lhs) == SSA_NAME)
3337 changed = visit_copy (lhs, rhs1);
3338 goto done;
3340 simplified = try_to_simplify (as_a <gassign *> (stmt));
3341 if (simplified)
3343 if (dump_file && (dump_flags & TDF_DETAILS))
3345 fprintf (dump_file, "RHS ");
3346 print_gimple_expr (dump_file, stmt, 0, 0);
3347 fprintf (dump_file, " simplified to ");
3348 print_generic_expr (dump_file, simplified, 0);
3349 fprintf (dump_file, "\n");
3352 /* Setting value numbers to constants will occasionally
3353 screw up phi congruence because constants are not
3354 uniquely associated with a single ssa name that can be
3355 looked up. */
3356 if (simplified
3357 && is_gimple_min_invariant (simplified)
3358 && TREE_CODE (lhs) == SSA_NAME)
3360 changed = set_ssa_val_to (lhs, simplified);
3361 goto done;
3363 else if (simplified
3364 && TREE_CODE (simplified) == SSA_NAME
3365 && TREE_CODE (lhs) == SSA_NAME)
3367 changed = visit_copy (lhs, simplified);
3368 goto done;
3371 if ((TREE_CODE (lhs) == SSA_NAME
3372 /* We can substitute SSA_NAMEs that are live over
3373 abnormal edges with their constant value. */
3374 && !(gimple_assign_copy_p (stmt)
3375 && is_gimple_min_invariant (rhs1))
3376 && !(simplified
3377 && is_gimple_min_invariant (simplified))
3378 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3379 /* Stores or copies from SSA_NAMEs that are live over
3380 abnormal edges are a problem. */
3381 || (code == SSA_NAME
3382 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3383 changed = defs_to_varying (stmt);
3384 else if (REFERENCE_CLASS_P (lhs)
3385 || DECL_P (lhs))
3386 changed = visit_reference_op_store (lhs, rhs1, stmt);
3387 else if (TREE_CODE (lhs) == SSA_NAME)
3389 if ((gimple_assign_copy_p (stmt)
3390 && is_gimple_min_invariant (rhs1))
3391 || (simplified
3392 && is_gimple_min_invariant (simplified)))
3394 if (simplified)
3395 changed = set_ssa_val_to (lhs, simplified);
3396 else
3397 changed = set_ssa_val_to (lhs, rhs1);
3399 else
3401 /* First try to lookup the simplified expression. */
3402 if (simplified)
3404 enum gimple_rhs_class rhs_class;
3407 rhs_class = get_gimple_rhs_class (TREE_CODE (simplified));
3408 if ((rhs_class == GIMPLE_UNARY_RHS
3409 || rhs_class == GIMPLE_BINARY_RHS
3410 || rhs_class == GIMPLE_TERNARY_RHS)
3411 && valid_gimple_rhs_p (simplified))
3413 tree result = vn_nary_op_lookup (simplified, NULL);
3414 if (result)
3416 changed = set_ssa_val_to (lhs, result);
3417 goto done;
3422 /* Otherwise visit the original statement. */
3423 switch (vn_get_stmt_kind (stmt))
3425 case VN_NARY:
3426 changed = visit_nary_op (lhs, stmt);
3427 break;
3428 case VN_REFERENCE:
3429 changed = visit_reference_op_load (lhs, rhs1, stmt);
3430 break;
3431 default:
3432 changed = defs_to_varying (stmt);
3433 break;
3437 else
3438 changed = defs_to_varying (stmt);
3440 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
3442 tree lhs = gimple_call_lhs (stmt);
3443 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3445 /* Try constant folding based on our current lattice. */
3446 tree simplified = gimple_fold_stmt_to_constant_1 (stmt,
3447 vn_valueize);
3448 if (simplified)
3450 if (dump_file && (dump_flags & TDF_DETAILS))
3452 fprintf (dump_file, "call ");
3453 print_gimple_expr (dump_file, stmt, 0, 0);
3454 fprintf (dump_file, " simplified to ");
3455 print_generic_expr (dump_file, simplified, 0);
3456 fprintf (dump_file, "\n");
3459 /* Setting value numbers to constants will occasionally
3460 screw up phi congruence because constants are not
3461 uniquely associated with a single ssa name that can be
3462 looked up. */
3463 if (simplified
3464 && is_gimple_min_invariant (simplified))
3466 changed = set_ssa_val_to (lhs, simplified);
3467 if (gimple_vdef (stmt))
3468 changed |= set_ssa_val_to (gimple_vdef (stmt),
3469 SSA_VAL (gimple_vuse (stmt)));
3470 goto done;
3472 else if (simplified
3473 && TREE_CODE (simplified) == SSA_NAME)
3475 changed = visit_copy (lhs, simplified);
3476 if (gimple_vdef (stmt))
3477 changed |= set_ssa_val_to (gimple_vdef (stmt),
3478 SSA_VAL (gimple_vuse (stmt)));
3479 goto done;
3481 else
3483 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3485 changed = defs_to_varying (stmt);
3486 goto done;
3491 if (!gimple_call_internal_p (stmt)
3492 && (/* Calls to the same function with the same vuse
3493 and the same operands do not necessarily return the same
3494 value, unless they're pure or const. */
3495 gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
3496 /* If calls have a vdef, subsequent calls won't have
3497 the same incoming vuse. So, if 2 calls with vdef have the
3498 same vuse, we know they're not subsequent.
3499 We can value number 2 calls to the same function with the
3500 same vuse and the same operands which are not subsequent
3501 the same, because there is no code in the program that can
3502 compare the 2 values... */
3503 || (gimple_vdef (stmt)
3504 /* ... unless the call returns a pointer which does
3505 not alias with anything else. In which case the
3506 information that the values are distinct are encoded
3507 in the IL. */
3508 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
3509 /* Only perform the following when being called from PRE
3510 which embeds tail merging. */
3511 && default_vn_walk_kind == VN_WALK)))
3512 changed = visit_reference_op_call (lhs, call_stmt);
3513 else
3514 changed = defs_to_varying (stmt);
3516 else
3517 changed = defs_to_varying (stmt);
3519 done:
3520 return changed;
 3523 /* Compare two operands by reverse postorder index.  */
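/* Informal summary of the ordering used below: names defined by no
   statement (e.g. default definitions) come first, then names whose
   definition is not in a basic block, then definitions ordered by the RPO
   number of their block; within one block PHI definitions precede other
   statements, which are ordered by their UIDs.  Ties fall back to SSA
   version numbers.  */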
3525 static int
3526 compare_ops (const void *pa, const void *pb)
3528 const tree opa = *((const tree *)pa);
3529 const tree opb = *((const tree *)pb);
3530 gimple *opstmta = SSA_NAME_DEF_STMT (opa);
3531 gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
3532 basic_block bba;
3533 basic_block bbb;
3535 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3536 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3537 else if (gimple_nop_p (opstmta))
3538 return -1;
3539 else if (gimple_nop_p (opstmtb))
3540 return 1;
3542 bba = gimple_bb (opstmta);
3543 bbb = gimple_bb (opstmtb);
3545 if (!bba && !bbb)
3546 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3547 else if (!bba)
3548 return -1;
3549 else if (!bbb)
3550 return 1;
3552 if (bba == bbb)
3554 if (gimple_code (opstmta) == GIMPLE_PHI
3555 && gimple_code (opstmtb) == GIMPLE_PHI)
3556 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3557 else if (gimple_code (opstmta) == GIMPLE_PHI)
3558 return -1;
3559 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3560 return 1;
3561 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3562 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3563 else
3564 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3566 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3569 /* Sort an array containing members of a strongly connected component
3570 SCC so that the members are ordered by RPO number.
3571 This means that when the sort is complete, iterating through the
3572 array will give you the members in RPO order. */
3574 static void
3575 sort_scc (vec<tree> scc)
3577 scc.qsort (compare_ops);
3580 /* Insert the no longer used nary ONARY to the hash INFO. */
3582 static void
3583 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3585 size_t size = sizeof_vn_nary_op (onary->length);
3586 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3587 &info->nary_obstack);
3588 memcpy (nary, onary, size);
3589 vn_nary_op_insert_into (nary, info->nary, false);
3592 /* Insert the no longer used phi OPHI to the hash INFO. */
3594 static void
3595 copy_phi (vn_phi_t ophi, vn_tables_t info)
3597 vn_phi_t phi = info->phis_pool->allocate ();
3598 vn_phi_s **slot;
3599 memcpy (phi, ophi, sizeof (*phi));
3600 ophi->phiargs.create (0);
3601 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
3602 gcc_assert (!*slot);
3603 *slot = phi;
3606 /* Insert the no longer used reference OREF to the hash INFO. */
3608 static void
3609 copy_reference (vn_reference_t oref, vn_tables_t info)
3611 vn_reference_t ref;
3612 vn_reference_s **slot;
3613 ref = info->references_pool->allocate ();
3614 memcpy (ref, oref, sizeof (*ref));
3615 oref->operands.create (0);
3616 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
3617 if (*slot)
3618 free_reference (*slot);
3619 *slot = ref;
3622 /* Process a strongly connected component in the SSA graph. */
3624 static void
3625 process_scc (vec<tree> scc)
3627 tree var;
3628 unsigned int i;
3629 unsigned int iterations = 0;
3630 bool changed = true;
3631 vn_nary_op_iterator_type hin;
3632 vn_phi_iterator_type hip;
3633 vn_reference_iterator_type hir;
3634 vn_nary_op_t nary;
3635 vn_phi_t phi;
3636 vn_reference_t ref;
3638 /* If the SCC has a single member, just visit it. */
3639 if (scc.length () == 1)
3641 tree use = scc[0];
3642 if (VN_INFO (use)->use_processed)
3643 return;
3644 /* We need to make sure it doesn't form a cycle itself, which can
3645 happen for self-referential PHI nodes. In that case we would
3646 end up inserting an expression with VN_TOP operands into the
3647 valid table which makes us derive bogus equivalences later.
3648 The cheapest way to check this is to assume it for all PHI nodes. */
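      /* E.g.  i_1 = PHI <i_1(4), 0(2)>  forms a single-member SCC that
	 still references itself, so it must go through the optimistic
	 iteration below rather than a single visit (illustrative GIMPLE).  */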
3649 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3650 /* Fallthru to iteration. */ ;
3651 else
3653 visit_use (use);
3654 return;
3658 if (dump_file && (dump_flags & TDF_DETAILS))
3659 print_scc (dump_file, scc);
3661 /* Iterate over the SCC with the optimistic table until it stops
3662 changing. */
3663 current_info = optimistic_info;
3664 while (changed)
3666 changed = false;
3667 iterations++;
3668 if (dump_file && (dump_flags & TDF_DETAILS))
3669 fprintf (dump_file, "Starting iteration %d\n", iterations);
3670 /* As we are value-numbering optimistically we have to
3671 clear the expression tables and the simplified expressions
3672 in each iteration until we converge. */
3673 optimistic_info->nary->empty ();
3674 optimistic_info->phis->empty ();
3675 optimistic_info->references->empty ();
3676 obstack_free (&optimistic_info->nary_obstack, NULL);
3677 gcc_obstack_init (&optimistic_info->nary_obstack);
3678 optimistic_info->phis_pool->release ();
3679 optimistic_info->references_pool->release ();
3680 FOR_EACH_VEC_ELT (scc, i, var)
3681 gcc_assert (!VN_INFO (var)->needs_insertion
3682 && VN_INFO (var)->expr == NULL);
3683 FOR_EACH_VEC_ELT (scc, i, var)
3684 changed |= visit_use (var);
3687 if (dump_file && (dump_flags & TDF_DETAILS))
3688 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
3689 statistics_histogram_event (cfun, "SCC iterations", iterations);
3691 /* Finally, copy the contents of the no longer used optimistic
3692 table to the valid table. */
3693 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
3694 copy_nary (nary, valid_info);
3695 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
3696 copy_phi (phi, valid_info);
3697 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
3698 ref, vn_reference_t, hir)
3699 copy_reference (ref, valid_info);
3701 current_info = valid_info;
3705 /* Pop the components of the found SCC for NAME off the SCC stack
3706 and process them. Returns true if all went well, false if
3707 we run into resource limits. */
3709 static bool
3710 extract_and_process_scc_for_name (tree name)
3712 auto_vec<tree> scc;
3713 tree x;
3715 /* Found an SCC, pop the components off the SCC stack and
3716 process them. */
3719 x = sccstack.pop ();
3721 VN_INFO (x)->on_sccstack = false;
3722 scc.safe_push (x);
3723 } while (x != name);
 3725   /* Bail out of SCCVN in case an SCC turns out to be incredibly large.  */
3726 if (scc.length ()
3727 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3729 if (dump_file)
3730 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3731 "SCC size %u exceeding %u\n", scc.length (),
3732 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3734 return false;
3737 if (scc.length () > 1)
3738 sort_scc (scc);
3740 process_scc (scc);
3742 return true;
3745 /* Depth first search on NAME to discover and process SCC's in the SSA
3746 graph.
3747 Execution of this algorithm relies on the fact that the SCC's are
3748 popped off the stack in topological order.
3749 Returns true if successful, false if we stopped processing SCC's due
3750 to resource constraints. */
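/* Implementation note: this is essentially an iterative, explicit-stack
   form of Tarjan's SCC algorithm; the dfsnum/low/on_sccstack fields play
   their usual roles and NAMEVEC/ITERVEC below stand in for the recursion
   stack.  */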
3752 static bool
3753 DFS (tree name)
3755 vec<ssa_op_iter> itervec = vNULL;
3756 vec<tree> namevec = vNULL;
3757 use_operand_p usep = NULL;
3758 gimple *defstmt;
3759 tree use;
3760 ssa_op_iter iter;
3762 start_over:
3763 /* SCC info */
3764 VN_INFO (name)->dfsnum = next_dfs_num++;
3765 VN_INFO (name)->visited = true;
3766 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3768 sccstack.safe_push (name);
3769 VN_INFO (name)->on_sccstack = true;
3770 defstmt = SSA_NAME_DEF_STMT (name);
3772 /* Recursively DFS on our operands, looking for SCC's. */
3773 if (!gimple_nop_p (defstmt))
3775 /* Push a new iterator. */
3776 if (gphi *phi = dyn_cast <gphi *> (defstmt))
3777 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
3778 else
3779 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3781 else
3782 clear_and_done_ssa_iter (&iter);
3784 while (1)
3786 /* If we are done processing uses of a name, go up the stack
3787 of iterators and process SCCs as we found them. */
3788 if (op_iter_done (&iter))
3790 /* See if we found an SCC. */
3791 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3792 if (!extract_and_process_scc_for_name (name))
3794 namevec.release ();
3795 itervec.release ();
3796 return false;
3799 /* Check if we are done. */
3800 if (namevec.is_empty ())
3802 namevec.release ();
3803 itervec.release ();
3804 return true;
3807 /* Restore the last use walker and continue walking there. */
3808 use = name;
3809 name = namevec.pop ();
3810 memcpy (&iter, &itervec.last (),
3811 sizeof (ssa_op_iter));
3812 itervec.pop ();
3813 goto continue_walking;
3816 use = USE_FROM_PTR (usep);
3818 /* Since we handle phi nodes, we will sometimes get
3819 invariants in the use expression. */
3820 if (TREE_CODE (use) == SSA_NAME)
3822 if (! (VN_INFO (use)->visited))
3824 /* Recurse by pushing the current use walking state on
3825 the stack and starting over. */
3826 itervec.safe_push (iter);
3827 namevec.safe_push (name);
3828 name = use;
3829 goto start_over;
3831 continue_walking:
3832 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3833 VN_INFO (use)->low);
3835 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3836 && VN_INFO (use)->on_sccstack)
3838 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3839 VN_INFO (name)->low);
3843 usep = op_iter_next_use (&iter);
3847 /* Allocate a value number table. */
3849 static void
3850 allocate_vn_table (vn_tables_t table)
3852 table->phis = new vn_phi_table_type (23);
3853 table->nary = new vn_nary_op_table_type (23);
3854 table->references = new vn_reference_table_type (23);
3856 gcc_obstack_init (&table->nary_obstack);
3857 table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
3858 table->references_pool = new object_allocator<vn_reference_s>
3859 ("VN references");
3862 /* Free a value number table. */
3864 static void
3865 free_vn_table (vn_tables_t table)
3867 delete table->phis;
3868 table->phis = NULL;
3869 delete table->nary;
3870 table->nary = NULL;
3871 delete table->references;
3872 table->references = NULL;
3873 obstack_free (&table->nary_obstack, NULL);
3874 delete table->phis_pool;
3875 delete table->references_pool;
3878 static void
3879 init_scc_vn (void)
3881 size_t i;
3882 int j;
3883 int *rpo_numbers_temp;
3885 calculate_dominance_info (CDI_DOMINATORS);
3886 mark_dfs_back_edges ();
3888 sccstack.create (0);
3889 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
3891 constant_value_ids = BITMAP_ALLOC (NULL);
3893 next_dfs_num = 1;
3894 next_value_id = 1;
3896 vn_ssa_aux_table.create (num_ssa_names + 1);
3897 /* VEC_alloc doesn't actually grow it to the right size, it just
3898 preallocates the space to do so. */
3899 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
3900 gcc_obstack_init (&vn_ssa_aux_obstack);
3902 shared_lookup_phiargs.create (0);
3903 shared_lookup_references.create (0);
3904 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
3905 rpo_numbers_temp =
3906 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
3907 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
 3909   /* rpo_numbers_temp holds block indices in RPO order, i.e.
 3910      rpo_numbers_temp[i] is the index of the i'th block in RPO.  We want
 3911      to map block indices to RPO numbers, so invert this array.  */
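  /* E.g. (illustrative) if rpo_numbers_temp were {4, 2, 7}, the loop below
     produces rpo_numbers[4] == 0, rpo_numbers[2] == 1 and
     rpo_numbers[7] == 2.  */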
3912 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
3913 rpo_numbers[rpo_numbers_temp[j]] = j;
3915 XDELETE (rpo_numbers_temp);
3917 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3919 renumber_gimple_stmt_uids ();
3921 /* Create the valid and optimistic value numbering tables. */
3922 valid_info = XCNEW (struct vn_tables_s);
3923 allocate_vn_table (valid_info);
3924 optimistic_info = XCNEW (struct vn_tables_s);
3925 allocate_vn_table (optimistic_info);
3926 current_info = valid_info;
3928 /* Create the VN_INFO structures, and initialize value numbers to TOP,
3929 or to VARYING for parameters. */
3930 for (i = 1; i < num_ssa_names; i++)
3932 tree name = ssa_name (i);
3933 if (!name)
3934 continue;
3936 VN_INFO_GET (name)->valnum = VN_TOP;
3937 VN_INFO (name)->needs_insertion = false;
3938 VN_INFO (name)->expr = NULL;
3939 VN_INFO (name)->value_id = 0;
3941 if (!SSA_NAME_IS_DEFAULT_DEF (name))
3942 continue;
3944 switch (TREE_CODE (SSA_NAME_VAR (name)))
3946 case VAR_DECL:
3947 /* Undefined vars keep TOP. */
3948 break;
3950 case PARM_DECL:
3951 /* Parameters are VARYING but we can record a condition
3952 if we know it is a non-NULL pointer. */
3953 VN_INFO (name)->visited = true;
3954 VN_INFO (name)->valnum = name;
3955 if (POINTER_TYPE_P (TREE_TYPE (name))
3956 && nonnull_arg_p (SSA_NAME_VAR (name)))
3958 tree ops[2];
3959 ops[0] = name;
3960 ops[1] = build_int_cst (TREE_TYPE (name), 0);
3961 vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
3962 boolean_true_node, 0);
3963 if (dump_file && (dump_flags & TDF_DETAILS))
3965 fprintf (dump_file, "Recording ");
3966 print_generic_expr (dump_file, name, TDF_SLIM);
3967 fprintf (dump_file, " != 0\n");
3970 break;
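/* For instance, for a hypothetical parameter declared
   "int *p __attribute__((nonnull))", the PARM_DECL case above records the
   nary entry p_1(D) != 0 == true, so later lookups of that comparison in
   code dominated by the entry fold to true (names are illustrative).  */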
3972 case RESULT_DECL:
3973 /* If the result is passed by invisible reference, the default
3974 def is initialized; otherwise it is uninitialized. */
3975 if (DECL_BY_REFERENCE (SSA_NAME_VAR (name)))
3977 VN_INFO (name)->visited = true;
3978 VN_INFO (name)->valnum = name;
3980 break;
3982 default:
3983 gcc_unreachable ();
3988 void
3989 free_scc_vn (void)
3991 size_t i;
3993 delete constant_to_value_id;
3994 constant_to_value_id = NULL;
3995 BITMAP_FREE (constant_value_ids);
3996 shared_lookup_phiargs.release ();
3997 shared_lookup_references.release ();
3998 XDELETEVEC (rpo_numbers);
4000 for (i = 0; i < num_ssa_names; i++)
4002 tree name = ssa_name (i);
4003 if (name
4004 && SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ()
4005 && vn_ssa_aux_table[SSA_NAME_VERSION (name)]
4006 && VN_INFO (name)->needs_insertion)
4007 release_ssa_name (name);
4009 obstack_free (&vn_ssa_aux_obstack, NULL);
4010 vn_ssa_aux_table.release ();
4012 sccstack.release ();
4013 free_vn_table (valid_info);
4014 XDELETE (valid_info);
4015 free_vn_table (optimistic_info);
4016 XDELETE (optimistic_info);
4018 BITMAP_FREE (const_parms);
4021 /* Set *ID according to RESULT. */
4023 static void
4024 set_value_id_for_result (tree result, unsigned int *id)
4026 if (result && TREE_CODE (result) == SSA_NAME)
4027 *id = VN_INFO (result)->value_id;
4028 else if (result && is_gimple_min_invariant (result))
4029 *id = get_or_alloc_constant_value_id (result);
4030 else
4031 *id = get_next_value_id ();
4034 /* Set the value ids in the valid hash tables. */
4036 static void
4037 set_hashtable_value_ids (void)
4039 vn_nary_op_iterator_type hin;
4040 vn_phi_iterator_type hip;
4041 vn_reference_iterator_type hir;
4042 vn_nary_op_t vno;
4043 vn_reference_t vr;
4044 vn_phi_t vp;
4046 /* Now set the value ids of the things we have put in the hash
4047 tables. */
4049 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4050 set_value_id_for_result (vno->result, &vno->value_id);
4052 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4053 set_value_id_for_result (vp->result, &vp->value_id);
4055 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4056 hir)
4057 set_value_id_for_result (vr->result, &vr->value_id);
4060 class sccvn_dom_walker : public dom_walker
4062 public:
4063 sccvn_dom_walker ()
4064 : dom_walker (CDI_DOMINATORS), fail (false), cond_stack (vNULL) {}
4066 virtual void before_dom_children (basic_block);
4067 virtual void after_dom_children (basic_block);
4069 void record_cond (basic_block,
4070 enum tree_code code, tree lhs, tree rhs, bool value);
4071 void record_conds (basic_block,
4072 enum tree_code code, tree lhs, tree rhs, bool value);
4074 bool fail;
4075 vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4076 cond_stack;
4079 /* Record a temporary condition for the BB and its dominated blocks. */
4081 void
4082 sccvn_dom_walker::record_cond (basic_block bb,
4083 enum tree_code code, tree lhs, tree rhs,
4084 bool value)
4086 tree ops[2] = { lhs, rhs };
4087 vn_nary_op_t old = NULL;
4088 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4089 current_info->nary->remove_elt_with_hash (old, old->hashcode);
4090 vn_nary_op_t cond
4091 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4092 value
4093 ? boolean_true_node
4094 : boolean_false_node, 0);
4095 if (dump_file && (dump_flags & TDF_DETAILS))
4097 fprintf (dump_file, "Recording temporarily ");
4098 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4099 fprintf (dump_file, " %s ", get_tree_code_name (code));
4100 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4101 fprintf (dump_file, " == %s%s\n",
4102 value ? "true" : "false",
4103 old ? " (old entry saved)" : "");
4105 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
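/* The pair pushed above (the new condition plus the entry it displaced, if
   any) is what after_dom_children uses to undo this change when we leave
   BB's dominated region.  */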
4108 /* Record temporary conditions for the BB and its dominated blocks
4109 according to LHS CODE RHS == VALUE and the conditions that implies. */
4111 void
4112 sccvn_dom_walker::record_conds (basic_block bb,
4113 enum tree_code code, tree lhs, tree rhs,
4114 bool value)
4116 /* Record the original condition. */
4117 record_cond (bb, code, lhs, rhs, value);
4119 if (!value)
4120 return;
4122 /* Record dominated conditions if the condition is true. Note that
4123 the inversion is already recorded. */
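/* For example, recording "a_1 < b_2 == true" below also records
   "a_1 <= b_2 == true", "a_1 != b_2 == true" and "a_1 == b_2 == false"
   (the SSA names here are purely illustrative).  */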
4124 switch (code)
4126 case LT_EXPR:
4127 case GT_EXPR:
4128 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4129 record_cond (bb, NE_EXPR, lhs, rhs, true);
4130 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4131 break;
4133 case EQ_EXPR:
4134 record_cond (bb, LE_EXPR, lhs, rhs, true);
4135 record_cond (bb, GE_EXPR, lhs, rhs, true);
4136 record_cond (bb, LT_EXPR, lhs, rhs, false);
4137 record_cond (bb, GT_EXPR, lhs, rhs, false);
4138 break;
4140 default:
4141 break;
4145 /* Restore expressions and values derived from conditionals. */
4147 void
4148 sccvn_dom_walker::after_dom_children (basic_block bb)
4150 while (!cond_stack.is_empty ()
4151 && cond_stack.last ().first == bb)
4153 vn_nary_op_t cond = cond_stack.last ().second.first;
4154 vn_nary_op_t old = cond_stack.last ().second.second;
4155 current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4156 if (old)
4157 vn_nary_op_insert_into (old, current_info->nary, false);
4158 cond_stack.pop ();
4162 /* Value number all statements in BB. */
4164 void
4165 sccvn_dom_walker::before_dom_children (basic_block bb)
4167 edge e;
4168 edge_iterator ei;
4170 if (fail)
4171 return;
4173 /* If any of the predecessor edges that do not come from blocks dominated
4174 by us are still marked as possibly executable, consider this block
4175 reachable. */
4176 bool reachable = bb == ENTRY_BLOCK_PTR_FOR_FN (cfun);
4177 FOR_EACH_EDGE (e, ei, bb->preds)
4178 if (!dominated_by_p (CDI_DOMINATORS, e->src, bb))
4179 reachable |= (e->flags & EDGE_EXECUTABLE);
4181 /* If the block is not reachable, all outgoing edges are not
4182 executable, and neither are incoming edges whose source we dominate. */
4183 if (!reachable)
4185 if (dump_file && (dump_flags & TDF_DETAILS))
4186 fprintf (dump_file, "Marking all outgoing edges of unreachable "
4187 "BB %d as not executable\n", bb->index);
4189 FOR_EACH_EDGE (e, ei, bb->succs)
4190 e->flags &= ~EDGE_EXECUTABLE;
4192 FOR_EACH_EDGE (e, ei, bb->preds)
4194 if (dominated_by_p (CDI_DOMINATORS, e->src, bb))
4196 if (dump_file && (dump_flags & TDF_DETAILS))
4197 fprintf (dump_file, "Marking backedge from BB %d into "
4198 "unreachable BB %d as not executable\n",
4199 e->src->index, bb->index);
4200 e->flags &= ~EDGE_EXECUTABLE;
4203 return;
4206 if (dump_file && (dump_flags & TDF_DETAILS))
4207 fprintf (dump_file, "Visiting BB %d\n", bb->index);
4209 /* If we have a single predecessor record the equivalence from a
4210 possible condition on the predecessor edge. */
4211 if (single_pred_p (bb))
4213 edge e = single_pred_edge (bb);
4214 /* Check if there are multiple executable successor edges in
4215 the source block. Otherwise there is no additional info
4216 to be recorded. */
4217 edge e2;
4218 FOR_EACH_EDGE (e2, ei, e->src->succs)
4219 if (e2 != e
4220 && e2->flags & EDGE_EXECUTABLE)
4221 break;
4222 if (e2 && (e2->flags & EDGE_EXECUTABLE))
4224 gimple *stmt = last_stmt (e->src);
4225 if (stmt
4226 && gimple_code (stmt) == GIMPLE_COND)
4228 enum tree_code code = gimple_cond_code (stmt);
4229 tree lhs = gimple_cond_lhs (stmt);
4230 tree rhs = gimple_cond_rhs (stmt);
4231 record_conds (bb, code, lhs, rhs,
4232 (e->flags & EDGE_TRUE_VALUE) != 0);
4233 code = invert_tree_comparison (code, HONOR_NANS (lhs));
4234 if (code != ERROR_MARK)
4235 record_conds (bb, code, lhs, rhs,
4236 (e->flags & EDGE_TRUE_VALUE) == 0);
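/* E.g. if the single predecessor ends in "if (x_1 < y_2)" and BB is reached
   via its true edge, this records x_1 < y_2 == true (plus the conditions it
   implies) and x_1 >= y_2 == false; names are illustrative.  */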
4241 /* Value-number all defs in the basic-block. */
4242 for (gphi_iterator gsi = gsi_start_phis (bb);
4243 !gsi_end_p (gsi); gsi_next (&gsi))
4245 gphi *phi = gsi.phi ();
4246 tree res = PHI_RESULT (phi);
4247 if (!VN_INFO (res)->visited
4248 && !DFS (res))
4250 fail = true;
4251 return;
4254 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4255 !gsi_end_p (gsi); gsi_next (&gsi))
4257 ssa_op_iter i;
4258 tree op;
4259 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4260 if (!VN_INFO (op)->visited
4261 && !DFS (op))
4263 fail = true;
4264 return;
4268 /* Finally look at the last stmt. */
4269 gimple *stmt = last_stmt (bb);
4270 if (!stmt)
4271 return;
4273 enum gimple_code code = gimple_code (stmt);
4274 if (code != GIMPLE_COND
4275 && code != GIMPLE_SWITCH
4276 && code != GIMPLE_GOTO)
4277 return;
4279 if (dump_file && (dump_flags & TDF_DETAILS))
4281 fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
4282 print_gimple_stmt (dump_file, stmt, 0, 0);
4285 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4286 if value-numbering can prove they are not reachable. Handling
4287 computed gotos is also possible. */
4288 tree val;
4289 switch (code)
4291 case GIMPLE_COND:
4293 tree lhs = vn_valueize (gimple_cond_lhs (stmt));
4294 tree rhs = vn_valueize (gimple_cond_rhs (stmt));
4295 val = gimple_simplify (gimple_cond_code (stmt),
4296 boolean_type_node, lhs, rhs,
4297 NULL, vn_valueize);
4298 /* If that didn't simplify to a constant, see if we have recorded
4299 temporary expressions from taken edges. */
4300 if (!val || TREE_CODE (val) != INTEGER_CST)
4302 tree ops[2];
4303 ops[0] = lhs;
4304 ops[1] = rhs;
4305 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
4306 boolean_type_node, ops, NULL);
4308 break;
4310 case GIMPLE_SWITCH:
4311 val = gimple_switch_index (as_a <gswitch *> (stmt));
4312 break;
4313 case GIMPLE_GOTO:
4314 val = gimple_goto_dest (stmt);
4315 break;
4316 default:
4317 gcc_unreachable ();
4319 if (!val)
4320 return;
4322 edge taken = find_taken_edge (bb, vn_valueize (val));
4323 if (!taken)
4324 return;
4326 if (dump_file && (dump_flags & TDF_DETAILS))
4327 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4328 "not executable\n", bb->index, bb->index, taken->dest->index);
4330 FOR_EACH_EDGE (e, ei, bb->succs)
4331 if (e != taken)
4332 e->flags &= ~EDGE_EXECUTABLE;
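/* If, say, the condition value-numbered to boolean_true_node, only the true
   edge keeps EDGE_EXECUTABLE; blocks reachable solely through the cleared
   edges are then treated as unreachable when they are visited later.  */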
4335 /* Do SCCVN. Returns true if it finished, false if we bailed out
4336 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies how
4337 the alias oracle is used when walking memory references during VN. */
4339 bool
4340 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4342 basic_block bb;
4343 size_t i;
4345 default_vn_walk_kind = default_vn_walk_kind_;
4347 init_scc_vn ();
4349 /* Collect pointers we know point to readonly memory. */
4350 const_parms = BITMAP_ALLOC (NULL);
4351 tree fnspec = lookup_attribute ("fn spec",
4352 TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
4353 if (fnspec)
4355 fnspec = TREE_VALUE (TREE_VALUE (fnspec));
4356 i = 1;
4357 for (tree arg = DECL_ARGUMENTS (cfun->decl);
4358 arg; arg = DECL_CHAIN (arg), ++i)
4360 if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
4361 break;
4362 if (TREE_STRING_POINTER (fnspec)[i] == 'R'
4363 || TREE_STRING_POINTER (fnspec)[i] == 'r')
4365 tree name = ssa_default_def (cfun, arg);
4366 if (name)
4367 bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
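/* The "fn spec" string is indexed in parallel with DECL_ARGUMENTS: position
   0 describes the function itself and position i describes the i'th
   parameter, with 'R' or 'r' in an argument's slot meaning the function does
   not write through that pointer, so the memory it points to can be treated
   as read-only here.  E.g. a hypothetical spec of ".R" would mark the first
   parameter's default def as const above.  */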
4372 /* Mark all edges as possibly executable. */
4373 FOR_ALL_BB_FN (bb, cfun)
4375 edge_iterator ei;
4376 edge e;
4377 FOR_EACH_EDGE (e, ei, bb->succs)
4378 e->flags |= EDGE_EXECUTABLE;
4381 /* Walk all blocks in dominator order, value-numbering the SSA defs
4382 of their stmts and deciding whether outgoing edges are not executable. */
4383 sccvn_dom_walker walker;
4384 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4385 if (walker.fail)
4387 free_scc_vn ();
4388 return false;
4391 /* Initialize the value ids and prune out remaining VN_TOPs
4392 from dead code. */
4393 for (i = 1; i < num_ssa_names; ++i)
4395 tree name = ssa_name (i);
4396 vn_ssa_aux_t info;
4397 if (!name)
4398 continue;
4399 info = VN_INFO (name);
4400 if (!info->visited)
4401 info->valnum = name;
4402 if (info->valnum == name
4403 || info->valnum == VN_TOP)
4404 info->value_id = get_next_value_id ();
4405 else if (is_gimple_min_invariant (info->valnum))
4406 info->value_id = get_or_alloc_constant_value_id (info->valnum);
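/* E.g. a name whose value is the constant 4 shares the value id allocated
   for 4, while a name that is its own value (or still VN_TOP from dead code)
   gets a fresh value id; the example value is illustrative.  */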
4409 /* Propagate. */
4410 for (i = 1; i < num_ssa_names; ++i)
4412 tree name = ssa_name (i);
4413 vn_ssa_aux_t info;
4414 if (!name)
4415 continue;
4416 info = VN_INFO (name);
4417 if (TREE_CODE (info->valnum) == SSA_NAME
4418 && info->valnum != name
4419 && info->value_id != VN_INFO (info->valnum)->value_id)
4420 info->value_id = VN_INFO (info->valnum)->value_id;
4423 set_hashtable_value_ids ();
4425 if (dump_file && (dump_flags & TDF_DETAILS))
4427 fprintf (dump_file, "Value numbers:\n");
4428 for (i = 0; i < num_ssa_names; i++)
4430 tree name = ssa_name (i);
4431 if (name
4432 && VN_INFO (name)->visited
4433 && SSA_VAL (name) != name)
4435 print_generic_expr (dump_file, name, 0);
4436 fprintf (dump_file, " = ");
4437 print_generic_expr (dump_file, SSA_VAL (name), 0);
4438 fprintf (dump_file, "\n");
4443 return true;
4446 /* Return the maximum value id we have ever seen. */
4448 unsigned int
4449 get_max_value_id (void)
4451 return next_value_id;
4454 /* Return the next unique value id. */
4456 unsigned int
4457 get_next_value_id (void)
4459 return next_value_id++;
4463 /* Compare two expressions E1 and E2 and return true if they are equal. */
4465 bool
4466 expressions_equal_p (tree e1, tree e2)
4468 /* The obvious case. */
4469 if (e1 == e2)
4470 return true;
4472 /* If only one of them is null, they cannot be equal. */
4473 if (!e1 || !e2)
4474 return false;
4476 /* Now perform the actual comparison. */
4477 if (TREE_CODE (e1) == TREE_CODE (e2)
4478 && operand_equal_p (e1, e2, OEP_PURE_SAME))
4479 return true;
4481 return false;
4485 /* Return true if the nary operation NARY may trap. This is a copy
4486 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
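/* Illustrative cases: a floating-point comparison may trap on NaN operands
   when -ftrapping-math is in effect, and an integer division may trap when
   the divisor (RHS2 below) is not known to be non-zero.  */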
4488 bool
4489 vn_nary_may_trap (vn_nary_op_t nary)
4491 tree type;
4492 tree rhs2 = NULL_TREE;
4493 bool honor_nans = false;
4494 bool honor_snans = false;
4495 bool fp_operation = false;
4496 bool honor_trapv = false;
4497 bool handled, ret;
4498 unsigned i;
4500 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4501 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4502 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4504 type = nary->type;
4505 fp_operation = FLOAT_TYPE_P (type);
4506 if (fp_operation)
4508 honor_nans = flag_trapping_math && !flag_finite_math_only;
4509 honor_snans = flag_signaling_nans != 0;
4511 else if (INTEGRAL_TYPE_P (type)
4512 && TYPE_OVERFLOW_TRAPS (type))
4513 honor_trapv = true;
4515 if (nary->length >= 2)
4516 rhs2 = nary->op[1];
4517 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4518 honor_trapv,
4519 honor_nans, honor_snans, rhs2,
4520 &handled);
4521 if (handled
4522 && ret)
4523 return true;
4525 for (i = 0; i < nary->length; ++i)
4526 if (tree_could_trap_p (nary->op[i]))
4527 return true;
4529 return false;