[19/46] Make vect_dr_stmt return a stmt_vec_info
[official-gcc.git] / gcc / tree-ssa-sccvn.c
blob833291a57a662730250b95d7e11384447380330c
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2018 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "ssa.h"
29 #include "expmed.h"
30 #include "insn-config.h"
31 #include "memmodel.h"
32 #include "emit-rtl.h"
33 #include "cgraph.h"
34 #include "gimple-pretty-print.h"
35 #include "alias.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "cfganal.h"
39 #include "tree-inline.h"
40 #include "internal-fn.h"
41 #include "gimple-fold.h"
42 #include "tree-eh.h"
43 #include "gimplify.h"
44 #include "flags.h"
45 #include "dojump.h"
46 #include "explow.h"
47 #include "calls.h"
48 #include "varasm.h"
49 #include "stmt.h"
50 #include "expr.h"
51 #include "tree-dfa.h"
52 #include "tree-ssa.h"
53 #include "dumpfile.h"
54 #include "cfgloop.h"
55 #include "params.h"
56 #include "tree-ssa-propagate.h"
57 #include "tree-cfg.h"
58 #include "domwalk.h"
59 #include "gimple-iterator.h"
60 #include "gimple-match.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63 #include "tree-pass.h"
64 #include "statistics.h"
65 #include "langhooks.h"
66 #include "ipa-utils.h"
67 #include "dbgcnt.h"
68 #include "tree-cfgcleanup.h"
69 #include "tree-ssa-loop.h"
70 #include "tree-scalar-evolution.h"
71 #include "tree-ssa-sccvn.h"
73 /* This algorithm is based on the SCC algorithm presented by Keith
74 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
75 (http://citeseer.ist.psu.edu/41805.html). In
76 straight line code, it is equivalent to a regular hash based value
77 numbering that is performed in reverse postorder.
79 For code with cycles, there are two alternatives, both of which
80 require keeping the hashtables separate from the actual list of
81 value numbers for SSA names.
83 1. Iterate value numbering in an RPO walk of the blocks, removing
84 all the entries from the hashtable after each iteration (but
85 keeping the SSA name->value number mapping between iterations).
86 Iterate until it does not change.
88 2. Perform value numbering as part of an SCC walk on the SSA graph,
89 iterating only the cycles in the SSA graph until they do not change
90 (using a separate, optimistic hashtable for value numbering the SCC
91 operands).
93 The second is not just faster in practice (because most SSA graph
94 cycles do not involve all the variables in the graph), it also has
95 some nice properties.
97 One of these nice properties is that when we pop an SCC off the
98 stack, we are guaranteed to have processed all the operands coming from
99 *outside of that SCC*, so we do not need to do anything special to
100 ensure they have value numbers.
102 Another nice property is that the SCC walk is done as part of a DFS
103 of the SSA graph, which makes it easy to perform combining and
104 simplifying operations at the same time.
106 The code below is deliberately written in a way that makes it easy
107 to separate the SCC walk from the other work it does.
109 In order to propagate constants through the code, we track which
110 expressions contain constants, and use those while folding. In
111 theory, we could also track expressions whose value numbers are
112 replaced, in case we end up folding based on expression
113 identities.
115 In order to value number memory, we assign value numbers to vuses.
116 This enables us to note that, for example, stores to the same
117 address of the same value from the same starting memory states are
118 equivalent.
119 TODO:
121 1. We can iterate only the changing portions of the SCC's, but
122 I have not seen an SCC big enough for this to be a win.
123 2. If you differentiate between phi nodes for loops and phi nodes
124 for if-then-else, you can properly consider phi nodes in different
125 blocks for equivalence.
126 3. We could value number vuses in more cases, particularly, whole
127 structure copies.
131 static tree *last_vuse_ptr;
132 static vn_lookup_kind vn_walk_kind;
133 static vn_lookup_kind default_vn_walk_kind;
134 bitmap const_parms;
136 /* vn_nary_op hashtable helpers. */
138 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
140 typedef vn_nary_op_s *compare_type;
141 static inline hashval_t hash (const vn_nary_op_s *);
142 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
145 /* Return the computed hashcode for nary operation P1. */
147 inline hashval_t
148 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
150 return vno1->hashcode;
153 /* Compare nary operations P1 and P2 and return true if they are
154 equivalent. */
156 inline bool
157 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
159 return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
162 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
163 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
166 /* vn_phi hashtable helpers. */
168 static int
169 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
171 struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
173 static inline hashval_t hash (const vn_phi_s *);
174 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
177 /* Return the computed hashcode for phi operation P1. */
179 inline hashval_t
180 vn_phi_hasher::hash (const vn_phi_s *vp1)
182 return vp1->hashcode;
185 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
187 inline bool
188 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
190 return vp1 == vp2 || vn_phi_eq (vp1, vp2);
193 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
194 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
197 /* Compare two reference operands P1 and P2 for equality. Return true if
198 they are equal, and false otherwise. */
200 static int
201 vn_reference_op_eq (const void *p1, const void *p2)
203 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
204 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
206 return (vro1->opcode == vro2->opcode
207 /* We do not care for differences in type qualification. */
208 && (vro1->type == vro2->type
209 || (vro1->type && vro2->type
210 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
211 TYPE_MAIN_VARIANT (vro2->type))))
212 && expressions_equal_p (vro1->op0, vro2->op0)
213 && expressions_equal_p (vro1->op1, vro2->op1)
214 && expressions_equal_p (vro1->op2, vro2->op2));
217 /* Free a reference operation structure VP. */
219 static inline void
220 free_reference (vn_reference_s *vr)
222 vr->operands.release ();
226 /* vn_reference hashtable helpers. */
228 struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
230 static inline hashval_t hash (const vn_reference_s *);
231 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
234 /* Return the hashcode for a given reference operation P1. */
236 inline hashval_t
237 vn_reference_hasher::hash (const vn_reference_s *vr1)
239 return vr1->hashcode;
242 inline bool
243 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
245 return v == c || vn_reference_eq (v, c);
248 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
249 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
252 /* The set of VN hashtables. */
254 typedef struct vn_tables_s
256 vn_nary_op_table_type *nary;
257 vn_phi_table_type *phis;
258 vn_reference_table_type *references;
259 } *vn_tables_t;
262 /* vn_constant hashtable helpers. */
264 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
266 static inline hashval_t hash (const vn_constant_s *);
267 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
270 /* Hash table hash function for vn_constant_t. */
272 inline hashval_t
273 vn_constant_hasher::hash (const vn_constant_s *vc1)
275 return vc1->hashcode;
278 /* Hash table equality function for vn_constant_t. */
280 inline bool
281 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
283 if (vc1->hashcode != vc2->hashcode)
284 return false;
286 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
289 static hash_table<vn_constant_hasher> *constant_to_value_id;
290 static bitmap constant_value_ids;
293 /* Obstack we allocate the vn-tables elements from. */
294 static obstack vn_tables_obstack;
295 /* Special obstack we never unwind. */
296 static obstack vn_tables_insert_obstack;
298 static vn_reference_t last_inserted_ref;
299 static vn_phi_t last_inserted_phi;
300 static vn_nary_op_t last_inserted_nary;
302 /* Valid hashtables storing information we have proven to be
303 correct. */
304 static vn_tables_t valid_info;
307 /* Reverse post order index for each basic block. */
308 static int *rpo_numbers;
310 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
312 /* Return the SSA value of the VUSE x, supporting released VDEFs
313 during elimination which will value-number the VDEF to the
314 associated VUSE (but not substitute in the whole lattice). */
316 static inline tree
317 vuse_ssa_val (tree x)
319 if (!x)
320 return NULL_TREE;
324 tree tem = SSA_VAL (x);
325 /* stmt walking can walk over a backedge and reach code we didn't
326 value-number yet. */
327 if (tem == VN_TOP)
328 return x;
329 x = tem;
331 while (SSA_NAME_IN_FREE_LIST (x));
333 return x;
336 /* This represents the top of the VN lattice, which is the universal
337 value. */
339 tree VN_TOP;
341 /* Unique counter for our value ids. */
343 static unsigned int next_value_id;
345 /* Next DFS number and the stack for strongly connected component
346 detection. */
348 static unsigned int next_dfs_num;
349 static vec<tree> sccstack;
353 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
354 are allocated on an obstack for locality reasons, and to free them
355 without looping over the vec. */
357 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
358 static struct obstack vn_ssa_aux_obstack;
360 /* Return whether there is value numbering information for a given SSA name. */
362 bool
363 has_VN_INFO (tree name)
365 if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
366 return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
367 return false;
370 /* Return the value numbering information for a given SSA name. */
372 vn_ssa_aux_t
373 VN_INFO (tree name)
375 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
376 gcc_checking_assert (res);
377 return res;
380 /* Set the value numbering info for a given SSA name to a given
381 value. */
383 static inline void
384 VN_INFO_SET (tree name, vn_ssa_aux_t value)
386 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
389 /* Initialize the value numbering info for a given SSA name.
390 This should be called just once for every SSA name. */
392 vn_ssa_aux_t
393 VN_INFO_GET (tree name)
395 vn_ssa_aux_t newinfo;
397 gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
398 || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
399 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
400 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
401 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
402 vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
403 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
404 return newinfo;
408 /* Return the vn_kind the expression computed by the stmt should be
409 associated with. */
411 enum vn_kind
412 vn_get_stmt_kind (gimple *stmt)
414 switch (gimple_code (stmt))
416 case GIMPLE_CALL:
417 return VN_REFERENCE;
418 case GIMPLE_PHI:
419 return VN_PHI;
420 case GIMPLE_ASSIGN:
422 enum tree_code code = gimple_assign_rhs_code (stmt);
423 tree rhs1 = gimple_assign_rhs1 (stmt);
424 switch (get_gimple_rhs_class (code))
426 case GIMPLE_UNARY_RHS:
427 case GIMPLE_BINARY_RHS:
428 case GIMPLE_TERNARY_RHS:
429 return VN_NARY;
430 case GIMPLE_SINGLE_RHS:
431 switch (TREE_CODE_CLASS (code))
433 case tcc_reference:
434 /* VOP-less references can go through unary case. */
435 if ((code == REALPART_EXPR
436 || code == IMAGPART_EXPR
437 || code == VIEW_CONVERT_EXPR
438 || code == BIT_FIELD_REF)
439 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
440 return VN_NARY;
442 /* Fallthrough. */
443 case tcc_declaration:
444 return VN_REFERENCE;
446 case tcc_constant:
447 return VN_CONSTANT;
449 default:
450 if (code == ADDR_EXPR)
451 return (is_gimple_min_invariant (rhs1)
452 ? VN_CONSTANT : VN_REFERENCE);
453 else if (code == CONSTRUCTOR)
454 return VN_NARY;
455 return VN_NONE;
457 default:
458 return VN_NONE;
461 default:
462 return VN_NONE;
466 /* Lookup a value id for CONSTANT and return it. If it does not
467 exist returns 0. */
469 unsigned int
470 get_constant_value_id (tree constant)
472 vn_constant_s **slot;
473 struct vn_constant_s vc;
475 vc.hashcode = vn_hash_constant_with_type (constant);
476 vc.constant = constant;
477 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
478 if (slot)
479 return (*slot)->value_id;
480 return 0;
483 /* Lookup a value id for CONSTANT, and if it does not exist, create a
484 new one and return it. If it does exist, return it. */
486 unsigned int
487 get_or_alloc_constant_value_id (tree constant)
489 vn_constant_s **slot;
490 struct vn_constant_s vc;
491 vn_constant_t vcp;
493 vc.hashcode = vn_hash_constant_with_type (constant);
494 vc.constant = constant;
495 slot = constant_to_value_id->find_slot (&vc, INSERT);
496 if (*slot)
497 return (*slot)->value_id;
499 vcp = XNEW (struct vn_constant_s);
500 vcp->hashcode = vc.hashcode;
501 vcp->constant = constant;
502 vcp->value_id = get_next_value_id ();
503 *slot = vcp;
504 bitmap_set_bit (constant_value_ids, vcp->value_id);
505 return vcp->value_id;
508 /* Return true if V is a value id for a constant. */
510 bool
511 value_id_constant_p (unsigned int v)
513 return bitmap_bit_p (constant_value_ids, v);
516 /* Compute the hash for a reference operand VRO1. */
518 static void
519 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
521 hstate.add_int (vro1->opcode);
522 if (vro1->op0)
523 inchash::add_expr (vro1->op0, hstate);
524 if (vro1->op1)
525 inchash::add_expr (vro1->op1, hstate);
526 if (vro1->op2)
527 inchash::add_expr (vro1->op2, hstate);
530 /* Compute a hash for the reference operation VR1 and return it. */
532 static hashval_t
533 vn_reference_compute_hash (const vn_reference_t vr1)
535 inchash::hash hstate;
536 hashval_t result;
537 int i;
538 vn_reference_op_t vro;
539 poly_int64 off = -1;
540 bool deref = false;
542 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
544 if (vro->opcode == MEM_REF)
545 deref = true;
546 else if (vro->opcode != ADDR_EXPR)
547 deref = false;
548 if (maybe_ne (vro->off, -1))
550 if (known_eq (off, -1))
551 off = 0;
552 off += vro->off;
554 else
556 if (maybe_ne (off, -1)
557 && maybe_ne (off, 0))
558 hstate.add_poly_int (off);
559 off = -1;
560 if (deref
561 && vro->opcode == ADDR_EXPR)
563 if (vro->op0)
565 tree op = TREE_OPERAND (vro->op0, 0);
566 hstate.add_int (TREE_CODE (op));
567 inchash::add_expr (op, hstate);
570 else
571 vn_reference_op_compute_hash (vro, hstate);
574 result = hstate.end ();
575 /* ??? We would ICE later if we hash instead of adding that in. */
576 if (vr1->vuse)
577 result += SSA_NAME_VERSION (vr1->vuse);
579 return result;
582 /* Return true if reference operations VR1 and VR2 are equivalent. This
583 means they have the same set of operands and vuses. */
585 bool
586 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
588 unsigned i, j;
590 /* Early out if this is not a hash collision. */
591 if (vr1->hashcode != vr2->hashcode)
592 return false;
594 /* The VOP needs to be the same. */
595 if (vr1->vuse != vr2->vuse)
596 return false;
598 /* If the operands are the same we are done. */
599 if (vr1->operands == vr2->operands)
600 return true;
602 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
603 return false;
605 if (INTEGRAL_TYPE_P (vr1->type)
606 && INTEGRAL_TYPE_P (vr2->type))
608 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
609 return false;
611 else if (INTEGRAL_TYPE_P (vr1->type)
612 && (TYPE_PRECISION (vr1->type)
613 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
614 return false;
615 else if (INTEGRAL_TYPE_P (vr2->type)
616 && (TYPE_PRECISION (vr2->type)
617 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
618 return false;
620 i = 0;
621 j = 0;
624 poly_int64 off1 = 0, off2 = 0;
625 vn_reference_op_t vro1, vro2;
626 vn_reference_op_s tem1, tem2;
627 bool deref1 = false, deref2 = false;
628 for (; vr1->operands.iterate (i, &vro1); i++)
630 if (vro1->opcode == MEM_REF)
631 deref1 = true;
632 /* Do not look through a storage order barrier. */
633 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
634 return false;
635 if (known_eq (vro1->off, -1))
636 break;
637 off1 += vro1->off;
639 for (; vr2->operands.iterate (j, &vro2); j++)
641 if (vro2->opcode == MEM_REF)
642 deref2 = true;
643 /* Do not look through a storage order barrier. */
644 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
645 return false;
646 if (known_eq (vro2->off, -1))
647 break;
648 off2 += vro2->off;
650 if (maybe_ne (off1, off2))
651 return false;
652 if (deref1 && vro1->opcode == ADDR_EXPR)
654 memset (&tem1, 0, sizeof (tem1));
655 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
656 tem1.type = TREE_TYPE (tem1.op0);
657 tem1.opcode = TREE_CODE (tem1.op0);
658 vro1 = &tem1;
659 deref1 = false;
661 if (deref2 && vro2->opcode == ADDR_EXPR)
663 memset (&tem2, 0, sizeof (tem2));
664 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
665 tem2.type = TREE_TYPE (tem2.op0);
666 tem2.opcode = TREE_CODE (tem2.op0);
667 vro2 = &tem2;
668 deref2 = false;
670 if (deref1 != deref2)
671 return false;
672 if (!vn_reference_op_eq (vro1, vro2))
673 return false;
674 ++j;
675 ++i;
677 while (vr1->operands.length () != i
678 || vr2->operands.length () != j);
680 return true;
683 /* Copy the operations present in load/store REF into RESULT, a vector of
684 vn_reference_op_s's. */
686 static void
687 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
689 if (TREE_CODE (ref) == TARGET_MEM_REF)
691 vn_reference_op_s temp;
693 result->reserve (3);
695 memset (&temp, 0, sizeof (temp));
696 temp.type = TREE_TYPE (ref);
697 temp.opcode = TREE_CODE (ref);
698 temp.op0 = TMR_INDEX (ref);
699 temp.op1 = TMR_STEP (ref);
700 temp.op2 = TMR_OFFSET (ref);
701 temp.off = -1;
702 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
703 temp.base = MR_DEPENDENCE_BASE (ref);
704 result->quick_push (temp);
706 memset (&temp, 0, sizeof (temp));
707 temp.type = NULL_TREE;
708 temp.opcode = ERROR_MARK;
709 temp.op0 = TMR_INDEX2 (ref);
710 temp.off = -1;
711 result->quick_push (temp);
713 memset (&temp, 0, sizeof (temp));
714 temp.type = NULL_TREE;
715 temp.opcode = TREE_CODE (TMR_BASE (ref));
716 temp.op0 = TMR_BASE (ref);
717 temp.off = -1;
718 result->quick_push (temp);
719 return;
722 /* For non-calls, store the information that makes up the address. */
723 tree orig = ref;
724 while (ref)
726 vn_reference_op_s temp;
728 memset (&temp, 0, sizeof (temp));
729 temp.type = TREE_TYPE (ref);
730 temp.opcode = TREE_CODE (ref);
731 temp.off = -1;
733 switch (temp.opcode)
735 case MODIFY_EXPR:
736 temp.op0 = TREE_OPERAND (ref, 1);
737 break;
738 case WITH_SIZE_EXPR:
739 temp.op0 = TREE_OPERAND (ref, 1);
740 temp.off = 0;
741 break;
742 case MEM_REF:
743 /* The base address gets its own vn_reference_op_s structure. */
744 temp.op0 = TREE_OPERAND (ref, 1);
745 if (!mem_ref_offset (ref).to_shwi (&temp.off))
746 temp.off = -1;
747 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
748 temp.base = MR_DEPENDENCE_BASE (ref);
749 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
750 break;
751 case BIT_FIELD_REF:
752 /* Record bits, position and storage order. */
753 temp.op0 = TREE_OPERAND (ref, 1);
754 temp.op1 = TREE_OPERAND (ref, 2);
755 if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
756 temp.off = -1;
757 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
758 break;
759 case COMPONENT_REF:
760 /* The field decl is enough to unambiguously specify the field,
761 a matching type is not necessary and a mismatching type
762 is always a spurious difference. */
763 temp.type = NULL_TREE;
764 temp.op0 = TREE_OPERAND (ref, 1);
765 temp.op1 = TREE_OPERAND (ref, 2);
767 tree this_offset = component_ref_field_offset (ref);
768 if (this_offset
769 && poly_int_tree_p (this_offset))
771 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
772 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
774 poly_offset_int off
775 = (wi::to_poly_offset (this_offset)
776 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
777 /* Probibit value-numbering zero offset components
778 of addresses the same before the pass folding
779 __builtin_object_size had a chance to run
780 (checking cfun->after_inlining does the
781 trick here). */
782 if (TREE_CODE (orig) != ADDR_EXPR
783 || maybe_ne (off, 0)
784 || cfun->after_inlining)
785 off.to_shwi (&temp.off);
789 break;
790 case ARRAY_RANGE_REF:
791 case ARRAY_REF:
793 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
794 /* Record index as operand. */
795 temp.op0 = TREE_OPERAND (ref, 1);
796 /* Always record lower bounds and element size. */
797 temp.op1 = array_ref_low_bound (ref);
798 /* But record element size in units of the type alignment. */
799 temp.op2 = TREE_OPERAND (ref, 3);
800 temp.align = eltype->type_common.align;
801 if (! temp.op2)
802 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
803 size_int (TYPE_ALIGN_UNIT (eltype)));
804 if (poly_int_tree_p (temp.op0)
805 && poly_int_tree_p (temp.op1)
806 && TREE_CODE (temp.op2) == INTEGER_CST)
808 poly_offset_int off = ((wi::to_poly_offset (temp.op0)
809 - wi::to_poly_offset (temp.op1))
810 * wi::to_offset (temp.op2)
811 * vn_ref_op_align_unit (&temp));
812 off.to_shwi (&temp.off);
815 break;
816 case VAR_DECL:
817 if (DECL_HARD_REGISTER (ref))
819 temp.op0 = ref;
820 break;
822 /* Fallthru. */
823 case PARM_DECL:
824 case CONST_DECL:
825 case RESULT_DECL:
826 /* Canonicalize decls to MEM[&decl] which is what we end up with
827 when valueizing MEM[ptr] with ptr = &decl. */
828 temp.opcode = MEM_REF;
829 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
830 temp.off = 0;
831 result->safe_push (temp);
832 temp.opcode = ADDR_EXPR;
833 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
834 temp.type = TREE_TYPE (temp.op0);
835 temp.off = -1;
836 break;
837 case STRING_CST:
838 case INTEGER_CST:
839 case COMPLEX_CST:
840 case VECTOR_CST:
841 case REAL_CST:
842 case FIXED_CST:
843 case CONSTRUCTOR:
844 case SSA_NAME:
845 temp.op0 = ref;
846 break;
847 case ADDR_EXPR:
848 if (is_gimple_min_invariant (ref))
850 temp.op0 = ref;
851 break;
853 break;
854 /* These are only interesting for their operands, their
855 existence, and their type. They will never be the last
856 ref in the chain of references (IE they require an
857 operand), so we don't have to put anything
858 for op* as it will be handled by the iteration */
859 case REALPART_EXPR:
860 temp.off = 0;
861 break;
862 case VIEW_CONVERT_EXPR:
863 temp.off = 0;
864 temp.reverse = storage_order_barrier_p (ref);
865 break;
866 case IMAGPART_EXPR:
867 /* This is only interesting for its constant offset. */
868 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
869 break;
870 default:
871 gcc_unreachable ();
873 result->safe_push (temp);
875 if (REFERENCE_CLASS_P (ref)
876 || TREE_CODE (ref) == MODIFY_EXPR
877 || TREE_CODE (ref) == WITH_SIZE_EXPR
878 || (TREE_CODE (ref) == ADDR_EXPR
879 && !is_gimple_min_invariant (ref)))
880 ref = TREE_OPERAND (ref, 0);
881 else
882 ref = NULL_TREE;
886 /* Build a alias-oracle reference abstraction in *REF from the vn_reference
887 operands in *OPS, the reference alias set SET and the reference type TYPE.
888 Return true if something useful was produced. */
890 bool
891 ao_ref_init_from_vn_reference (ao_ref *ref,
892 alias_set_type set, tree type,
893 vec<vn_reference_op_s> ops)
895 vn_reference_op_t op;
896 unsigned i;
897 tree base = NULL_TREE;
898 tree *op0_p = &base;
899 poly_offset_int offset = 0;
900 poly_offset_int max_size;
901 poly_offset_int size = -1;
902 tree size_tree = NULL_TREE;
903 alias_set_type base_alias_set = -1;
905 /* First get the final access size from just the outermost expression. */
906 op = &ops[0];
907 if (op->opcode == COMPONENT_REF)
908 size_tree = DECL_SIZE (op->op0);
909 else if (op->opcode == BIT_FIELD_REF)
910 size_tree = op->op0;
911 else
913 machine_mode mode = TYPE_MODE (type);
914 if (mode == BLKmode)
915 size_tree = TYPE_SIZE (type);
916 else
917 size = GET_MODE_BITSIZE (mode);
919 if (size_tree != NULL_TREE
920 && poly_int_tree_p (size_tree))
921 size = wi::to_poly_offset (size_tree);
923 /* Initially, maxsize is the same as the accessed element size.
924 In the following it will only grow (or become -1). */
925 max_size = size;
927 /* Compute cumulative bit-offset for nested component-refs and array-refs,
928 and find the ultimate containing object. */
929 FOR_EACH_VEC_ELT (ops, i, op)
931 switch (op->opcode)
933 /* These may be in the reference ops, but we cannot do anything
934 sensible with them here. */
935 case ADDR_EXPR:
936 /* Apart from ADDR_EXPR arguments to MEM_REF. */
937 if (base != NULL_TREE
938 && TREE_CODE (base) == MEM_REF
939 && op->op0
940 && DECL_P (TREE_OPERAND (op->op0, 0)))
942 vn_reference_op_t pop = &ops[i-1];
943 base = TREE_OPERAND (op->op0, 0);
944 if (known_eq (pop->off, -1))
946 max_size = -1;
947 offset = 0;
949 else
950 offset += pop->off * BITS_PER_UNIT;
951 op0_p = NULL;
952 break;
954 /* Fallthru. */
955 case CALL_EXPR:
956 return false;
958 /* Record the base objects. */
959 case MEM_REF:
960 base_alias_set = get_deref_alias_set (op->op0);
961 *op0_p = build2 (MEM_REF, op->type,
962 NULL_TREE, op->op0);
963 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
964 MR_DEPENDENCE_BASE (*op0_p) = op->base;
965 op0_p = &TREE_OPERAND (*op0_p, 0);
966 break;
968 case VAR_DECL:
969 case PARM_DECL:
970 case RESULT_DECL:
971 case SSA_NAME:
972 *op0_p = op->op0;
973 op0_p = NULL;
974 break;
976 /* And now the usual component-reference style ops. */
977 case BIT_FIELD_REF:
978 offset += wi::to_poly_offset (op->op1);
979 break;
981 case COMPONENT_REF:
983 tree field = op->op0;
984 /* We do not have a complete COMPONENT_REF tree here so we
985 cannot use component_ref_field_offset. Do the interesting
986 parts manually. */
987 tree this_offset = DECL_FIELD_OFFSET (field);
989 if (op->op1 || !poly_int_tree_p (this_offset))
990 max_size = -1;
991 else
993 poly_offset_int woffset = (wi::to_poly_offset (this_offset)
994 << LOG2_BITS_PER_UNIT);
995 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
996 offset += woffset;
998 break;
1001 case ARRAY_RANGE_REF:
1002 case ARRAY_REF:
1003 /* We recorded the lower bound and the element size. */
1004 if (!poly_int_tree_p (op->op0)
1005 || !poly_int_tree_p (op->op1)
1006 || TREE_CODE (op->op2) != INTEGER_CST)
1007 max_size = -1;
1008 else
1010 poly_offset_int woffset
1011 = wi::sext (wi::to_poly_offset (op->op0)
1012 - wi::to_poly_offset (op->op1),
1013 TYPE_PRECISION (TREE_TYPE (op->op0)));
1014 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1015 woffset <<= LOG2_BITS_PER_UNIT;
1016 offset += woffset;
1018 break;
1020 case REALPART_EXPR:
1021 break;
1023 case IMAGPART_EXPR:
1024 offset += size;
1025 break;
1027 case VIEW_CONVERT_EXPR:
1028 break;
1030 case STRING_CST:
1031 case INTEGER_CST:
1032 case COMPLEX_CST:
1033 case VECTOR_CST:
1034 case REAL_CST:
1035 case CONSTRUCTOR:
1036 case CONST_DECL:
1037 return false;
1039 default:
1040 return false;
1044 if (base == NULL_TREE)
1045 return false;
1047 ref->ref = NULL_TREE;
1048 ref->base = base;
1049 ref->ref_alias_set = set;
1050 if (base_alias_set != -1)
1051 ref->base_alias_set = base_alias_set;
1052 else
1053 ref->base_alias_set = get_alias_set (base);
1054 /* We discount volatiles from value-numbering elsewhere. */
1055 ref->volatile_p = false;
1057 if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1059 ref->offset = 0;
1060 ref->size = -1;
1061 ref->max_size = -1;
1062 return true;
1065 if (!offset.to_shwi (&ref->offset))
1067 ref->offset = 0;
1068 ref->max_size = -1;
1069 return true;
1072 if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1073 ref->max_size = -1;
1075 return true;
1078 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1079 vn_reference_op_s's. */
1081 static void
1082 copy_reference_ops_from_call (gcall *call,
1083 vec<vn_reference_op_s> *result)
1085 vn_reference_op_s temp;
1086 unsigned i;
1087 tree lhs = gimple_call_lhs (call);
1088 int lr;
1090 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1091 different. By adding the lhs here in the vector, we ensure that the
1092 hashcode is different, guaranteeing a different value number. */
1093 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1095 memset (&temp, 0, sizeof (temp));
1096 temp.opcode = MODIFY_EXPR;
1097 temp.type = TREE_TYPE (lhs);
1098 temp.op0 = lhs;
1099 temp.off = -1;
1100 result->safe_push (temp);
1103 /* Copy the type, opcode, function, static chain and EH region, if any. */
1104 memset (&temp, 0, sizeof (temp));
1105 temp.type = gimple_call_return_type (call);
1106 temp.opcode = CALL_EXPR;
1107 temp.op0 = gimple_call_fn (call);
1108 temp.op1 = gimple_call_chain (call);
1109 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1110 temp.op2 = size_int (lr);
1111 temp.off = -1;
1112 result->safe_push (temp);
1114 /* Copy the call arguments. As they can be references as well,
1115 just chain them together. */
1116 for (i = 0; i < gimple_call_num_args (call); ++i)
1118 tree callarg = gimple_call_arg (call, i);
1119 copy_reference_ops_from_ref (callarg, result);
1123 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1124 *I_P to point to the last element of the replacement. */
1125 static bool
1126 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1127 unsigned int *i_p)
1129 unsigned int i = *i_p;
1130 vn_reference_op_t op = &(*ops)[i];
1131 vn_reference_op_t mem_op = &(*ops)[i - 1];
1132 tree addr_base;
1133 poly_int64 addr_offset = 0;
1135 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1136 from .foo.bar to the preceding MEM_REF offset and replace the
1137 address with &OBJ. */
1138 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1139 &addr_offset);
1140 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1141 if (addr_base != TREE_OPERAND (op->op0, 0))
1143 poly_offset_int off
1144 = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1145 SIGNED)
1146 + addr_offset);
1147 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1148 op->op0 = build_fold_addr_expr (addr_base);
1149 if (tree_fits_shwi_p (mem_op->op0))
1150 mem_op->off = tree_to_shwi (mem_op->op0);
1151 else
1152 mem_op->off = -1;
1153 return true;
1155 return false;
/* Forward-propagate the address computation defining the SSA name at
   position *I_P in a vn_reference_op_s vector *OPS into the preceding
   MEM_REF at *I_P - 1.  Updates *I_P to point to the last element of
   the replacement.  Returns true if anything was changed.  */
static bool
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple *def_stmt;
  enum tree_code code;
  poly_offset_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return false;

  /* Only &OBJ... and ptr + offset definitions can be propagated.  */
  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return false;

  off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      poly_int64 addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						 &addr_offset);
      /* If that didn't work because the address isn't invariant propagate
         the reference tree from the address operation in case the current
         dereference isn't offsetted.  */
      if (!addr_base
	  && *i_p == ops->length () - 1
	  && known_eq (off, 0)
	  /* This makes us disable this transform for PRE where the
	     reference ops might be also used for code insertion which
	     is invalid.  */
	  && default_vn_walk_kind == VN_WALKREWRITE)
	{
	  auto_vec<vn_reference_op_s, 32> tem;
	  copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
	  /* Make sure to preserve TBAA info.  The only objects not
	     wrapped in MEM_REFs that can have their address taken are
	     STRING_CSTs.  */
	  if (tem.length () >= 2
	      && tem[tem.length () - 2].opcode == MEM_REF)
	    {
	      vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
	      /* Keep the MEM_REF offset in the original offset type so
		 the TBAA type is preserved.  */
	      new_mem_op->op0
		= wide_int_to_tree (TREE_TYPE (mem_op->op0),
				    wi::to_poly_wide (new_mem_op->op0));
	    }
	  else
	    gcc_assert (tem.last ().opcode == STRING_CST);
	  /* Replace the trailing MEM_REF + address pair with the copied
	     reference ops.  */
	  ops->pop ();
	  ops->pop ();
	  ops->safe_splice (tem);
	  --*i_p;
	  return true;
	}
      if (!addr_base
	  || TREE_CODE (addr_base) != MEM_REF
	  || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base, 0))))
	return false;

      off += addr_offset;
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      /* POINTER_PLUS_EXPR: fold a constant pointer increment into the
	 MEM_REF offset.  */
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
	  || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
	  || !poly_int_tree_p (ptroff))
	return false;

      off += wi::to_poly_offset (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (tree_fits_shwi_p (mem_op->op0))
    mem_op->off = tree_to_shwi (mem_op->op0);
  else
    mem_op->off = -1;
  /* Valueize the new address operand and update the opcode if it
     became a constant.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
  return true;
}
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
	arg1 = &operands[2];
      /* Only fold when at least one argument is a constant or an
	 invariant address, otherwise folding cannot succeed.  */
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					 arg1 ? 2 : 1,
					 arg0->op0,
					 arg1 ? arg1->op0 : NULL);
	  /* Strip a wrapping conversion the folder may have added.  */
	  if (folded
	      && TREE_CODE (folded) == NOP_EXPR)
	    folded = TREE_OPERAND (folded, 0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constants or constant initializers.  */
  else if (BITS_PER_UNIT == 8
	   && is_gimple_reg_type (ref->type)
	   && (!INTEGRAL_TYPE_P (ref->type)
	       || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
    {
      poly_int64 off = 0;
      HOST_WIDE_INT size;
      if (INTEGRAL_TYPE_P (ref->type))
	size = TYPE_PRECISION (ref->type);
      else
	size = tree_to_shwi (TYPE_SIZE (ref->type));
      if (size % BITS_PER_UNIT != 0
	  || size > MAX_BITSIZE_MODE_ANY_MODE)
	return NULL_TREE;
      size /= BITS_PER_UNIT;
      unsigned i;
      /* Walk inner component ops accumulating a constant byte offset
	 until we reach the base (a constant or a MEM_REF).  Bail out
	 if any component has an unknown offset.  */
      for (i = 0; i < operands.length (); ++i)
	{
	  if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
	    {
	      ++i;
	      break;
	    }
	  if (known_eq (operands[i].off, -1))
	    return NULL_TREE;
	  off += operands[i].off;
	  if (operands[i].opcode == MEM_REF)
	    {
	      ++i;
	      break;
	    }
	}
      vn_reference_op_t base = &operands[--i];
      tree ctor = error_mark_node;
      tree decl = NULL_TREE;
      if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
	ctor = base->op0;
      /* A MEM_REF of &DECL where DECL has a usable constant
	 initializer (or is itself a STRING_CST).  */
      else if (base->opcode == MEM_REF
	       && base[1].opcode == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
	{
	  decl = TREE_OPERAND (base[1].op0, 0);
	  if (TREE_CODE (decl) == STRING_CST)
	    ctor = decl;
	  else
	    ctor = ctor_for_folding (decl);
	}
      /* A NULL ctor means the object is zero-initialized.  */
      if (ctor == NULL_TREE)
	return build_zero_cst (ref->type);
      else if (ctor != error_mark_node)
	{
	  HOST_WIDE_INT const_off;
	  if (decl)
	    {
	      tree res = fold_ctor_reference (ref->type, ctor,
					      off * BITS_PER_UNIT,
					      size * BITS_PER_UNIT, decl);
	      if (res)
		{
		  STRIP_USELESS_TYPE_CONVERSION (res);
		  if (is_gimple_min_invariant (res))
		    return res;
		}
	    }
	  else if (off.is_constant (&const_off))
	    {
	      /* Encode the constant to raw bytes and re-interpret them
		 in the type of the reference.  */
	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	      int len = native_encode_expr (ctor, buf, size, const_off);
	      if (len > 0)
		return native_interpret_expr (ref->type, buf, len);
	    }
	}
    }

  return NULL_TREE;
}
1395 /* Return true if OPS contain a storage order barrier. */
1397 static bool
1398 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1400 vn_reference_op_t op;
1401 unsigned i;
1403 FOR_EACH_VEC_ELT (ops, i, op)
1404 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1405 return true;
1407 return false;
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static vec<vn_reference_op_s>
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  Note these helpers may
	 rewrite the vector and adjust I in place.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && orig[i - 1].opcode == MEM_REF)
	{
	  if (vn_reference_fold_indirect (&orig, &i))
	    *valueized_anything = true;
	}
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && orig[i - 1].opcode == MEM_REF)
	{
	  if (vn_reference_maybe_forwprop_address (&orig, &i))
	    *valueized_anything = true;
	}
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && known_eq (vro->off, -1)
	       && poly_int_tree_p (vro->op0)
	       && poly_int_tree_p (vro->op1)
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  /* off = (index - low_bound) * element_size_units.  */
	  poly_offset_int off = ((wi::to_poly_offset (vro->op0)
				  - wi::to_poly_offset (vro->op1))
				 * wi::to_offset (vro->op2)
				 * vn_ref_op_align_unit (vro));
	  off.to_shwi (&vro->off);
	}
    }

  return orig;
}
1493 static vec<vn_reference_op_s>
1494 valueize_refs (vec<vn_reference_op_s> orig)
1496 bool tem;
1497 return valueize_refs_1 (orig, &tem);
/* Scratch vector shared by the valueize_shared_reference_ops_from_*
   helpers below; re-used across calls to avoid re-allocation.  */
static vec<vn_reference_op_s> shared_lookup_references;
1502 /* Create a vector of vn_reference_op_s structures from REF, a
1503 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1504 this function. *VALUEIZED_ANYTHING will specify whether any
1505 operands were valueized. */
1507 static vec<vn_reference_op_s>
1508 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1510 if (!ref)
1511 return vNULL;
1512 shared_lookup_references.truncate (0);
1513 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1514 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1515 valueized_anything);
1516 return shared_lookup_references;
1519 /* Create a vector of vn_reference_op_s structures from CALL, a
1520 call statement. The vector is shared among all callers of
1521 this function. */
1523 static vec<vn_reference_op_s>
1524 valueize_shared_reference_ops_from_call (gcall *call)
1526 if (!call)
1527 return vNULL;
1528 shared_lookup_references.truncate (0);
1529 copy_reference_ops_from_call (call, &shared_lookup_references);
1530 shared_lookup_references = valueize_refs (shared_lookup_references);
1531 return shared_lookup_references;
1534 /* Lookup a SCCVN reference operation VR in the current hash table.
1535 Returns the resulting value number if it exists in the hash table,
1536 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1537 vn_reference_t stored in the hashtable if something is found. */
1539 static tree
1540 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1542 vn_reference_s **slot;
1543 hashval_t hash;
1545 hash = vr->hashcode;
1546 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1547 if (slot)
1549 if (vnresult)
1550 *vnresult = (vn_reference_t)*slot;
1551 return ((vn_reference_t)*slot)->result;
1554 return NULL_TREE;
/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.
   Returns the found table entry, NULL when nothing was found, or
   (void *)-1 to tell the walker to give up.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
		       unsigned int cnt, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  vn_reference_s **slot;
  hashval_t hash;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  The hash is incrementally adjusted by
     subtracting the old vuse's SSA version and adding the new one,
     mirroring how vn_reference_compute_hash folds in the vuse.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = vuse_ssa_val (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
1592 /* Lookup an existing or insert a new vn_reference entry into the
1593 value table for the VUSE, SET, TYPE, OPERANDS reference which
1594 has the value VALUE which is either a constant or an SSA name. */
1596 static vn_reference_t
1597 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1598 alias_set_type set,
1599 tree type,
1600 vec<vn_reference_op_s,
1601 va_heap> operands,
1602 tree value)
1604 vn_reference_s vr1;
1605 vn_reference_t result;
1606 unsigned value_id;
1607 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1608 vr1.operands = operands;
1609 vr1.type = type;
1610 vr1.set = set;
1611 vr1.hashcode = vn_reference_compute_hash (&vr1);
1612 if (vn_reference_lookup_1 (&vr1, &result))
1613 return result;
1614 if (TREE_CODE (value) == SSA_NAME)
1615 value_id = VN_INFO (value)->value_id;
1616 else
1617 value_id = get_or_alloc_constant_value_id (value);
1618 return vn_reference_insert_pieces (vuse, set, type,
1619 operands.copy (), value, value_id);
/* Forward declarations of nary-table helpers used by the
   build-or-lookup machinery below.  */
static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
static unsigned int vn_nary_length_from_stmt (gimple *);
static vn_nary_op_t alloc_vn_nary_op_noinit (unsigned int, obstack *);
static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
					    vn_nary_op_table_type *, bool);
static void init_vn_nary_op_from_stmt (vn_nary_op_t, gimple *);
/* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables.
   Returns the recorded value number or NULL_TREE.  */

static tree
vn_lookup_simplify_result (gimple_match_op *res_op)
{
  /* Only plain tree codes can be looked up in the nary tables.  */
  if (!res_op->code.is_tree_code ())
    return NULL_TREE;
  tree *ops = res_op->ops;
  unsigned int length = res_op->num_ops;
  if (res_op->code == CONSTRUCTOR
      /* ??? We're arriving here with SCCVNs view, decomposed CONSTRUCTOR
	 and GIMPLEs / match-and-simplifies, CONSTRUCTOR as GENERIC tree.  */
      && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
    {
      /* Decompose the GENERIC CONSTRUCTOR into its element values so
	 the lookup matches the form SCCVN records.  */
      length = CONSTRUCTOR_NELTS (res_op->ops[0]);
      ops = XALLOCAVEC (tree, length);
      for (unsigned i = 0; i < length; ++i)
	ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
    }
  vn_nary_op_t vnresult = NULL;
  return vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
				   res_op->type, ops, &vnresult);
}
/* Return a value-number for the expression described by RES_OP either
   by looking up an existing value-number for the simplified result or
   by inserting the operation if INSERT is true.  */

static tree
vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert)
{
  tree result = NULL_TREE;
  /* We will be creating a value number for
       RCODE (OPS...).
     So first simplify and lookup this expression to see if it
     is already available.  */
  /* Route lookups done by the simplifier through the VN tables.  */
  mprts_hook = vn_lookup_simplify_result;
  bool res = false;
  switch (TREE_CODE_LENGTH ((tree_code) res_op->code))
    {
    case 1:
      res = gimple_resimplify1 (NULL, res_op, vn_valueize);
      break;
    case 2:
      res = gimple_resimplify2 (NULL, res_op, vn_valueize);
      break;
    case 3:
      res = gimple_resimplify3 (NULL, res_op, vn_valueize);
      break;
    }
  mprts_hook = NULL;
  gimple *new_stmt = NULL;
  if (res
      && gimple_simplified_result_is_gimple_val (res_op))
    /* The expression is already available.  */
    result = res_op->ops[0];
  else
    {
      tree val = vn_lookup_simplify_result (res_op);
      if (!val && insert)
	{
	  /* Materialize the expression as a new statement so we get
	     an SSA name to value-number.  */
	  gimple_seq stmts = NULL;
	  result = maybe_push_res_to_seq (res_op, &stmts);
	  if (result)
	    {
	      gcc_assert (gimple_seq_singleton_p (stmts));
	      new_stmt = gimple_seq_first_stmt (stmts);
	    }
	}
      else
	/* The expression is already available.  */
	result = val;
    }
  if (new_stmt)
    {
      /* The expression is not yet available, value-number lhs to
	 the new SSA_NAME we created.  */
      /* Initialize value-number information properly.  */
      VN_INFO_GET (result)->valnum = result;
      VN_INFO (result)->value_id = get_next_value_id ();
      gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
					  new_stmt);
      VN_INFO (result)->needs_insertion = true;
      /* ??? PRE phi-translation inserts NARYs without corresponding
	 SSA name result.  Re-use those but set their result according
	 to the stmt we just built.  */
      vn_nary_op_t nary = NULL;
      vn_nary_op_lookup_stmt (new_stmt, &nary);
      if (nary)
	{
	  gcc_assert (nary->result == NULL_TREE);
	  nary->result = gimple_assign_lhs (new_stmt);
	}
      /* As all "inserted" statements are singleton SCCs, insert
	 to the valid table.  This is strictly needed to
	 avoid re-generating new value SSA_NAMEs for the same
	 expression during SCC iteration over and over (the
	 optimistic table gets cleared after each iteration).
	 We do not need to insert into the optimistic table, as
	 lookups there will fall back to the valid table.  */
      else
	{
	  unsigned int length = vn_nary_length_from_stmt (new_stmt);
	  vn_nary_op_t vno1
	    = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
	  vno1->value_id = VN_INFO (result)->value_id;
	  vno1->length = length;
	  vno1->result = result;
	  init_vn_nary_op_from_stmt (vno1, new_stmt);
	  vn_nary_op_insert_into (vno1, valid_info->nary, true);
	  /* Also do not link it into the undo chain.  */
	  last_inserted_nary = vno1->next;
	  vno1->next = (vn_nary_op_t)(void *)-1;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Inserting name ");
	  print_generic_expr (dump_file, result);
	  fprintf (dump_file, " for expression ");
	  print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }
  return result;
}
1755 /* Return a value-number for RCODE OPS... either by looking up an existing
1756 value-number for the simplified result or by inserting the operation. */
1758 static tree
1759 vn_nary_build_or_lookup (gimple_match_op *res_op)
1761 return vn_nary_build_or_lookup_1 (res_op, true);
/* Try to simplify the nary expression NARY and return its simplified
   value number if present, NULL_TREE otherwise.  Performs a lookup
   only; nothing is inserted into the tables.  */

tree
vn_nary_simplify (vn_nary_op_t nary)
{
  /* gimple_match_op can only hold a bounded number of operands.  */
  if (nary->length > gimple_match_op::MAX_NUM_OPS)
    return NULL_TREE;
  gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
		      nary->type, nary->length);
  memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
  return vn_nary_build_or_lookup_1 (&op, false);
}
1779 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1780 from the statement defining VUSE and if not successful tries to
1781 translate *REFP and VR_ through an aggregate copy at the definition
1782 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1783 of *REF and *VR. If only disambiguation was performed then
1784 *DISAMBIGUATE_ONLY is set to true. */
1786 static void *
1787 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1788 bool *disambiguate_only)
1790 vn_reference_t vr = (vn_reference_t)vr_;
1791 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1792 tree base = ao_ref_base (ref);
1793 HOST_WIDE_INT offseti, maxsizei;
1794 static vec<vn_reference_op_s> lhs_ops;
1795 ao_ref lhs_ref;
1796 bool lhs_ref_ok = false;
1797 poly_int64 copy_size;
1799 /* If the reference is based on a parameter that was determined as
1800 pointing to readonly memory it doesn't change. */
1801 if (TREE_CODE (base) == MEM_REF
1802 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1803 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1804 && bitmap_bit_p (const_parms,
1805 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1807 *disambiguate_only = true;
1808 return NULL;
1811 /* First try to disambiguate after value-replacing in the definitions LHS. */
1812 if (is_gimple_assign (def_stmt))
1814 tree lhs = gimple_assign_lhs (def_stmt);
1815 bool valueized_anything = false;
1816 /* Avoid re-allocation overhead. */
1817 lhs_ops.truncate (0);
1818 copy_reference_ops_from_ref (lhs, &lhs_ops);
1819 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1820 if (valueized_anything)
1822 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1823 get_alias_set (lhs),
1824 TREE_TYPE (lhs), lhs_ops);
1825 if (lhs_ref_ok
1826 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1828 *disambiguate_only = true;
1829 return NULL;
1832 else
1834 ao_ref_init (&lhs_ref, lhs);
1835 lhs_ref_ok = true;
1838 /* If we reach a clobbering statement try to skip it and see if
1839 we find a VN result with exactly the same value as the
1840 possible clobber. In this case we can ignore the clobber
1841 and return the found value.
1842 Note that we don't need to worry about partial overlapping
1843 accesses as we then can use TBAA to disambiguate against the
1844 clobbering statement when looking up a load (thus the
1845 VN_WALKREWRITE guard). */
1846 if (vn_walk_kind == VN_WALKREWRITE
1847 && is_gimple_reg_type (TREE_TYPE (lhs))
1848 && types_compatible_p (TREE_TYPE (lhs), vr->type))
1850 tree *saved_last_vuse_ptr = last_vuse_ptr;
1851 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
1852 last_vuse_ptr = NULL;
1853 tree saved_vuse = vr->vuse;
1854 hashval_t saved_hashcode = vr->hashcode;
1855 void *res = vn_reference_lookup_2 (ref,
1856 gimple_vuse (def_stmt), 0, vr);
1857 /* Need to restore vr->vuse and vr->hashcode. */
1858 vr->vuse = saved_vuse;
1859 vr->hashcode = saved_hashcode;
1860 last_vuse_ptr = saved_last_vuse_ptr;
1861 if (res && res != (void *)-1)
1863 vn_reference_t vnresult = (vn_reference_t) res;
1864 if (vnresult->result
1865 && operand_equal_p (vnresult->result,
1866 gimple_assign_rhs1 (def_stmt), 0))
1867 return res;
1871 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1872 && gimple_call_num_args (def_stmt) <= 4)
1874 /* For builtin calls valueize its arguments and call the
1875 alias oracle again. Valueization may improve points-to
1876 info of pointers and constify size and position arguments.
1877 Originally this was motivated by PR61034 which has
1878 conditional calls to free falsely clobbering ref because
1879 of imprecise points-to info of the argument. */
1880 tree oldargs[4];
1881 bool valueized_anything = false;
1882 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1884 oldargs[i] = gimple_call_arg (def_stmt, i);
1885 tree val = vn_valueize (oldargs[i]);
1886 if (val != oldargs[i])
1888 gimple_call_set_arg (def_stmt, i, val);
1889 valueized_anything = true;
1892 if (valueized_anything)
1894 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1895 ref);
1896 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1897 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1898 if (!res)
1900 *disambiguate_only = true;
1901 return NULL;
1906 if (*disambiguate_only)
1907 return (void *)-1;
1909 /* If we cannot constrain the size of the reference we cannot
1910 test if anything kills it. */
1911 if (!ref->max_size_known_p ())
1912 return (void *)-1;
1914 poly_int64 offset = ref->offset;
1915 poly_int64 maxsize = ref->max_size;
1917 /* We can't deduce anything useful from clobbers. */
1918 if (gimple_clobber_p (def_stmt))
1919 return (void *)-1;
1921 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1922 from that definition.
1923 1) Memset. */
1924 if (is_gimple_reg_type (vr->type)
1925 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1926 && (integer_zerop (gimple_call_arg (def_stmt, 1))
1927 || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
1928 || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
1929 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1930 && offset.is_constant (&offseti)
1931 && offseti % BITS_PER_UNIT == 0))
1932 && poly_int_tree_p (gimple_call_arg (def_stmt, 2))
1933 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
1934 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
1936 tree base2;
1937 poly_int64 offset2, size2, maxsize2;
1938 bool reverse;
1939 tree ref2 = gimple_call_arg (def_stmt, 0);
1940 if (TREE_CODE (ref2) == SSA_NAME)
1942 ref2 = SSA_VAL (ref2);
1943 if (TREE_CODE (ref2) == SSA_NAME
1944 && (TREE_CODE (base) != MEM_REF
1945 || TREE_OPERAND (base, 0) != ref2))
1947 gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
1948 if (gimple_assign_single_p (def_stmt)
1949 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
1950 ref2 = gimple_assign_rhs1 (def_stmt);
1953 if (TREE_CODE (ref2) == ADDR_EXPR)
1955 ref2 = TREE_OPERAND (ref2, 0);
1956 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1957 &reverse);
1958 if (!known_size_p (maxsize2)
1959 || !known_eq (maxsize2, size2)
1960 || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
1961 return (void *)-1;
1963 else if (TREE_CODE (ref2) == SSA_NAME)
1965 poly_int64 soff;
1966 if (TREE_CODE (base) != MEM_REF
1967 || !(mem_ref_offset (base) << LOG2_BITS_PER_UNIT).to_shwi (&soff))
1968 return (void *)-1;
1969 offset += soff;
1970 offset2 = 0;
1971 if (TREE_OPERAND (base, 0) != ref2)
1973 gimple *def = SSA_NAME_DEF_STMT (ref2);
1974 if (is_gimple_assign (def)
1975 && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
1976 && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
1977 && poly_int_tree_p (gimple_assign_rhs2 (def))
1978 && (wi::to_poly_offset (gimple_assign_rhs2 (def))
1979 << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
1981 ref2 = gimple_assign_rhs1 (def);
1982 if (TREE_CODE (ref2) == SSA_NAME)
1983 ref2 = SSA_VAL (ref2);
1985 else
1986 return (void *)-1;
1989 else
1990 return (void *)-1;
1991 tree len = gimple_call_arg (def_stmt, 2);
1992 if (known_subrange_p (offset, maxsize, offset2,
1993 wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
1995 tree val;
1996 if (integer_zerop (gimple_call_arg (def_stmt, 1)))
1997 val = build_zero_cst (vr->type);
1998 else if (INTEGRAL_TYPE_P (vr->type)
1999 && known_eq (ref->size, 8))
2001 gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
2002 vr->type, gimple_call_arg (def_stmt, 1));
2003 val = vn_nary_build_or_lookup (&res_op);
2004 if (!val
2005 || (TREE_CODE (val) == SSA_NAME
2006 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2007 return (void *)-1;
2009 else
2011 unsigned len = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type));
2012 unsigned char *buf = XALLOCAVEC (unsigned char, len);
2013 memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
2014 len);
2015 val = native_interpret_expr (vr->type, buf, len);
2016 if (!val)
2017 return (void *)-1;
2019 return vn_reference_lookup_or_insert_for_pieces
2020 (vuse, vr->set, vr->type, vr->operands, val);
2024 /* 2) Assignment from an empty CONSTRUCTOR. */
2025 else if (is_gimple_reg_type (vr->type)
2026 && gimple_assign_single_p (def_stmt)
2027 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
2028 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
2030 tree base2;
2031 poly_int64 offset2, size2, maxsize2;
2032 bool reverse;
2033 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2034 &offset2, &size2, &maxsize2, &reverse);
2035 if (known_size_p (maxsize2)
2036 && operand_equal_p (base, base2, 0)
2037 && known_subrange_p (offset, maxsize, offset2, size2))
2039 tree val = build_zero_cst (vr->type);
2040 return vn_reference_lookup_or_insert_for_pieces
2041 (vuse, vr->set, vr->type, vr->operands, val);
2045 /* 3) Assignment from a constant. We can use folds native encode/interpret
2046 routines to extract the assigned bits. */
2047 else if (known_eq (ref->size, maxsize)
2048 && is_gimple_reg_type (vr->type)
2049 && !contains_storage_order_barrier_p (vr->operands)
2050 && gimple_assign_single_p (def_stmt)
2051 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2052 /* native_encode and native_decode operate on arrays of bytes
2053 and so fundamentally need a compile-time size and offset. */
2054 && maxsize.is_constant (&maxsizei)
2055 && maxsizei % BITS_PER_UNIT == 0
2056 && offset.is_constant (&offseti)
2057 && offseti % BITS_PER_UNIT == 0
2058 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2059 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2060 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2062 tree base2;
2063 HOST_WIDE_INT offset2, size2;
2064 bool reverse;
2065 base2 = get_ref_base_and_extent_hwi (gimple_assign_lhs (def_stmt),
2066 &offset2, &size2, &reverse);
2067 if (base2
2068 && !reverse
2069 && size2 % BITS_PER_UNIT == 0
2070 && offset2 % BITS_PER_UNIT == 0
2071 && operand_equal_p (base, base2, 0)
2072 && known_subrange_p (offseti, maxsizei, offset2, size2))
2074 /* We support up to 512-bit values (for V8DFmode). */
2075 unsigned char buffer[64];
2076 int len;
2078 tree rhs = gimple_assign_rhs1 (def_stmt);
2079 if (TREE_CODE (rhs) == SSA_NAME)
2080 rhs = SSA_VAL (rhs);
2081 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
2082 buffer, sizeof (buffer),
2083 (offseti - offset2) / BITS_PER_UNIT);
2084 if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
2086 tree type = vr->type;
2087 /* Make sure to interpret in a type that has a range
2088 covering the whole access size. */
2089 if (INTEGRAL_TYPE_P (vr->type)
2090 && maxsizei != TYPE_PRECISION (vr->type))
2091 type = build_nonstandard_integer_type (maxsizei,
2092 TYPE_UNSIGNED (type));
2093 tree val = native_interpret_expr (type, buffer,
2094 maxsizei / BITS_PER_UNIT);
2095 /* If we chop off bits because the types precision doesn't
2096 match the memory access size this is ok when optimizing
2097 reads but not when called from the DSE code during
2098 elimination. */
2099 if (val
2100 && type != vr->type)
2102 if (! int_fits_type_p (val, vr->type))
2103 val = NULL_TREE;
2104 else
2105 val = fold_convert (vr->type, val);
2108 if (val)
2109 return vn_reference_lookup_or_insert_for_pieces
2110 (vuse, vr->set, vr->type, vr->operands, val);
2115 /* 4) Assignment from an SSA name which definition we may be able
2116 to access pieces from. */
2117 else if (known_eq (ref->size, maxsize)
2118 && is_gimple_reg_type (vr->type)
2119 && !contains_storage_order_barrier_p (vr->operands)
2120 && gimple_assign_single_p (def_stmt)
2121 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2123 tree base2;
2124 poly_int64 offset2, size2, maxsize2;
2125 bool reverse;
2126 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2127 &offset2, &size2, &maxsize2,
2128 &reverse);
2129 if (!reverse
2130 && known_size_p (maxsize2)
2131 && known_eq (maxsize2, size2)
2132 && operand_equal_p (base, base2, 0)
2133 && known_subrange_p (offset, maxsize, offset2, size2)
2134 /* ??? We can't handle bitfield precision extracts without
2135 either using an alternate type for the BIT_FIELD_REF and
2136 then doing a conversion or possibly adjusting the offset
2137 according to endianness. */
2138 && (! INTEGRAL_TYPE_P (vr->type)
2139 || known_eq (ref->size, TYPE_PRECISION (vr->type)))
2140 && multiple_p (ref->size, BITS_PER_UNIT))
2142 gimple_match_op op (gimple_match_cond::UNCOND,
2143 BIT_FIELD_REF, vr->type,
2144 SSA_VAL (gimple_assign_rhs1 (def_stmt)),
2145 bitsize_int (ref->size),
2146 bitsize_int (offset - offset2));
2147 tree val = vn_nary_build_or_lookup (&op);
2148 if (val
2149 && (TREE_CODE (val) != SSA_NAME
2150 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2152 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2153 (vuse, vr->set, vr->type, vr->operands, val);
2154 return res;
2159 /* 5) For aggregate copies translate the reference through them if
2160 the copy kills ref. */
2161 else if (vn_walk_kind == VN_WALKREWRITE
2162 && gimple_assign_single_p (def_stmt)
2163 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2164 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2165 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2167 tree base2;
2168 int i, j, k;
2169 auto_vec<vn_reference_op_s> rhs;
2170 vn_reference_op_t vro;
2171 ao_ref r;
2173 if (!lhs_ref_ok)
2174 return (void *)-1;
2176 /* See if the assignment kills REF. */
2177 base2 = ao_ref_base (&lhs_ref);
2178 if (!lhs_ref.max_size_known_p ()
2179 || (base != base2
2180 && (TREE_CODE (base) != MEM_REF
2181 || TREE_CODE (base2) != MEM_REF
2182 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2183 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2184 TREE_OPERAND (base2, 1))))
2185 || !stmt_kills_ref_p (def_stmt, ref))
2186 return (void *)-1;
2188 /* Find the common base of ref and the lhs. lhs_ops already
2189 contains valueized operands for the lhs. */
2190 i = vr->operands.length () - 1;
2191 j = lhs_ops.length () - 1;
2192 while (j >= 0 && i >= 0
2193 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2195 i--;
2196 j--;
2199 /* ??? The innermost op should always be a MEM_REF and we already
2200 checked that the assignment to the lhs kills vr. Thus for
2201 aggregate copies using char[] types the vn_reference_op_eq
2202 may fail when comparing types for compatibility. But we really
2203 don't care here - further lookups with the rewritten operands
2204 will simply fail if we messed up types too badly. */
2205 poly_int64 extra_off = 0;
2206 if (j == 0 && i >= 0
2207 && lhs_ops[0].opcode == MEM_REF
2208 && maybe_ne (lhs_ops[0].off, -1))
2210 if (known_eq (lhs_ops[0].off, vr->operands[i].off))
2211 i--, j--;
2212 else if (vr->operands[i].opcode == MEM_REF
2213 && maybe_ne (vr->operands[i].off, -1))
2215 extra_off = vr->operands[i].off - lhs_ops[0].off;
2216 i--, j--;
2220 /* i now points to the first additional op.
2221 ??? LHS may not be completely contained in VR, one or more
2222 VIEW_CONVERT_EXPRs could be in its way. We could at least
2223 try handling outermost VIEW_CONVERT_EXPRs. */
2224 if (j != -1)
2225 return (void *)-1;
2227 /* Punt if the additional ops contain a storage order barrier. */
2228 for (k = i; k >= 0; k--)
2230 vro = &vr->operands[k];
2231 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2232 return (void *)-1;
2235 /* Now re-write REF to be based on the rhs of the assignment. */
2236 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2238 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2239 if (maybe_ne (extra_off, 0))
2241 if (rhs.length () < 2)
2242 return (void *)-1;
2243 int ix = rhs.length () - 2;
2244 if (rhs[ix].opcode != MEM_REF
2245 || known_eq (rhs[ix].off, -1))
2246 return (void *)-1;
2247 rhs[ix].off += extra_off;
2248 rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
2249 build_int_cst (TREE_TYPE (rhs[ix].op0),
2250 extra_off));
2253 /* We need to pre-pend vr->operands[0..i] to rhs. */
2254 vec<vn_reference_op_s> old = vr->operands;
2255 if (i + 1 + rhs.length () > vr->operands.length ())
2256 vr->operands.safe_grow (i + 1 + rhs.length ());
2257 else
2258 vr->operands.truncate (i + 1 + rhs.length ());
2259 FOR_EACH_VEC_ELT (rhs, j, vro)
2260 vr->operands[i + 1 + j] = *vro;
2261 vr->operands = valueize_refs (vr->operands);
2262 if (old == shared_lookup_references)
2263 shared_lookup_references = vr->operands;
2264 vr->hashcode = vn_reference_compute_hash (vr);
2266 /* Try folding the new reference to a constant. */
2267 tree val = fully_constant_vn_reference_p (vr);
2268 if (val)
2269 return vn_reference_lookup_or_insert_for_pieces
2270 (vuse, vr->set, vr->type, vr->operands, val);
2272 /* Adjust *ref from the new operands. */
2273 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2274 return (void *)-1;
2275 /* This can happen with bitfields. */
2276 if (maybe_ne (ref->size, r.size))
2277 return (void *)-1;
2278 *ref = r;
2280 /* Do not update last seen VUSE after translating. */
2281 last_vuse_ptr = NULL;
2283 /* Keep looking for the adjusted *REF / VR pair. */
2284 return NULL;
2287 /* 6) For memcpy copies translate the reference through them if
2288 the copy kills ref. */
2289 else if (vn_walk_kind == VN_WALKREWRITE
2290 && is_gimple_reg_type (vr->type)
2291 /* ??? Handle BCOPY as well. */
2292 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2293 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2294 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2295 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2296 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2297 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2298 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2299 && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size))
2301 tree lhs, rhs;
2302 ao_ref r;
2303 poly_int64 rhs_offset, lhs_offset;
2304 vn_reference_op_s op;
2305 poly_uint64 mem_offset;
2306 poly_int64 at, byte_maxsize;
2308 /* Only handle non-variable, addressable refs. */
2309 if (maybe_ne (ref->size, maxsize)
2310 || !multiple_p (offset, BITS_PER_UNIT, &at)
2311 || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
2312 return (void *)-1;
2314 /* Extract a pointer base and an offset for the destination. */
2315 lhs = gimple_call_arg (def_stmt, 0);
2316 lhs_offset = 0;
2317 if (TREE_CODE (lhs) == SSA_NAME)
2319 lhs = SSA_VAL (lhs);
2320 if (TREE_CODE (lhs) == SSA_NAME)
2322 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2323 if (gimple_assign_single_p (def_stmt)
2324 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2325 lhs = gimple_assign_rhs1 (def_stmt);
2328 if (TREE_CODE (lhs) == ADDR_EXPR)
2330 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2331 &lhs_offset);
2332 if (!tem)
2333 return (void *)-1;
2334 if (TREE_CODE (tem) == MEM_REF
2335 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2337 lhs = TREE_OPERAND (tem, 0);
2338 if (TREE_CODE (lhs) == SSA_NAME)
2339 lhs = SSA_VAL (lhs);
2340 lhs_offset += mem_offset;
2342 else if (DECL_P (tem))
2343 lhs = build_fold_addr_expr (tem);
2344 else
2345 return (void *)-1;
2347 if (TREE_CODE (lhs) != SSA_NAME
2348 && TREE_CODE (lhs) != ADDR_EXPR)
2349 return (void *)-1;
2351 /* Extract a pointer base and an offset for the source. */
2352 rhs = gimple_call_arg (def_stmt, 1);
2353 rhs_offset = 0;
2354 if (TREE_CODE (rhs) == SSA_NAME)
2355 rhs = SSA_VAL (rhs);
2356 if (TREE_CODE (rhs) == ADDR_EXPR)
2358 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2359 &rhs_offset);
2360 if (!tem)
2361 return (void *)-1;
2362 if (TREE_CODE (tem) == MEM_REF
2363 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2365 rhs = TREE_OPERAND (tem, 0);
2366 rhs_offset += mem_offset;
2368 else if (DECL_P (tem)
2369 || TREE_CODE (tem) == STRING_CST)
2370 rhs = build_fold_addr_expr (tem);
2371 else
2372 return (void *)-1;
2374 if (TREE_CODE (rhs) != SSA_NAME
2375 && TREE_CODE (rhs) != ADDR_EXPR)
2376 return (void *)-1;
2378 /* The bases of the destination and the references have to agree. */
2379 if (TREE_CODE (base) == MEM_REF)
2381 if (TREE_OPERAND (base, 0) != lhs
2382 || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
2383 return (void *) -1;
2384 at += mem_offset;
2386 else if (!DECL_P (base)
2387 || TREE_CODE (lhs) != ADDR_EXPR
2388 || TREE_OPERAND (lhs, 0) != base)
2389 return (void *)-1;
2391 /* If the access is completely outside of the memcpy destination
2392 area there is no aliasing. */
2393 if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
2394 return NULL;
2395 /* And the access has to be contained within the memcpy destination. */
2396 if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
2397 return (void *)-1;
2399 /* Make room for 2 operands in the new reference. */
2400 if (vr->operands.length () < 2)
2402 vec<vn_reference_op_s> old = vr->operands;
2403 vr->operands.safe_grow_cleared (2);
2404 if (old == shared_lookup_references)
2405 shared_lookup_references = vr->operands;
2407 else
2408 vr->operands.truncate (2);
2410 /* The looked-through reference is a simple MEM_REF. */
2411 memset (&op, 0, sizeof (op));
2412 op.type = vr->type;
2413 op.opcode = MEM_REF;
2414 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2415 op.off = at - lhs_offset + rhs_offset;
2416 vr->operands[0] = op;
2417 op.type = TREE_TYPE (rhs);
2418 op.opcode = TREE_CODE (rhs);
2419 op.op0 = rhs;
2420 op.off = -1;
2421 vr->operands[1] = op;
2422 vr->hashcode = vn_reference_compute_hash (vr);
2424 /* Try folding the new reference to a constant. */
2425 tree val = fully_constant_vn_reference_p (vr);
2426 if (val)
2427 return vn_reference_lookup_or_insert_for_pieces
2428 (vuse, vr->set, vr->type, vr->operands, val);
2430 /* Adjust *ref from the new operands. */
2431 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2432 return (void *)-1;
2433 /* This can happen with bitfields. */
2434 if (maybe_ne (ref->size, r.size))
2435 return (void *)-1;
2436 *ref = r;
2438 /* Do not update last seen VUSE after translating. */
2439 last_vuse_ptr = NULL;
2441 /* Keep looking for the adjusted *REF / VR pair. */
2442 return NULL;
2445 /* Bail out and stop walking. */
2446 return (void *)-1;
2449 /* Return a reference op vector from OP that can be used for
2450 vn_reference_lookup_pieces. The caller is responsible for releasing
2451 the vector. */
2453 vec<vn_reference_op_s>
2454 vn_reference_operands_for_lookup (tree op)
2456 bool valueized;
2457 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
/* Lookup a reference operation by it's parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.
   OPERANDS is not modified; a valueized copy is built in the shared
   scratch vector.  With KIND != VN_NOWALK the VUSE chain is walked
   to find an earlier store providing the value.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
			    vec<vn_reference_op_s> operands,
			    vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse_ssa_val (vuse);
  /* Copy OPERANDS into the shared scratch vector so valueization does
     not clobber the caller's vector.  */
  shared_lookup_references.truncate (0);
  shared_lookup_references.safe_grow (operands.length ());
  memcpy (shared_lookup_references.address (),
	  operands.address (),
	  sizeof (vn_reference_op_s)
	  * operands.length ());
  /* NOTE: after this, vr1.operands aliases shared_lookup_references;
     the walker callbacks rely on (and assert) that invariant.  */
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  /* A reference that folds to a constant needs no table entry.  */
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      vn_walk_kind = kind;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
	*vnresult =
	  (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
						  vn_reference_lookup_2,
						  vn_reference_lookup_3,
						  vuse_ssa_val, &vr1);
      gcc_checking_assert (vr1.operands == shared_lookup_references);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL..  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  When TBAA_P is false assume
   we are looking up a store and treat it as having alias-set zero.  */

tree
vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
		     vn_reference_t *vnresult, bool tbaa_p)
{
  vec<vn_reference_op_s> operands;
  struct vn_reference_s vr1;
  tree cst;
  bool valuezied_anything;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse_ssa_val (vuse);
  /* vr1.operands aliases the shared scratch vector; the walk below
     asserts this stays true.  */
  vr1.operands = operands
    = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
  vr1.type = TREE_TYPE (op);
  /* Stores are looked up with alias-set zero (no TBAA).  */
  vr1.set = tbaa_p ? get_alias_set (op) : 0;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  if (kind != VN_NOWALK
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      /* Make sure to use a valueized reference if we valueized anything.
         Otherwise preserve the full reference for advanced TBAA.  */
      if (!valuezied_anything
	  || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
					     vr1.operands))
	ao_ref_init (&r, op);
      if (! tbaa_p)
	r.ref_alias_set = r.base_alias_set = 0;
      vn_walk_kind = kind;
      wvnresult =
	(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
						vn_reference_lookup_2,
						vn_reference_lookup_3,
						vuse_ssa_val, &vr1);
      gcc_checking_assert (vr1.operands == shared_lookup_references);
      if (wvnresult)
	{
	  if (vnresult)
	    *vnresult = wvnresult;
	  return wvnresult->result;
	}

      return NULL_TREE;
    }

  /* VN_NOWALK (or no VUSE): plain hash-table lookup only.  */
  return vn_reference_lookup_1 (&vr1, vnresult);
}
/* Lookup CALL in the current hash table and return the entry in
   *VNRESULT if found.  Populates *VR for the hashtable lookup so the
   caller can reuse it for a subsequent insertion.  */

void
vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
			  vn_reference_t vr)
{
  if (vnresult)
    *vnresult = NULL;

  tree vuse = gimple_vuse (call);

  vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr->operands = valueize_shared_reference_ops_from_call (call);
  vr->type = gimple_expr_type (call);
  /* Calls are not subject to TBAA; use alias-set zero.  */
  vr->set = 0;
  vr->hashcode = vn_reference_compute_hash (vr);
  vn_reference_lookup_1 (vr, vnresult);
}
/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.
   VUSE/VDEF are the virtual operands of the defining statement.  */

static vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
{
  vn_reference_s **slot;
  vn_reference_t vr1;
  bool tem;

  vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  /* Copy out of the shared scratch vector; this entry outlives the
     next lookup.  */
  vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
  vr1->result_vdef = vdef;

  slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
						      INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  /* Chain for table unwinding.  */
  vr1->next = last_inserted_ref;
  last_inserted_ref = vr1;
  return vr1;
}
/* Insert a reference by it's pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  OPERANDS is valueized in place; the table
   entry takes ownership of the vector.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
			    vec<vn_reference_op_s> operands,
			    tree result, unsigned int value_id)
{
  vn_reference_s **slot;
  vn_reference_t vr1;

  vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
						      INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  /* NOTE(review): unreachable when assertion checking is enabled;
     presumably kept so a colliding entry is still freed in builds
     where gcc_assert is compiled out -- confirm before removing.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  /* Chain for table unwinding.  */
  vr1->next = last_inserted_ref;
  last_inserted_ref = vr1;
  return vr1;
}
/* Compute and return the hash value for nary operation VBO1.
   NOTE: not a pure function - it canonicalizes VNO1 in place:
   SSA_NAME operands are replaced by their value numbers, and operand
   pairs of commutative codes and comparisons are put into canonical
   order (for comparisons, swapping the comparison code as needed) so
   equivalent operations hash and compare identically.  */

static hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  inchash::hash hstate;
  unsigned i;

  /* Valueize SSA_NAME operands first so the hash covers value
     numbers, not SSA names.  */
  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  /* Canonicalize the operand order of commutative binary and ternary
     operations.  */
  if (((vno1->length == 2
	&& commutative_tree_code (vno1->opcode))
       || (vno1->length == 3
	   && commutative_ternary_tree_code (vno1->opcode)))
      && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
    std::swap (vno1->op[0], vno1->op[1]);
  else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
	   && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
    {
      /* Swapping a comparison's operands requires mirroring the
	 comparison code as well.  */
      std::swap (vno1->op[0], vno1->op[1]);
      vno1->opcode = swap_tree_comparison (vno1->opcode);
    }

  hstate.add_int (vno1->opcode);
  for (i = 0; i < vno1->length; ++i)
    inchash::add_expr (vno1->op[i], hstate);

  return hstate.end ();
}
2712 /* Compare nary operations VNO1 and VNO2 and return true if they are
2713 equivalent. */
2715 bool
2716 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2718 unsigned i;
2720 if (vno1->hashcode != vno2->hashcode)
2721 return false;
2723 if (vno1->length != vno2->length)
2724 return false;
2726 if (vno1->opcode != vno2->opcode
2727 || !types_compatible_p (vno1->type, vno2->type))
2728 return false;
2730 for (i = 0; i < vno1->length; ++i)
2731 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2732 return false;
2734 /* BIT_INSERT_EXPR has an implict operand as the type precision
2735 of op1. Need to check to make sure they are the same. */
2736 if (vno1->opcode == BIT_INSERT_EXPR
2737 && TREE_CODE (vno1->op[1]) == INTEGER_CST
2738 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
2739 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
2740 return false;
2742 return true;
2745 /* Initialize VNO from the pieces provided. */
2747 static void
2748 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2749 enum tree_code code, tree type, tree *ops)
2751 vno->opcode = code;
2752 vno->length = length;
2753 vno->type = type;
2754 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2757 /* Initialize VNO from OP. */
2759 static void
2760 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2762 unsigned i;
2764 vno->opcode = TREE_CODE (op);
2765 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2766 vno->type = TREE_TYPE (op);
2767 for (i = 0; i < vno->length; ++i)
2768 vno->op[i] = TREE_OPERAND (op, i);
2771 /* Return the number of operands for a vn_nary ops structure from STMT. */
2773 static unsigned int
2774 vn_nary_length_from_stmt (gimple *stmt)
2776 switch (gimple_assign_rhs_code (stmt))
2778 case REALPART_EXPR:
2779 case IMAGPART_EXPR:
2780 case VIEW_CONVERT_EXPR:
2781 return 1;
2783 case BIT_FIELD_REF:
2784 return 3;
2786 case CONSTRUCTOR:
2787 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2789 default:
2790 return gimple_num_ops (stmt) - 1;
/* Initialize VNO from STMT.  VNO must have been allocated with room
   for vn_nary_length_from_stmt (STMT) operands.  */

static void
init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
{
  unsigned i;

  vno->opcode = gimple_assign_rhs_code (stmt);
  vno->type = gimple_expr_type (stmt);
  switch (vno->opcode)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      /* The single operand is wrapped inside the rhs1 reference tree.  */
      vno->length = 1;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      break;

    case BIT_FIELD_REF:
      /* Value, size and position operands of the reference.  */
      vno->length = 3;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
      vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
      break;

    case CONSTRUCTOR:
      /* One operand per constructor element value.  */
      vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
      for (i = 0; i < vno->length; ++i)
	vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
      break;

    default:
      /* Single-rhs assignments were all handled above.  */
      gcc_checking_assert (!gimple_assign_single_p (stmt));
      vno->length = gimple_num_ops (stmt) - 1;
      for (i = 0; i < vno->length; ++i)
	vno->op[i] = gimple_op (stmt, i + 1);
    }
}
2833 /* Compute the hashcode for VNO and look for it in the hash table;
2834 return the resulting value number if it exists in the hash table.
2835 Return NULL_TREE if it does not exist in the hash table or if the
2836 result field of the operation is NULL. VNRESULT will contain the
2837 vn_nary_op_t from the hashtable if it exists. */
2839 static tree
2840 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2842 vn_nary_op_s **slot;
2844 if (vnresult)
2845 *vnresult = NULL;
2847 vno->hashcode = vn_nary_op_compute_hash (vno);
2848 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2849 NO_INSERT);
2850 if (!slot)
2851 return NULL_TREE;
2852 if (vnresult)
2853 *vnresult = *slot;
2854 return (*slot)->result;
2857 /* Lookup a n-ary operation by its pieces and return the resulting value
2858 number if it exists in the hash table. Return NULL_TREE if it does
2859 not exist in the hash table or if the result field of the operation
2860 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2861 if it exists. */
2863 tree
2864 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2865 tree type, tree *ops, vn_nary_op_t *vnresult)
2867 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2868 sizeof_vn_nary_op (length));
2869 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2870 return vn_nary_op_lookup_1 (vno1, vnresult);
2873 /* Lookup OP in the current hash table, and return the resulting value
2874 number if it exists in the hash table. Return NULL_TREE if it does
2875 not exist in the hash table or if the result field of the operation
2876 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2877 if it exists. */
2879 tree
2880 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2882 vn_nary_op_t vno1
2883 = XALLOCAVAR (struct vn_nary_op_s,
2884 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2885 init_vn_nary_op_from_op (vno1, op);
2886 return vn_nary_op_lookup_1 (vno1, vnresult);
2889 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2890 value number if it exists in the hash table. Return NULL_TREE if
2891 it does not exist in the hash table. VNRESULT will contain the
2892 vn_nary_op_t from the hashtable if it exists. */
2894 tree
2895 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2897 vn_nary_op_t vno1
2898 = XALLOCAVAR (struct vn_nary_op_s,
2899 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2900 init_vn_nary_op_from_stmt (vno1, stmt);
2901 return vn_nary_op_lookup_1 (vno1, vnresult);
2904 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2906 static vn_nary_op_t
2907 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2909 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2912 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2913 obstack. */
2915 static vn_nary_op_t
2916 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2918 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);
2920 vno1->value_id = value_id;
2921 vno1->length = length;
2922 vno1->result = result;
2924 return vno1;
/* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
   VNO->HASHCODE first.  Returns the entry actually in the table,
   which may be a pre-existing one with the same value number.  */

static vn_nary_op_t
vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
			bool compute_hash)
{
  vn_nary_op_s **slot;

  if (compute_hash)
    vno->hashcode = vn_nary_op_compute_hash (vno);

  slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
  /* While we do not want to insert things twice it's awkward to
     avoid it in the case where visit_nary_op pattern-matches stuff
     and ends up simplifying the replacement to itself.  We then
     get two inserts, one from visit_nary_op and one from
     vn_nary_build_or_lookup.
     So allow inserts with the same value number.  */
  if (*slot && (*slot)->result == vno->result)
    return *slot;

  /* Any other duplicate insert is a bug.  */
  gcc_assert (!*slot);

  *slot = vno;
  /* Chain the new entry for table unwinding.  */
  vno->next = last_inserted_nary;
  last_inserted_nary = vno;
  return vno;
}
2957 /* Insert a n-ary operation into the current hash table using it's
2958 pieces. Return the vn_nary_op_t structure we created and put in
2959 the hashtable. */
2961 vn_nary_op_t
2962 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2963 tree type, tree *ops,
2964 tree result, unsigned int value_id)
2966 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2967 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2968 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
2971 /* Insert OP into the current hash table with a value number of
2972 RESULT. Return the vn_nary_op_t structure we created and put in
2973 the hashtable. */
2975 vn_nary_op_t
2976 vn_nary_op_insert (tree op, tree result)
2978 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2979 vn_nary_op_t vno1;
2981 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2982 init_vn_nary_op_from_op (vno1, op);
2983 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
2986 /* Insert the rhs of STMT into the current hash table with a value number of
2987 RESULT. */
2989 static vn_nary_op_t
2990 vn_nary_op_insert_stmt (gimple *stmt, tree result)
2992 vn_nary_op_t vno1
2993 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2994 result, VN_INFO (result)->value_id);
2995 init_vn_nary_op_from_stmt (vno1, stmt);
2996 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
/* Compute a hashcode for PHI operation VP1 and return it.  Skips
   backedge and VN_TOP arguments so the hash stays compatible with
   the cross-block matching done in vn_phi_eq.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  /* Seed with the predecessor count for small PHIs (so PHIs in
     different blocks with the same shape can match), but with the
     block index for >2 predecessors where vn_phi_eq never matches
     across blocks anyway.  */
  inchash::hash hstate (EDGE_COUNT (vp1->block->preds) > 2
			? vp1->block->index : EDGE_COUNT (vp1->block->preds));
  tree phi1op;
  tree type;
  edge e;
  edge_iterator ei;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = vp1->type;
  hstate.merge_hash (vn_hash_type (type));

  FOR_EACH_EDGE (e, ei, vp1->block->preds)
    {
      /* Don't hash backedge values they need to be handled as VN_TOP
         for optimistic value-numbering.  */
      if (e->flags & EDGE_DFS_BACK)
	continue;

      phi1op = vp1->phiargs[e->dest_idx];
      /* VN_TOP arguments are ignored by vn_phi_eq, so keep the hash
	 consistent by skipping them here too.  */
      if (phi1op == VN_TOP)
	continue;
      inchash::add_expr (phi1op, hstate);
    }

  return hstate.end ();
}
/* Return true if COND1 and COND2 represent the same condition, set
   *INVERTED_P if one needs to be inverted to make it the same as
   the other.  LHS1/RHS1 and LHS2/RHS2 are the (already valueized)
   operands used for the comparison.  */

static bool
cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
		    gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
{
  enum tree_code code1 = gimple_cond_code (cond1);
  enum tree_code code2 = gimple_cond_code (cond2);

  *inverted_p = false;
  if (code1 == code2)
    /* Same code: compare operands as-is.  */
    ;
  else if (code1 == swap_tree_comparison (code2))
    /* Mirrored code (e.g. LT vs GT): compare with operands swapped.  */
    std::swap (lhs2, rhs2);
  else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
    /* Inverted code (e.g. LT vs GE): same operands, edges inverted.
       invert_tree_comparison fails (returns ERROR_MARK) when NaNs
       make the inversion invalid.  */
    *inverted_p = true;
  else if (code1 == invert_tree_comparison
	   (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
    {
      /* Both mirrored and inverted.  */
      std::swap (lhs2, rhs2);
      *inverted_p = true;
    }
  else
    return false;

  /* After code canonicalization the operands must match, either
     directly or - for commutative comparisons - crosswise.  */
  return ((expressions_equal_p (lhs1, lhs2)
	   && expressions_equal_p (rhs1, rhs2))
	  || (commutative_tree_code (code1)
	      && expressions_equal_p (lhs1, rhs2)
	      && expressions_equal_p (rhs1, lhs2)));
}
/* Compare two phi entries for equality, ignoring VN_TOP arguments.
   PHIs in different blocks can only match in the two-predecessor
   case, where both must be controlled by equivalent conditions.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
{
  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block != vp2->block)
    {
      if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
	return false;

      switch (EDGE_COUNT (vp1->block->preds))
	{
	case 1:
	  /* Single-arg PHIs are just copies.  */
	  break;

	case 2:
	  {
	    /* Rule out backedges into the PHI.  */
	    if (vp1->block->loop_father->header == vp1->block
		|| vp2->block->loop_father->header == vp2->block)
	      return false;

	    /* If the PHI nodes do not have compatible types
	       they are not the same.  */
	    if (!types_compatible_p (vp1->type, vp2->type))
	      return false;

	    basic_block idom1
	      = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
	    basic_block idom2
	      = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
	    /* If the immediate dominator end in switch stmts multiple
	       values may end up in the same PHI arg via intermediate
	       CFG merges.  */
	    if (EDGE_COUNT (idom1->succs) != 2
		|| EDGE_COUNT (idom2->succs) != 2)
	      return false;

	    /* Verify the controlling stmt is the same.  */
	    gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
	    gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
	    if (! last1 || ! last2)
	      return false;
	    bool inverted_p;
	    if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
				      last2, vp2->cclhs, vp2->ccrhs,
				      &inverted_p))
	      return false;

	    /* Get at true/false controlled edges into the PHI.  */
	    edge te1, te2, fe1, fe2;
	    if (! extract_true_false_controlled_edges (idom1, vp1->block,
						       &te1, &fe1)
		|| ! extract_true_false_controlled_edges (idom2, vp2->block,
							  &te2, &fe2))
	      return false;

	    /* Swap edges if the second condition is the inverted of the
	       first.  */
	    if (inverted_p)
	      std::swap (te2, fe2);

	    /* Compare the arguments flowing in along the corresponding
	       true and false edges.  ??? Handle VN_TOP specially.  */
	    if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
				       vp2->phiargs[te2->dest_idx])
		|| ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
					  vp2->phiargs[fe2->dest_idx]))
	      return false;

	    return true;
	  }

	default:
	  return false;
	}
    }

  /* Same block from here on.  If the PHI nodes do not have compatible
     types they are not the same.  */
  if (!types_compatible_p (vp1->type, vp2->type))
    return false;

  /* Any phi in the same block will have it's arguments in the
     same edge order, because of how we store phi nodes.  */
  for (unsigned i = 0; i < EDGE_COUNT (vp1->block->preds); ++i)
    {
      tree phi1op = vp1->phiargs[i];
      tree phi2op = vp2->phiargs[i];
      /* VN_TOP (optimistically undefined) matches anything.  */
      if (phi1op == VN_TOP || phi2op == VN_TOP)
	continue;
      if (!expressions_equal_p (phi1op, phi2op))
	return false;
    }

  return true;
}
/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple *phi)
{
  vn_phi_s **slot;
  struct vn_phi_s *vp1;
  edge e;
  edge_iterator ei;

  /* Stack-allocate a lookup key with trailing storage for one slot
     per PHI argument (the struct declares one).  */
  vp1 = XALLOCAVAR (struct vn_phi_s,
		    sizeof (struct vn_phi_s)
		    + (gimple_phi_num_args (phi) - 1) * sizeof (tree));

  /* Canonicalize the SSA_NAME's to their value number.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    {
      tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      vp1->phiargs[e->dest_idx] = def;
    }
  vp1->type = TREE_TYPE (gimple_phi_result (phi));
  vp1->block = gimple_bb (phi);
  /* Extract values of the controlling condition (used by vn_phi_eq
     for cross-block matching of two-predecessor PHIs).  */
  vp1->cclhs = NULL_TREE;
  vp1->ccrhs = NULL_TREE;
  basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
  if (EDGE_COUNT (idom1->succs) == 2)
    if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
      {
	vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
	vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
      }
  vp1->hashcode = vn_phi_compute_hash (vp1);
  slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode,
						NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return (*slot)->result;
}
3211 /* Insert PHI into the current hash table with a value number of
3212 RESULT. */
3214 static vn_phi_t
3215 vn_phi_insert (gimple *phi, tree result)
3217 vn_phi_s **slot;
/* Allocate the permanent entry on the VN obstack; the extra
   (num_args - 1) tree slots extend the trailing phiargs array
   (same layout assumption as in vn_phi_lookup -- TODO confirm).  */
3218 vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
3219 sizeof (vn_phi_s)
3220 + ((gimple_phi_num_args (phi) - 1)
3221 * sizeof (tree)));
3222 edge e;
3223 edge_iterator ei;
3225 /* Canonicalize the SSA_NAME's to their value number. */
3226 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3228 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3229 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3230 vp1->phiargs[e->dest_idx] = def;
3232 vp1->value_id = VN_INFO (result)->value_id;
3233 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3234 vp1->block = gimple_bb (phi);
3235 /* Extract values of the controlling condition. */
3236 vp1->cclhs = NULL_TREE;
3237 vp1->ccrhs = NULL_TREE;
3238 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3239 if (EDGE_COUNT (idom1->succs) == 2)
3240 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3242 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3243 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3245 vp1->result = result;
3246 vp1->hashcode = vn_phi_compute_hash (vp1);
3248 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3250 /* Because we iterate over phi operations more than once, it's
3251 possible the slot might already exist here, hence no assert.*/
3252 *slot = vp1;
/* Chain onto the insertion list so optimistic iterations can be
   rolled back (see the unwinding loop in process_scc).  */
3253 vp1->next = last_inserted_phi;
3254 last_inserted_phi = vp1;
3255 return vp1;
3259 /* Print set of components in strongly connected component SCC to OUT. */
3261 static void
3262 print_scc (FILE *out, vec<tree> scc)
3264 tree var;
3265 unsigned int i;
3267 fprintf (out, "SCC consists of %u:", scc.length ());
3268 FOR_EACH_VEC_ELT (scc, i, var)
3270 fprintf (out, " ");
3271 print_generic_expr (out, var);
3273 fprintf (out, "\n");
3276 /* Return true if BB1 is dominated by BB2 taking into account edges
3277 that are not executable. */
3279 static bool
3280 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3282 edge_iterator ei;
3283 edge e;
/* Fast path: plain dominance already holds.  */
3285 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3286 return true;
3288 /* Before iterating we'd like to know if there exists a
3289 (executable) path from bb2 to bb1 at all, if not we can
3290 directly return false. For now simply iterate once. */
3292 /* Iterate to the single executable bb1 predecessor. */
3293 if (EDGE_COUNT (bb1->preds) > 1)
3295 edge prede = NULL;
3296 FOR_EACH_EDGE (e, ei, bb1->preds)
3297 if (e->flags & EDGE_EXECUTABLE)
3299 if (prede)
/* More than one executable predecessor -- this refinement step
   cannot help, give up on it.  */
3301 prede = NULL;
3302 break;
3304 prede = e;
3306 if (prede)
3308 bb1 = prede->src;
3310 /* Re-do the dominance check with changed bb1. */
3311 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3312 return true;
3316 /* Iterate to the single executable bb2 successor. */
3317 edge succe = NULL;
3318 FOR_EACH_EDGE (e, ei, bb2->succs)
3319 if (e->flags & EDGE_EXECUTABLE)
3321 if (succe)
3323 succe = NULL;
3324 break;
3326 succe = e;
3328 if (succe)
3330 /* Verify the reached block is only reached through succe.
3331 If there is only one edge we can spare us the dominator
3332 check and iterate directly. */
3333 if (EDGE_COUNT (succe->dest->preds) > 1)
3335 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3336 if (e != succe
3337 && (e->flags & EDGE_EXECUTABLE))
3339 succe = NULL;
3340 break;
3343 if (succe)
3345 bb2 = succe->dest;
3347 /* Re-do the dominance check with changed bb2. */
3348 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3349 return true;
3353 /* We could now iterate updating bb1 / bb2. */
3354 return false;
3357 /* Set the value number of FROM to TO, return true if it has changed
3358 as a result. */
3360 static inline bool
3361 set_ssa_val_to (tree from, tree to)
3363 tree currval = SSA_VAL (from);
3364 poly_int64 toff, coff;
3366 /* The only thing we allow as value numbers are ssa_names
3367 and invariants. So assert that here. We don't allow VN_TOP
3368 as visiting a stmt should produce a value-number other than
3369 that.
3370 ??? Still VN_TOP can happen for unreachable code, so force
3371 it to varying in that case. Not all code is prepared to
3372 get VN_TOP on valueization. */
3373 if (to == VN_TOP)
3375 if (dump_file && (dump_flags & TDF_DETAILS))
3376 fprintf (dump_file, "Forcing value number to varying on "
3377 "receiving VN_TOP\n");
3378 to = from;
3381 gcc_assert (to != NULL_TREE
3382 && ((TREE_CODE (to) == SSA_NAME
3383 && (to == from || SSA_VAL (to) == to))
3384 || is_gimple_min_invariant (to)));
3386 if (from != to)
/* SSA_VAL (from) == from means FROM already values to itself,
   i.e. it is VARYING (see the dump text below); the lattice only
   moves downward, so never change it again.  */
3388 if (currval == from)
3390 if (dump_file && (dump_flags & TDF_DETAILS))
3392 fprintf (dump_file, "Not changing value number of ");
3393 print_generic_expr (dump_file, from);
3394 fprintf (dump_file, " from VARYING to ");
3395 print_generic_expr (dump_file, to);
3396 fprintf (dump_file, "\n");
3398 return false;
/* Refuse to move a name that already has a non-constant value to a
   constant; force VARYING instead (termination/monotonicity guard).  */
3400 else if (currval != VN_TOP
3401 && ! is_gimple_min_invariant (currval)
3402 && is_gimple_min_invariant (to))
3404 if (dump_file && (dump_flags & TDF_DETAILS))
3406 fprintf (dump_file, "Forcing VARYING instead of changing "
3407 "value number of ");
3408 print_generic_expr (dump_file, from);
3409 fprintf (dump_file, " from ");
3410 print_generic_expr (dump_file, currval);
3411 fprintf (dump_file, " (non-constant) to ");
3412 print_generic_expr (dump_file, to);
3413 fprintf (dump_file, " (constant)\n");
3415 to = from;
/* Never use an SSA name occurring in an abnormal PHI as a value
   number; fall back to FROM itself.  */
3417 else if (TREE_CODE (to) == SSA_NAME
3418 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3419 to = from;
3422 if (dump_file && (dump_flags & TDF_DETAILS))
3424 fprintf (dump_file, "Setting value number of ");
3425 print_generic_expr (dump_file, from);
3426 fprintf (dump_file, " to ");
3427 print_generic_expr (dump_file, to);
3430 if (currval != to
3431 && !operand_equal_p (currval, to, 0)
3432 /* Different undefined SSA names are not actually different. See
3433 PR82320 for a testcase were we'd otherwise not terminate iteration. */
3434 && !(TREE_CODE (currval) == SSA_NAME
3435 && TREE_CODE (to) == SSA_NAME
3436 && ssa_undefined_value_p (currval, false)
3437 && ssa_undefined_value_p (to, false))
3438 /* ??? For addresses involving volatile objects or types operand_equal_p
3439 does not reliably detect ADDR_EXPRs as equal. We know we are only
3440 getting invariant gimple addresses here, so can use
3441 get_addr_base_and_unit_offset to do this comparison. */
3442 && !(TREE_CODE (currval) == ADDR_EXPR
3443 && TREE_CODE (to) == ADDR_EXPR
3444 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3445 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3446 && known_eq (coff, toff)))
3448 if (dump_file && (dump_flags & TDF_DETAILS))
3449 fprintf (dump_file, " (changed)\n");
3451 /* If we equate two SSA names we have to make the side-band info
3452 of the leader conservative (and remember whatever original value
3453 was present). */
3454 if (TREE_CODE (to) == SSA_NAME)
3456 if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3457 && SSA_NAME_RANGE_INFO (to))
/* Range info on TO stays valid only if TO's definition dominates
   FROM's (or TO is a default def); otherwise drop it.  */
3459 if (SSA_NAME_IS_DEFAULT_DEF (to)
3460 || dominated_by_p_w_unex
3461 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3462 gimple_bb (SSA_NAME_DEF_STMT (to))))
3463 /* Keep the info from the dominator. */
3465 else
3467 /* Save old info. */
3468 if (! VN_INFO (to)->info.range_info)
3470 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3471 VN_INFO (to)->range_info_anti_range_p
3472 = SSA_NAME_ANTI_RANGE_P (to);
3474 /* Rather than allocating memory and unioning the info
3475 just clear it. */
3476 if (dump_file && (dump_flags & TDF_DETAILS))
3478 fprintf (dump_file, "clearing range info of ");
3479 print_generic_expr (dump_file, to);
3480 fprintf (dump_file, "\n");
3482 SSA_NAME_RANGE_INFO (to) = NULL;
3485 else if (POINTER_TYPE_P (TREE_TYPE (to))
3486 && SSA_NAME_PTR_INFO (to))
3488 if (SSA_NAME_IS_DEFAULT_DEF (to)
3489 || dominated_by_p_w_unex
3490 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3491 gimple_bb (SSA_NAME_DEF_STMT (to))))
3492 /* Keep the info from the dominator. */
3494 else if (! SSA_NAME_PTR_INFO (from)
3495 /* Handle the case of trivially equivalent info. */
3496 || memcmp (SSA_NAME_PTR_INFO (to),
3497 SSA_NAME_PTR_INFO (from),
3498 sizeof (ptr_info_def)) != 0)
3500 /* Save old info. */
3501 if (! VN_INFO (to)->info.ptr_info)
3502 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3503 /* Rather than allocating memory and unioning the info
3504 just clear it. */
3505 if (dump_file && (dump_flags & TDF_DETAILS))
3507 fprintf (dump_file, "clearing points-to info of ");
3508 print_generic_expr (dump_file, to);
3509 fprintf (dump_file, "\n");
3511 SSA_NAME_PTR_INFO (to) = NULL;
3516 VN_INFO (from)->valnum = to;
3517 return true;
3519 if (dump_file && (dump_flags & TDF_DETAILS))
3520 fprintf (dump_file, "\n");
3521 return false;
3524 /* Mark as processed all the definitions in the defining stmt of USE, or
3525 the USE itself. */
3527 static void
3528 mark_use_processed (tree use)
3530 ssa_op_iter iter;
3531 def_operand_p defp;
3532 gimple *stmt = SSA_NAME_DEF_STMT (use);
3534 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3536 VN_INFO (use)->use_processed = true;
3537 return;
3540 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3542 tree def = DEF_FROM_PTR (defp);
3544 VN_INFO (def)->use_processed = true;
3548 /* Set all definitions in STMT to value number to themselves.
3549 Return true if a value number changed. */
3551 static bool
3552 defs_to_varying (gimple *stmt)
3554 bool changed = false;
3555 ssa_op_iter iter;
3556 def_operand_p defp;
3558 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3560 tree def = DEF_FROM_PTR (defp);
3561 changed |= set_ssa_val_to (def, def);
3563 return changed;
3566 /* Visit a copy between LHS and RHS, return true if the value number
3567 changed. */
3569 static bool
3570 visit_copy (tree lhs, tree rhs)
3572 /* Valueize. */
3573 rhs = SSA_VAL (rhs);
3575 return set_ssa_val_to (lhs, rhs);
3578 /* Lookup a value for OP in type WIDE_TYPE where the value in type of OP
3579 is the same. */
3581 static tree
3582 valueized_wider_op (tree wide_type, tree op)
3584 if (TREE_CODE (op) == SSA_NAME)
3585 op = SSA_VAL (op);
3587 /* Either we have the op widened available. */
3588 tree ops[3] = {};
3589 ops[0] = op;
/* Look for an existing (T)op conversion result in the nary table.  */
3590 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
3591 wide_type, ops, NULL);
3592 if (tem)
3593 return tem;
3595 /* Or the op is truncated from some existing value. */
3596 if (TREE_CODE (op) == SSA_NAME)
3598 gimple *def = SSA_NAME_DEF_STMT (op);
3599 if (is_gimple_assign (def)
3600 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3602 tem = gimple_assign_rhs1 (def);
/* The conversion source must already have the wide type.  */
3603 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
3605 if (TREE_CODE (tem) == SSA_NAME)
3606 tem = SSA_VAL (tem);
3607 return tem;
3612 /* For constants simply extend it. */
3613 if (TREE_CODE (op) == INTEGER_CST)
3614 return wide_int_to_tree (wide_type, wi::to_wide (op));
3616 return NULL_TREE;
3619 /* Visit a nary operator RHS, value number it, and return true if the
3620 value number of LHS has changed as a result. */
3622 static bool
3623 visit_nary_op (tree lhs, gassign *stmt)
/* First try a straight table lookup of the whole statement.  */
3625 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3626 if (result)
3627 return set_ssa_val_to (lhs, result);
3629 /* Do some special pattern matching for redundancies of operations
3630 in different types. */
3631 enum tree_code code = gimple_assign_rhs_code (stmt);
3632 tree type = TREE_TYPE (lhs);
3633 tree rhs1 = gimple_assign_rhs1 (stmt);
3634 switch (code)
3636 CASE_CONVERT:
3637 /* Match arithmetic done in a different type where we can easily
3638 substitute the result from some earlier sign-changed or widened
3639 operation. */
3640 if (INTEGRAL_TYPE_P (type)
3641 && TREE_CODE (rhs1) == SSA_NAME
3642 /* We only handle sign-changes or zero-extension -> & mask. */
3643 && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
3644 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
3645 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
3647 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
3648 if (def
3649 && (gimple_assign_rhs_code (def) == PLUS_EXPR
3650 || gimple_assign_rhs_code (def) == MINUS_EXPR
3651 || gimple_assign_rhs_code (def) == MULT_EXPR))
3653 tree ops[3] = {};
3654 /* Either we have the op widened available. */
3655 ops[0] = valueized_wider_op (type,
3656 gimple_assign_rhs1 (def));
3657 if (ops[0])
3658 ops[1] = valueized_wider_op (type,
3659 gimple_assign_rhs2 (def));
3660 if (ops[0] && ops[1])
3662 ops[0] = vn_nary_op_lookup_pieces
3663 (2, gimple_assign_rhs_code (def), type, ops, NULL);
3664 /* We have wider operation available. */
3665 if (ops[0])
3667 unsigned lhs_prec = TYPE_PRECISION (type);
3668 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
/* Equal precision: a plain sign-change conversion suffices.  */
3669 if (lhs_prec == rhs_prec)
3671 gimple_match_op match_op (gimple_match_cond::UNCOND,
3672 NOP_EXPR, type, ops[0]);
3673 result = vn_nary_build_or_lookup (&match_op);
3674 if (result)
3676 bool changed = set_ssa_val_to (lhs, result);
3677 vn_nary_op_insert_stmt (stmt, result);
3678 return changed;
/* Zero-extension case: mask the wider result down to the narrow
   precision with BIT_AND.  */
3681 else
3683 tree mask = wide_int_to_tree
3684 (type, wi::mask (rhs_prec, false, lhs_prec));
3685 gimple_match_op match_op (gimple_match_cond::UNCOND,
3686 BIT_AND_EXPR,
3687 TREE_TYPE (lhs),
3688 ops[0], mask);
3689 result = vn_nary_build_or_lookup (&match_op);
3690 if (result)
3692 bool changed = set_ssa_val_to (lhs, result);
3693 vn_nary_op_insert_stmt (stmt, result);
3694 return changed;
3701 default:;
/* Nothing matched: value the LHS to itself and record the stmt so
   later identical operations can be found.  */
3704 bool changed = set_ssa_val_to (lhs, lhs);
3705 vn_nary_op_insert_stmt (stmt, lhs);
3706 return changed;
3709 /* Visit a call STMT storing into LHS. Return true if the value number
3710 of the LHS has changed as a result. */
3712 static bool
3713 visit_reference_op_call (tree lhs, gcall *stmt)
3715 bool changed = false;
3716 struct vn_reference_s vr1;
3717 vn_reference_t vnresult = NULL;
3718 tree vdef = gimple_vdef (stmt);
3720 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3721 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3722 lhs = NULL_TREE;
3724 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3725 if (vnresult)
/* Hit: reuse the recorded VDEF and result value numbers.  */
3727 if (vnresult->result_vdef && vdef)
3728 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3729 else if (vdef)
3730 /* If the call was discovered to be pure or const reflect
3731 that as far as possible. */
3732 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
3734 if (!vnresult->result && lhs)
3735 vnresult->result = lhs;
3737 if (vnresult->result && lhs)
3738 changed |= set_ssa_val_to (lhs, vnresult->result);
3740 else
/* Miss: value-number the defs and record a fresh table entry.  */
3742 vn_reference_t vr2;
3743 vn_reference_s **slot;
3744 tree vdef_val = vdef;
3745 if (vdef)
3747 /* If we value numbered an indirect functions function to
3748 one not clobbering memory value number its VDEF to its
3749 VUSE. */
3750 tree fn = gimple_call_fn (stmt);
3751 if (fn && TREE_CODE (fn) == SSA_NAME)
3753 fn = SSA_VAL (fn);
3754 if (TREE_CODE (fn) == ADDR_EXPR
3755 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3756 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
3757 & (ECF_CONST | ECF_PURE)))
3758 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
3760 changed |= set_ssa_val_to (vdef, vdef_val);
3762 if (lhs)
3763 changed |= set_ssa_val_to (lhs, lhs);
3764 vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3765 vr2->vuse = vr1.vuse;
3766 /* As we are not walking the virtual operand chain we know the
3767 shared_lookup_references are still original so we can re-use
3768 them here. */
3769 vr2->operands = vr1.operands.copy ();
3770 vr2->type = vr1.type;
3771 vr2->set = vr1.set;
3772 vr2->hashcode = vr1.hashcode;
3773 vr2->result = lhs;
3774 vr2->result_vdef = vdef_val;
3775 slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3776 INSERT);
3777 gcc_assert (!*slot);
3778 *slot = vr2;
/* Chain for iteration rollback, like the nary/phi tables.  */
3779 vr2->next = last_inserted_ref;
3780 last_inserted_ref = vr2;
3783 return changed;
3786 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3787 and return true if the value number of the LHS has changed as a result. */
3789 static bool
3790 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3792 bool changed = false;
3793 tree last_vuse;
3794 tree result;
/* last_vuse_ptr lets the lookup machinery report the earliest VUSE
   the walked value is valid at, so the insert below is keyed on it.  */
3796 last_vuse = gimple_vuse (stmt);
3797 last_vuse_ptr = &last_vuse;
3798 result = vn_reference_lookup (op, gimple_vuse (stmt),
3799 default_vn_walk_kind, NULL, true);
3800 last_vuse_ptr = NULL;
3802 /* We handle type-punning through unions by value-numbering based
3803 on offset and size of the access. Be prepared to handle a
3804 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
3805 if (result
3806 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3808 /* We will be setting the value number of lhs to the value number
3809 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3810 So first simplify and lookup this expression to see if it
3811 is already available. */
3812 gimple_match_op res_op (gimple_match_cond::UNCOND,
3813 VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
3814 result = vn_nary_build_or_lookup (&res_op);
3817 if (result)
3818 changed = set_ssa_val_to (lhs, result);
3819 else
/* No prior value: value LHS to itself and record the load.  */
3821 changed = set_ssa_val_to (lhs, lhs);
3822 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3825 return changed;
3829 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3830 and return true if the value number of the LHS has changed as a result. */
3832 static bool
3833 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3835 bool changed = false;
3836 vn_reference_t vnresult = NULL;
3837 tree assign;
3838 bool resultsame = false;
3839 tree vuse = gimple_vuse (stmt);
3840 tree vdef = gimple_vdef (stmt);
3842 if (TREE_CODE (op) == SSA_NAME)
3843 op = SSA_VAL (op);
3845 /* First we want to lookup using the *vuses* from the store and see
3846 if there the last store to this location with the same address
3847 had the same value.
3849 The vuses represent the memory state before the store. If the
3850 memory state, address, and value of the store is the same as the
3851 last store to this location, then this store will produce the
3852 same memory state as that store.
3854 In this case the vdef versions for this store are value numbered to those
3855 vuse versions, since they represent the same memory state after
3856 this store.
3858 Otherwise, the vdefs for the store are used when inserting into
3859 the table, since the store generates a new memory state. */
3861 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
3862 if (vnresult
3863 && vnresult->result)
3865 tree result = vnresult->result;
3866 if (TREE_CODE (result) == SSA_NAME)
3867 result = SSA_VAL (result);
3868 resultsame = expressions_equal_p (result, op);
3869 if (resultsame)
3871 /* If the TBAA state isn't compatible for downstream reads
3872 we cannot value-number the VDEFs the same. */
3873 alias_set_type set = get_alias_set (lhs);
3874 if (vnresult->set != set
3875 && ! alias_set_subset_of (set, vnresult->set))
3876 resultsame = false;
3880 if (!resultsame)
3882 /* Only perform the following when being called from PRE
3883 which embeds tail merging. */
3884 if (default_vn_walk_kind == VN_WALK)
/* Probe for an identical MODIFY_EXPR recorded by a previous,
   otherwise-equivalent store (tail-merging support).  */
3886 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3887 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3888 if (vnresult)
3890 VN_INFO (vdef)->use_processed = true;
3891 return set_ssa_val_to (vdef, vnresult->result_vdef);
3895 if (dump_file && (dump_flags & TDF_DETAILS))
3897 fprintf (dump_file, "No store match\n");
3898 fprintf (dump_file, "Value numbering store ");
3899 print_generic_expr (dump_file, lhs);
3900 fprintf (dump_file, " to ");
3901 print_generic_expr (dump_file, op);
3902 fprintf (dump_file, "\n");
3904 /* Have to set value numbers before insert, since insert is
3905 going to valueize the references in-place. */
3906 if (vdef)
3907 changed |= set_ssa_val_to (vdef, vdef);
3909 /* Do not insert structure copies into the tables. */
3910 if (is_gimple_min_invariant (op)
3911 || is_gimple_reg (op))
3912 vn_reference_insert (lhs, op, vdef, NULL);
3914 /* Only perform the following when being called from PRE
3915 which embeds tail merging. */
3916 if (default_vn_walk_kind == VN_WALK)
3918 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3919 vn_reference_insert (assign, lhs, vuse, vdef);
3922 else
3924 /* We had a match, so value number the vdef to have the value
3925 number of the vuse it came from. */
3927 if (dump_file && (dump_flags & TDF_DETAILS))
3928 fprintf (dump_file, "Store matched earlier value, "
3929 "value numbering store vdefs to matching vuses.\n");
3931 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3934 return changed;
3937 /* Visit and value number PHI, return true if the value number
3938 changed. */
3940 static bool
3941 visit_phi (gimple *phi)
3943 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
3944 unsigned n_executable = 0;
3945 bool allsame = true;
3946 edge_iterator ei;
3947 edge e;
3949 /* TODO: We could check for this in init_sccvn, and replace this
3950 with a gcc_assert. */
3951 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3952 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3954 /* See if all non-TOP arguments have the same value. TOP is
3955 equivalent to everything, so we can ignore it. */
3956 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3957 if (e->flags & EDGE_EXECUTABLE)
3959 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3961 ++n_executable;
3962 if (TREE_CODE (def) == SSA_NAME)
3963 def = SSA_VAL (def);
3964 if (def == VN_TOP)
3966 /* Ignore undefined defs for sameval but record one. */
3967 else if (TREE_CODE (def) == SSA_NAME
3968 && ssa_undefined_value_p (def, false))
3969 seen_undef = def;
3970 else if (sameval == VN_TOP)
3971 sameval = def;
3972 else if (!expressions_equal_p (def, sameval))
3974 allsame = false;
3975 break;
3980 /* If none of the edges was executable keep the value-number at VN_TOP,
3981 if only a single edge is exectuable use its value. */
3982 if (n_executable <= 1)
3983 result = seen_undef ? seen_undef : sameval;
3984 /* If we saw only undefined values and VN_TOP use one of the
3985 undefined values. */
3986 else if (sameval == VN_TOP)
3987 result = seen_undef ? seen_undef : sameval;
3988 /* First see if it is equivalent to a phi node in this block. We prefer
3989 this as it allows IV elimination - see PRs 66502 and 67167. */
3990 else if ((result = vn_phi_lookup (phi)))
3992 /* If all values are the same use that, unless we've seen undefined
3993 values as well and the value isn't constant.
3994 CCP/copyprop have the same restriction to not remove uninit warnings. */
3995 else if (allsame
3996 && (! seen_undef || is_gimple_min_invariant (sameval)))
3997 result = sameval;
3998 else
/* No equivalence found: the PHI result is its own value.  */
4000 result = PHI_RESULT (phi);
4001 /* Only insert PHIs that are varying, for constant value numbers
4002 we mess up equivalences otherwise as we are only comparing
4003 the immediate controlling predicates. */
4004 vn_phi_insert (phi, result);
4007 return set_ssa_val_to (PHI_RESULT (phi), result);
4010 /* Try to simplify RHS using equivalences and constant folding. */
4012 static tree
4013 try_to_simplify (gassign *stmt)
4015 enum tree_code code = gimple_assign_rhs_code (stmt);
4016 tree tem;
4018 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
4019 in this case, there is no point in doing extra work. */
4020 if (code == SSA_NAME)
4021 return NULL_TREE;
4023 /* First try constant folding based on our current lattice. */
4024 mprts_hook = vn_lookup_simplify_result;
4025 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
4026 mprts_hook = NULL;
4027 if (tem
4028 && (TREE_CODE (tem) == SSA_NAME
4029 || is_gimple_min_invariant (tem)))
4030 return tem;
4032 return NULL_TREE;
4035 /* Visit and value number USE, return true if the value number
4036 changed. */
4038 static bool
4039 visit_use (tree use)
4041 bool changed = false;
4042 gimple *stmt = SSA_NAME_DEF_STMT (use);
4044 mark_use_processed (use);
4046 gcc_assert (!SSA_NAME_IN_FREE_LIST (use)
4047 && !SSA_NAME_IS_DEFAULT_DEF (use));
4049 if (dump_file && (dump_flags & TDF_DETAILS))
4051 fprintf (dump_file, "Value numbering ");
4052 print_generic_expr (dump_file, use);
4053 fprintf (dump_file, " stmt = ");
4054 print_gimple_stmt (dump_file, stmt, 0);
/* Dispatch on the kind of defining statement: PHI, volatile,
   assignment, call, or anything else (-> varying).  */
4057 if (gimple_code (stmt) == GIMPLE_PHI)
4058 changed = visit_phi (stmt);
4059 else if (gimple_has_volatile_ops (stmt))
4060 changed = defs_to_varying (stmt);
4061 else if (gassign *ass = dyn_cast <gassign *> (stmt))
4063 enum tree_code code = gimple_assign_rhs_code (ass);
4064 tree lhs = gimple_assign_lhs (ass);
4065 tree rhs1 = gimple_assign_rhs1 (ass);
4066 tree simplified;
4068 /* Shortcut for copies. Simplifying copies is pointless,
4069 since we copy the expression and value they represent. */
4070 if (code == SSA_NAME
4071 && TREE_CODE (lhs) == SSA_NAME)
4073 changed = visit_copy (lhs, rhs1);
4074 goto done;
4076 simplified = try_to_simplify (ass);
4077 if (simplified)
4079 if (dump_file && (dump_flags & TDF_DETAILS))
4081 fprintf (dump_file, "RHS ");
4082 print_gimple_expr (dump_file, ass, 0);
4083 fprintf (dump_file, " simplified to ");
4084 print_generic_expr (dump_file, simplified);
4085 fprintf (dump_file, "\n");
4088 /* Setting value numbers to constants will occasionally
4089 screw up phi congruence because constants are not
4090 uniquely associated with a single ssa name that can be
4091 looked up. */
4092 if (simplified
4093 && is_gimple_min_invariant (simplified)
4094 && TREE_CODE (lhs) == SSA_NAME)
4096 changed = set_ssa_val_to (lhs, simplified);
4097 goto done;
4099 else if (simplified
4100 && TREE_CODE (simplified) == SSA_NAME
4101 && TREE_CODE (lhs) == SSA_NAME)
4103 changed = visit_copy (lhs, simplified);
4104 goto done;
4107 if ((TREE_CODE (lhs) == SSA_NAME
4108 /* We can substitute SSA_NAMEs that are live over
4109 abnormal edges with their constant value. */
4110 && !(gimple_assign_copy_p (ass)
4111 && is_gimple_min_invariant (rhs1))
4112 && !(simplified
4113 && is_gimple_min_invariant (simplified))
4114 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4115 /* Stores or copies from SSA_NAMEs that are live over
4116 abnormal edges are a problem. */
4117 || (code == SSA_NAME
4118 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4119 changed = defs_to_varying (ass);
4120 else if (REFERENCE_CLASS_P (lhs)
4121 || DECL_P (lhs))
4122 changed = visit_reference_op_store (lhs, rhs1, ass);
4123 else if (TREE_CODE (lhs) == SSA_NAME)
4125 if ((gimple_assign_copy_p (ass)
4126 && is_gimple_min_invariant (rhs1))
4127 || (simplified
4128 && is_gimple_min_invariant (simplified)))
4130 if (simplified)
4131 changed = set_ssa_val_to (lhs, simplified);
4132 else
4133 changed = set_ssa_val_to (lhs, rhs1);
4135 else
4137 /* Visit the original statement. */
4138 switch (vn_get_stmt_kind (ass))
4140 case VN_NARY:
4141 changed = visit_nary_op (lhs, ass);
4142 break;
4143 case VN_REFERENCE:
4144 changed = visit_reference_op_load (lhs, rhs1, ass);
4145 break;
4146 default:
4147 changed = defs_to_varying (ass);
4148 break;
4152 else
4153 changed = defs_to_varying (ass);
4155 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4157 tree lhs = gimple_call_lhs (call_stmt);
4158 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4160 /* Try constant folding based on our current lattice. */
4161 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4162 vn_valueize);
4163 if (simplified)
4165 if (dump_file && (dump_flags & TDF_DETAILS))
4167 fprintf (dump_file, "call ");
4168 print_gimple_expr (dump_file, call_stmt, 0);
4169 fprintf (dump_file, " simplified to ");
4170 print_generic_expr (dump_file, simplified);
4171 fprintf (dump_file, "\n");
4174 /* Setting value numbers to constants will occasionally
4175 screw up phi congruence because constants are not
4176 uniquely associated with a single ssa name that can be
4177 looked up. */
4178 if (simplified
4179 && is_gimple_min_invariant (simplified))
4181 changed = set_ssa_val_to (lhs, simplified);
/* A folded call does not change memory: value its VDEF to
   the (valueized) incoming VUSE.  */
4182 if (gimple_vdef (call_stmt))
4183 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4184 SSA_VAL (gimple_vuse (call_stmt)));
4185 goto done;
4187 else if (simplified
4188 && TREE_CODE (simplified) == SSA_NAME)
4190 changed = visit_copy (lhs, simplified);
4191 if (gimple_vdef (call_stmt))
4192 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4193 SSA_VAL (gimple_vuse (call_stmt)));
4194 goto done;
4196 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4198 changed = defs_to_varying (call_stmt);
4199 goto done;
4203 /* Pick up flags from a devirtualization target. */
4204 tree fn = gimple_call_fn (stmt);
4205 int extra_fnflags = 0;
4206 if (fn && TREE_CODE (fn) == SSA_NAME)
4208 fn = SSA_VAL (fn);
4209 if (TREE_CODE (fn) == ADDR_EXPR
4210 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4211 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4213 if (!gimple_call_internal_p (call_stmt)
4214 && (/* Calls to the same function with the same vuse
4215 and the same operands do not necessarily return the same
4216 value, unless they're pure or const. */
4217 ((gimple_call_flags (call_stmt) | extra_fnflags)
4218 & (ECF_PURE | ECF_CONST))
4219 /* If calls have a vdef, subsequent calls won't have
4220 the same incoming vuse. So, if 2 calls with vdef have the
4221 same vuse, we know they're not subsequent.
4222 We can value number 2 calls to the same function with the
4223 same vuse and the same operands which are not subsequent
4224 the same, because there is no code in the program that can
4225 compare the 2 values... */
4226 || (gimple_vdef (call_stmt)
4227 /* ... unless the call returns a pointer which does
4228 not alias with anything else. In which case the
4229 information that the values are distinct are encoded
4230 in the IL. */
4231 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4232 /* Only perform the following when being called from PRE
4233 which embeds tail merging. */
4234 && default_vn_walk_kind == VN_WALK)))
4235 changed = visit_reference_op_call (lhs, call_stmt);
4236 else
4237 changed = defs_to_varying (call_stmt);
4239 else
4240 changed = defs_to_varying (stmt);
4241 done:
4242 return changed;
4245 /* Compare two operands by reverse postorder index */
4247 static int
4248 compare_ops (const void *pa, const void *pb)
4250 const tree opa = *((const tree *)pa);
4251 const tree opb = *((const tree *)pb);
4252 gimple *opstmta = SSA_NAME_DEF_STMT (opa);
4253 gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
4254 basic_block bba;
4255 basic_block bbb;
4257 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
4258 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4259 else if (gimple_nop_p (opstmta))
4260 return -1;
4261 else if (gimple_nop_p (opstmtb))
4262 return 1;
4264 bba = gimple_bb (opstmta);
4265 bbb = gimple_bb (opstmtb);
4267 if (!bba && !bbb)
4268 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4269 else if (!bba)
4270 return -1;
4271 else if (!bbb)
4272 return 1;
4274 if (bba == bbb)
4276 if (gimple_code (opstmta) == GIMPLE_PHI
4277 && gimple_code (opstmtb) == GIMPLE_PHI)
4278 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4279 else if (gimple_code (opstmta) == GIMPLE_PHI)
4280 return -1;
4281 else if (gimple_code (opstmtb) == GIMPLE_PHI)
4282 return 1;
4283 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
4284 return gimple_uid (opstmta) - gimple_uid (opstmtb);
4285 else
4286 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4288 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
/* Sort an array containing members of a strongly connected component
   SCC so that the members are ordered by RPO number.
   This means that when the sort is complete, iterating through the
   array will give you the members in RPO order.  */

static void
sort_scc (vec<tree> scc)
{
  /* compare_ops orders by the RPO number of the defining stmt's block,
     with deterministic tie-breaking inside a block.  */
  scc.qsort (compare_ops);
}
/* Process a strongly connected component in the SSA graph by
   iterating value-numbering of its members to a fixed point.
   Singleton non-PHI members are visited once; everything else is
   iterated optimistically, rolling back hash-table insertions made
   in a non-final iteration.  */

static void
process_scc (vec<tree> scc)
{
  tree var;
  unsigned int i;
  unsigned int iterations = 0;
  bool changed = true;

  /* If the SCC has a single member, just visit it.  */
  if (scc.length () == 1)
    {
      tree use = scc[0];
      if (VN_INFO (use)->use_processed)
	return;
      /* We need to make sure it doesn't form a cycle itself, which can
	 happen for self-referential PHI nodes.  In that case we would
	 end up inserting an expression with VN_TOP operands into the
	 valid table which makes us derive bogus equivalences later.
	 The cheapest way to check this is to assume it for all PHI nodes.  */
      if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
	/* Fallthru to iteration.  */ ;
      else
	{
	  visit_use (use);
	  return;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  /* Iterate over the SCC with the optimistic table until it stops
     changing.  */
  while (changed)
    {
      changed = false;
      iterations++;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting iteration %d\n", iterations);
      /* As we are value-numbering optimistically we have to
	 clear the expression tables and the simplified expressions
	 in each iteration until we converge.  Record the current
	 high-water marks of the obstack and the insertion chains so
	 everything added by this iteration can be unwound below.  */
      void *ob_top = obstack_alloc (&vn_tables_obstack, 0);
      vn_reference_t ref_top = last_inserted_ref;
      vn_phi_t phi_top = last_inserted_phi;
      vn_nary_op_t nary_top = last_inserted_nary;
      FOR_EACH_VEC_ELT (scc, i, var)
	gcc_assert (!VN_INFO (var)->needs_insertion
		    && VN_INFO (var)->expr == NULL);
      FOR_EACH_VEC_ELT (scc, i, var)
	changed |= visit_use (var);
      if (changed)
	{
	  /* Something changed, so another iteration follows: pop all
	     entries this iteration inserted, walking each chain of
	     insertions back to its recorded top.  */
	  for (; last_inserted_nary != nary_top;
	       last_inserted_nary = last_inserted_nary->next)
	    {
	      vn_nary_op_t *slot;
	      slot = valid_info->nary->find_slot_with_hash (last_inserted_nary,
							    last_inserted_nary->hashcode,
							    NO_INSERT);
	      gcc_assert (slot);
	      valid_info->nary->clear_slot (slot);
	    }
	  for (; last_inserted_phi != phi_top;
	       last_inserted_phi = last_inserted_phi->next)
	    {
	      vn_phi_t *slot;
	      slot = valid_info->phis->find_slot_with_hash (last_inserted_phi,
							    last_inserted_phi->hashcode,
							    NO_INSERT);
	      gcc_assert (slot);
	      valid_info->phis->clear_slot (slot);
	    }
	  for (; last_inserted_ref != ref_top;
	       last_inserted_ref = last_inserted_ref->next)
	    {
	      vn_reference_t *slot;
	      slot = valid_info->references->find_slot_with_hash (last_inserted_ref,
								  last_inserted_ref->hashcode,
								  NO_INSERT);
	      gcc_assert (slot);
	      /* Reference entries own their operand vectors; release
		 them before dropping the entry.  */
	      (*slot)->operands.release ();
	      valid_info->references->clear_slot (slot);
	    }
	  /* Release the obstack memory the iteration allocated.  */
	  obstack_free (&vn_tables_obstack, ob_top);
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
  statistics_histogram_event (cfun, "SCC iterations", iterations);
}
/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  An SCC larger than PARAM_SCCVN_MAX_SCC_SIZE is
   not iterated at all; its members are dropped to varying instead.  */

static void
extract_and_process_scc_for_name (tree name)
{
  auto_vec<tree> scc;
  tree x;

  /* Found an SCC, pop the components off the SCC stack and
     process them.  NAME is the root, so everything above it on the
     stack (inclusive) belongs to this SCC.  */
  do
    {
      x = sccstack.pop ();

      VN_INFO (x)->on_sccstack = false;
      scc.safe_push (x);
    } while (x != name);

  /* Drop all defs in the SCC to varying in case a SCC turns out to be
     incredibly large.
     ??? Just switch to a non-optimistic mode that avoids any iteration.  */
  if (scc.length () > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
    {
      if (dump_file)
	{
	  print_scc (dump_file, scc);
	  fprintf (dump_file, "WARNING: Giving up value-numbering SCC due to "
		   "size %u exceeding %u\n", scc.length (),
		   (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
	}

      tree var;
      unsigned i;
      FOR_EACH_VEC_ELT (scc, i, var)
	{
	  gimple *def = SSA_NAME_DEF_STMT (var);
	  mark_use_processed (var);
	  /* Default defs and PHI results are valued to themselves;
	     other defs go to varying.  */
	  if (SSA_NAME_IS_DEFAULT_DEF (var)
	      || gimple_code (def) == GIMPLE_PHI)
	    set_ssa_val_to (var, var);
	  else
	    defs_to_varying (def);
	}
      return;
    }

  /* Visit members in RPO order for faster convergence.  */
  if (scc.length () > 1)
    sort_scc (scc);

  process_scc (scc);
}
/* Depth first search on NAME to discover and process SCC's in the SSA
   graph.
   Execution of this algorithm relies on the fact that the SCC's are
   popped off the stack in topological order.
   This is Tarjan's SCC algorithm with the recursion made explicit:
   NAMEVEC/ITERVEC hold the saved walk state and the start_over /
   continue_walking gotos replace recursive calls and returns.  */

static void
DFS (tree name)
{
  auto_vec<ssa_op_iter> itervec;	/* Saved operand iterators.  */
  auto_vec<tree> namevec;		/* Saved names being walked.  */
  use_operand_p usep = NULL;
  gimple *defstmt;
  tree use;
  ssa_op_iter iter;

start_over:
  /* SCC info: assign the discovery number and low-link for NAME.  */
  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  sccstack.safe_push (name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCC's.  */
  if (!gimple_nop_p (defstmt))
    {
      /* Push a new iterator.  */
      if (gphi *phi = dyn_cast <gphi *> (defstmt))
	usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
      else
	usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    }
  else
    clear_and_done_ssa_iter (&iter);

  while (1)
    {
      /* If we are done processing uses of a name, go up the stack
	 of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
	{
	  /* See if we found an SCC: NAME is a root iff its low-link
	     equals its own discovery number.  */
	  if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
	    extract_and_process_scc_for_name (name);

	  /* Check if we are done.  */
	  if (namevec.is_empty ())
	    return;

	  /* Restore the last use walker and continue walking there.  */
	  use = name;
	  name = namevec.pop ();
	  memcpy (&iter, &itervec.last (),
		  sizeof (ssa_op_iter));
	  itervec.pop ();
	  goto continue_walking;
	}

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
	 invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
	{
	  if (! (VN_INFO (use)->visited))
	    {
	      /* Recurse by pushing the current use walking state on
		 the stack and starting over.  */
	      itervec.safe_push (iter);
	      namevec.safe_push (name);
	      name = use;
	      goto start_over;
	    }

continue_walking:
	  /* Propagate the low-link from the just-finished child.  */
	  VN_INFO (name)->low = MIN (VN_INFO (name)->low,
				     VN_INFO (use)->low);

	  /* A back/cross edge to a node still on the stack also
	     lowers NAME's low-link.  */
	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
	      && VN_INFO (use)->on_sccstack)
	    {
	      VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
					 VN_INFO (name)->low);
	    }
	}

      usep = op_iter_next_use (&iter);
    }
}
4544 /* Allocate a value number table. */
4546 static void
4547 allocate_vn_table (vn_tables_t table)
4549 table->phis = new vn_phi_table_type (23);
4550 table->nary = new vn_nary_op_table_type (23);
4551 table->references = new vn_reference_table_type (23);
4554 /* Free a value number table. */
4556 static void
4557 free_vn_table (vn_tables_t table)
4559 /* Walk over elements and release vectors. */
4560 vn_reference_iterator_type hir;
4561 vn_reference_t vr;
4562 FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
4563 vr->operands.release ();
4564 delete table->phis;
4565 table->phis = NULL;
4566 delete table->nary;
4567 table->nary = NULL;
4568 delete table->references;
4569 table->references = NULL;
/* Initialize global SCCVN state: dominators, the SCC stack, the
   RPO numbering, the valid value-number table and per-SSA-name
   VN_INFO.  Default defs of parameters and results are pre-valued
   to themselves (VARYING); everything else starts at VN_TOP.  */

static void
init_scc_vn (void)
{
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  mark_dfs_back_edges ();

  sccstack.create (0);
  constant_to_value_id = new hash_table<vn_constant_hasher> (23);

  constant_value_ids = BITMAP_ALLOC (NULL);

  /* DFS numbers and value ids start at 1; 0 means unassigned.  */
  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table.create (num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size, it just
     preallocates the space to do so.  */
  vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_references.create (0);
  rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
  rpo_numbers_temp =
    XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
     the i'th block in RPO order is bb.  We want to map bb's to RPO
     numbers, so we need to rearrange this array.  */
  for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;

  XDELETE (rpo_numbers_temp);

  /* VN_TOP is a synthetic decl used as the "unknown value" lattice
     top element.  */
  VN_TOP = build_decl (UNKNOWN_LOCATION, VAR_DECL,
		       get_identifier ("VN_TOP"), error_mark_node);

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  gcc_obstack_init (&vn_tables_obstack);
  gcc_obstack_init (&vn_tables_insert_obstack);
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  last_inserted_ref = NULL;
  last_inserted_phi = NULL;
  last_inserted_nary = NULL;

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP or VARYING for parameters.  */
  size_t i;
  tree name;

  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      VN_INFO_GET (name)->valnum = VN_TOP;
      VN_INFO (name)->needs_insertion = false;
      VN_INFO (name)->expr = NULL;
      VN_INFO (name)->value_id = 0;

      /* Only default defs get a value other than VN_TOP up front.  */
      if (!SSA_NAME_IS_DEFAULT_DEF (name))
	continue;

      switch (TREE_CODE (SSA_NAME_VAR (name)))
	{
	case VAR_DECL:
	  /* All undefined vars are VARYING.  */
	  VN_INFO (name)->valnum = name;
	  VN_INFO (name)->visited = true;
	  break;

	case PARM_DECL:
	  /* Parameters are VARYING but we can record a condition
	     if we know it is a non-NULL pointer.  */
	  VN_INFO (name)->visited = true;
	  VN_INFO (name)->valnum = name;
	  if (POINTER_TYPE_P (TREE_TYPE (name))
	      && nonnull_arg_p (SSA_NAME_VAR (name)))
	    {
	      /* Seed the nary table with "name != 0 == true".  */
	      tree ops[2];
	      ops[0] = name;
	      ops[1] = build_int_cst (TREE_TYPE (name), 0);
	      vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
					boolean_true_node, 0);
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "Recording ");
		  print_generic_expr (dump_file, name, TDF_SLIM);
		  fprintf (dump_file, " != 0\n");
		}
	    }
	  break;

	case RESULT_DECL:
	  /* If the result is passed by invisible reference the default
	     def is initialized, otherwise it's uninitialized.  Still
	     undefined is varying.  */
	  VN_INFO (name)->visited = true;
	  VN_INFO (name)->valnum = name;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
}
/* Restore SSA info that has been reset on value leaders: points-to
   info for pointers and range info for integral names, both stashed
   in VN_INFO during value numbering.  Names inserted by VN are
   skipped (they never had such info to restore).  */

void
scc_vn_restore_ssa_info (void)
{
  unsigned i;
  tree name;

  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      if (has_VN_INFO (name))
	{
	  if (VN_INFO (name)->needs_insertion)
	    /* Nothing to restore for names VN inserted itself.  */
	    ;
	  else if (POINTER_TYPE_P (TREE_TYPE (name))
		   && VN_INFO (name)->info.ptr_info)
	    SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
	  else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
		   && VN_INFO (name)->info.range_info)
	    {
	      SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
	      SSA_NAME_ANTI_RANGE_P (name)
		= VN_INFO (name)->range_info_anti_range_p;
	    }
	}
    }
}
/* Tear down all global SCCVN state allocated by init_scc_vn and
   run_scc_vn, releasing SSA names VN inserted but that were never
   used by elimination.  */

void
free_scc_vn (void)
{
  size_t i;
  tree name;

  delete constant_to_value_id;
  constant_to_value_id = NULL;
  BITMAP_FREE (constant_value_ids);
  shared_lookup_references.release ();
  XDELETEVEC (rpo_numbers);

  /* Names VN created for insertion that are still around have no
     real definition; release them before dropping the aux table.  */
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      if (has_VN_INFO (name)
	  && VN_INFO (name)->needs_insertion)
	release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  vn_ssa_aux_table.release ();

  sccstack.release ();
  free_vn_table (valid_info);
  XDELETE (valid_info);
  obstack_free (&vn_tables_obstack, NULL);
  obstack_free (&vn_tables_insert_obstack, NULL);

  BITMAP_FREE (const_parms);
}
4740 /* Set *ID according to RESULT. */
4742 static void
4743 set_value_id_for_result (tree result, unsigned int *id)
4745 if (result && TREE_CODE (result) == SSA_NAME)
4746 *id = VN_INFO (result)->value_id;
4747 else if (result && is_gimple_min_invariant (result))
4748 *id = get_or_alloc_constant_value_id (result);
4749 else
4750 *id = get_next_value_id ();
4753 /* Set the value ids in the valid hash tables. */
4755 static void
4756 set_hashtable_value_ids (void)
4758 vn_nary_op_iterator_type hin;
4759 vn_phi_iterator_type hip;
4760 vn_reference_iterator_type hir;
4761 vn_nary_op_t vno;
4762 vn_reference_t vr;
4763 vn_phi_t vp;
4765 /* Now set the value ids of the things we had put in the hash
4766 table. */
4768 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4769 set_value_id_for_result (vno->result, &vno->value_id);
4771 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4772 set_value_id_for_result (vp->result, &vp->value_id);
4774 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4775 hir)
4776 set_value_id_for_result (vr->result, &vr->value_id);
/* Dominator walker driving SCC value numbering over the function,
   recording temporary conditional equivalences on the way down and
   undoing them on the way back up.  */

class sccvn_dom_walker : public dom_walker
{
public:
  sccvn_dom_walker ()
    : dom_walker (CDI_DOMINATORS, REACHABLE_BLOCKS), cond_stack (0) {}

  virtual edge before_dom_children (basic_block);
  virtual void after_dom_children (basic_block);

  /* Record a single temporary condition valid in BB's dominated
     region.  */
  void record_cond (basic_block,
		    enum tree_code code, tree lhs, tree rhs, bool value);
  /* Record a condition together with the conditions it implies.  */
  void record_conds (basic_block,
		     enum tree_code code, tree lhs, tree rhs, bool value);

  /* Stack of (block, (recorded condition, displaced old entry))
     pairs, popped by after_dom_children to undo recordings.  */
  auto_vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
    cond_stack;
};
4797 /* Record a temporary condition for the BB and its dominated blocks. */
4799 void
4800 sccvn_dom_walker::record_cond (basic_block bb,
4801 enum tree_code code, tree lhs, tree rhs,
4802 bool value)
4804 tree ops[2] = { lhs, rhs };
4805 vn_nary_op_t old = NULL;
4806 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4807 valid_info->nary->remove_elt_with_hash (old, old->hashcode);
4808 vn_nary_op_t cond
4809 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4810 value
4811 ? boolean_true_node
4812 : boolean_false_node, 0);
4813 if (dump_file && (dump_flags & TDF_DETAILS))
4815 fprintf (dump_file, "Recording temporarily ");
4816 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4817 fprintf (dump_file, " %s ", get_tree_code_name (code));
4818 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4819 fprintf (dump_file, " == %s%s\n",
4820 value ? "true" : "false",
4821 old ? " (old entry saved)" : "");
4823 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4826 /* Record temporary conditions for the BB and its dominated blocks
4827 according to LHS CODE RHS == VALUE and its dominated conditions. */
4829 void
4830 sccvn_dom_walker::record_conds (basic_block bb,
4831 enum tree_code code, tree lhs, tree rhs,
4832 bool value)
4834 /* Record the original condition. */
4835 record_cond (bb, code, lhs, rhs, value);
4837 if (!value)
4838 return;
4840 /* Record dominated conditions if the condition is true. Note that
4841 the inversion is already recorded. */
4842 switch (code)
4844 case LT_EXPR:
4845 case GT_EXPR:
4846 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4847 record_cond (bb, NE_EXPR, lhs, rhs, true);
4848 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4849 break;
4851 case EQ_EXPR:
4852 record_cond (bb, LE_EXPR, lhs, rhs, true);
4853 record_cond (bb, GE_EXPR, lhs, rhs, true);
4854 record_cond (bb, LT_EXPR, lhs, rhs, false);
4855 record_cond (bb, GT_EXPR, lhs, rhs, false);
4856 break;
4858 default:
4859 break;
4863 /* Restore expressions and values derived from conditionals. */
4865 void
4866 sccvn_dom_walker::after_dom_children (basic_block bb)
4868 while (!cond_stack.is_empty ()
4869 && cond_stack.last ().first == bb)
4871 vn_nary_op_t cond = cond_stack.last ().second.first;
4872 vn_nary_op_t old = cond_stack.last ().second.second;
4873 valid_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4874 if (old)
4875 vn_nary_op_insert_into (old, valid_info->nary, false);
4876 cond_stack.pop ();
/* Value number all statements in BB.  Additionally record conditional
   equivalences from a dominating single predecessor edge, and return
   the sole taken outgoing edge when the controlling statement's value
   can be determined, so the domwalk can skip unreachable blocks.  */

edge
sccvn_dom_walker::before_dom_children (basic_block bb)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Visiting BB %d\n", bb->index);

  /* If we have a single predecessor record the equivalence from a
     possible condition on the predecessor edge.  */
  edge pred_e = single_pred_edge_ignoring_loop_edges (bb, false);
  if (pred_e)
    {
      /* Check if there are multiple executable successor edges in
	 the source block.  Otherwise there is no additional info
	 to be recorded.  */
      edge_iterator ei;
      edge e2;
      FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
	if (e2 != pred_e
	    && e2->flags & EDGE_EXECUTABLE)
	  break;
      if (e2 && (e2->flags & EDGE_EXECUTABLE))
	{
	  gimple *stmt = last_stmt (pred_e->src);
	  if (stmt
	      && gimple_code (stmt) == GIMPLE_COND)
	    {
	      enum tree_code code = gimple_cond_code (stmt);
	      tree lhs = gimple_cond_lhs (stmt);
	      tree rhs = gimple_cond_rhs (stmt);
	      /* Record the condition itself with the polarity of the
		 edge taken, and its inversion with opposite polarity
		 when the comparison is invertible.  */
	      record_conds (bb, code, lhs, rhs,
			    (pred_e->flags & EDGE_TRUE_VALUE) != 0);
	      code = invert_tree_comparison (code, HONOR_NANS (lhs));
	      if (code != ERROR_MARK)
		record_conds (bb, code, lhs, rhs,
			      (pred_e->flags & EDGE_TRUE_VALUE) == 0);
	    }
	}
    }

  /* Value-number all defs in the basic-block.  */
  for (gphi_iterator gsi = gsi_start_phis (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      tree res = PHI_RESULT (phi);
      if (!VN_INFO (res)->visited)
	DFS (res);
    }
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      ssa_op_iter i;
      tree op;
      FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
	if (!VN_INFO (op)->visited)
	  DFS (op);
    }

  /* Finally look at the last stmt.  */
  gimple *stmt = last_stmt (bb);
  if (!stmt)
    return NULL;

  enum gimple_code code = gimple_code (stmt);
  if (code != GIMPLE_COND
      && code != GIMPLE_SWITCH
      && code != GIMPLE_GOTO)
    return NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
      print_gimple_stmt (dump_file, stmt, 0);
    }

  /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
     if value-numbering can prove they are not reachable.  Handling
     computed gotos is also possible.  */
  tree val;
  switch (code)
    {
    case GIMPLE_COND:
      {
	tree lhs = vn_valueize (gimple_cond_lhs (stmt));
	tree rhs = vn_valueize (gimple_cond_rhs (stmt));
	val = gimple_simplify (gimple_cond_code (stmt),
			       boolean_type_node, lhs, rhs,
			       NULL, vn_valueize);
	/* If that didn't simplify to a constant see if we have recorded
	   temporary expressions from taken edges.  */
	if (!val || TREE_CODE (val) != INTEGER_CST)
	  {
	    tree ops[2];
	    ops[0] = lhs;
	    ops[1] = rhs;
	    val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
					    boolean_type_node, ops, NULL);
	  }
	break;
      }
    case GIMPLE_SWITCH:
      val = gimple_switch_index (as_a <gswitch *> (stmt));
      break;
    case GIMPLE_GOTO:
      val = gimple_goto_dest (stmt);
      break;
    default:
      gcc_unreachable ();
    }
  if (!val)
    return NULL;

  edge taken = find_taken_edge (bb, vn_valueize (val));
  if (!taken)
    return NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
	     "not executable\n", bb->index, bb->index, taken->dest->index);

  return taken;
}
/* Do SCCVN.  DEFAULT_VN_WALK_KIND_ specifies how we use the alias
   oracle walking during the VN process.  (NOTE(review): a previous
   version returned a success flag; the function is void now, so the
   old "Returns true ... false" wording no longer applied.)  */

void
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  size_t i;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();

  /* Collect pointers we know point to readonly memory.  */
  const_parms = BITMAP_ALLOC (NULL);
  tree fnspec = lookup_attribute ("fn spec",
				  TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
  if (fnspec)
    {
      fnspec = TREE_VALUE (TREE_VALUE (fnspec));
      /* Character 0 of the fnspec string describes the return value;
	 parameters start at index 1.  */
      i = 1;
      for (tree arg = DECL_ARGUMENTS (cfun->decl);
	   arg; arg = DECL_CHAIN (arg), ++i)
	{
	  if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
	    break;
	  if (TREE_STRING_POINTER (fnspec)[i] == 'R'
	      || TREE_STRING_POINTER (fnspec)[i] == 'r')
	    {
	      tree name = ssa_default_def (cfun, arg);
	      if (name)
		bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
	    }
	}
    }

  /* Walk all blocks in dominator order, value-numbering stmts
     SSA defs and decide whether outgoing edges are not executable.  */
  sccvn_dom_walker walker;
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Initialize the value ids and prune out remaining VN_TOPs
     from dead code.  */
  tree name;
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      vn_ssa_aux_t info = VN_INFO (name);
      if (!info->visited
	  || info->valnum == VN_TOP)
	info->valnum = name;
      if (info->valnum == name)
	info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate value ids from value leaders to their followers.  */
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      vn_ssa_aux_t info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
	  && info->valnum != name
	  && info->value_id != VN_INFO (info->valnum)->value_id)
	info->value_id = VN_INFO (info->valnum)->value_id;
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      FOR_EACH_SSA_NAME (i, name, cfun)
	{
	  if (VN_INFO (name)->visited
	      && SSA_VAL (name) != name)
	    {
	      print_generic_expr (dump_file, name);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, SSA_VAL (name));
	      fprintf (dump_file, "\n");
	    }
	}
    }
}
/* Return the maximum value id we have ever seen.  All value ids
   handed out so far are strictly smaller than this bound.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}
/* Return the next unique value id, advancing the global counter.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}
5107 /* Compare two expressions E1 and E2 and return true if they are equal. */
5109 bool
5110 expressions_equal_p (tree e1, tree e2)
5112 /* The obvious case. */
5113 if (e1 == e2)
5114 return true;
5116 /* If either one is VN_TOP consider them equal. */
5117 if (e1 == VN_TOP || e2 == VN_TOP)
5118 return true;
5120 /* If only one of them is null, they cannot be equal. */
5121 if (!e1 || !e2)
5122 return false;
5124 /* Now perform the actual comparison. */
5125 if (TREE_CODE (e1) == TREE_CODE (e2)
5126 && operand_equal_p (e1, e2, OEP_PURE_SAME))
5127 return true;
5129 return false;
/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  /* Derive the trapping-relevant flags from the operation's type for
     comparison, unary and binary opcodes.  */
  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (type)
	       && TYPE_OVERFLOW_TRAPS (type))
	honor_trapv = true;
    }
  /* The second operand matters for e.g. division by a constant.  */
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
				       honor_trapv,
				       honor_nans, honor_snans, rhs2,
				       &handled);
  if (handled
      && ret)
    return true;

  /* Even if the operation itself cannot trap, evaluating an operand
     might (e.g. a memory reference).  */
  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}
/* Dominator walker performing redundancy elimination using the value
   numbers computed by SCCVN (and, when driven from PRE, availability
   of PRE-inserted expressions).  */

class eliminate_dom_walker : public dom_walker
{
public:
  eliminate_dom_walker (cdi_direction, bitmap);
  ~eliminate_dom_walker ();

  virtual edge before_dom_children (basic_block);
  virtual void after_dom_children (basic_block);

  /* Return an available leader for OP's value, or NULL_TREE.  */
  tree eliminate_avail (tree op);
  /* Make OP the available leader for its value.  */
  void eliminate_push_avail (tree op);
  /* Materialize the expression recorded for VAL at *GSI.  */
  tree eliminate_insert (gimple_stmt_iterator *gsi, tree val);

  /* True when driven from PRE (inserted_exprs was supplied).  */
  bool do_pre;
  /* TODO flags accumulated for the caller.  */
  unsigned int el_todo;
  /* Statistics counters.  */
  unsigned int eliminations;
  unsigned int insertions;

  /* SSA names that had their defs inserted by PRE if do_pre.  */
  bitmap inserted_exprs;

  /* Blocks with statements that have had their EH properties changed.  */
  bitmap need_eh_cleanup;

  /* Blocks with statements that have had their AB properties changed.  */
  bitmap need_ab_cleanup;

  /* Statements queued for removal / fixup after the walk.  */
  auto_vec<gimple *> to_remove;
  auto_vec<gimple *> to_fixup;
  /* Per-value leader table indexed by SSA version, and the undo
     stack used to pop leaders when leaving a dominated region.  */
  auto_vec<tree> avail;
  auto_vec<tree> avail_stack;
};
/* Construct an eliminate walker.  INSERTED_EXPRS_ being non-NULL
   signals that we run on behalf of PRE.  */

eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
					    bitmap inserted_exprs_)
  : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
    el_todo (0), eliminations (0), insertions (0),
    inserted_exprs (inserted_exprs_)
{
  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);
}
/* Release the cleanup bitmaps allocated by the constructor.  */

eliminate_dom_walker::~eliminate_dom_walker ()
{
  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);
}
5230 /* Return a leader for OP that is available at the current point of the
5231 eliminate domwalk. */
5233 tree
5234 eliminate_dom_walker::eliminate_avail (tree op)
5236 tree valnum = VN_INFO (op)->valnum;
5237 if (TREE_CODE (valnum) == SSA_NAME)
5239 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5240 return valnum;
5241 if (avail.length () > SSA_NAME_VERSION (valnum))
5242 return avail[SSA_NAME_VERSION (valnum)];
5244 else if (is_gimple_min_invariant (valnum))
5245 return valnum;
5246 return NULL_TREE;
5249 /* At the current point of the eliminate domwalk make OP available. */
5251 void
5252 eliminate_dom_walker::eliminate_push_avail (tree op)
5254 tree valnum = VN_INFO (op)->valnum;
5255 if (TREE_CODE (valnum) == SSA_NAME)
5257 if (avail.length () <= SSA_NAME_VERSION (valnum))
5258 avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
5259 tree pushop = op;
5260 if (avail[SSA_NAME_VERSION (valnum)])
5261 pushop = avail[SSA_NAME_VERSION (valnum)];
5262 avail_stack.safe_push (pushop);
5263 avail[SSA_NAME_VERSION (valnum)] = op;
/* Insert the expression recorded by SCCVN for VAL at *GSI.  Returns
   the leader for the expression if insertion was successful, or
   NULL_TREE when the recorded expression is not a simple single
   conversion/BIT_FIELD_REF/BIT_AND with available operand, or when
   simplification folded it to something already defined elsewhere.  */

tree
eliminate_dom_walker::eliminate_insert (gimple_stmt_iterator *gsi, tree val)
{
  /* We can insert a sequence with a single assignment only.  */
  gimple_seq stmts = VN_INFO (val)->expr;
  if (!gimple_seq_singleton_p (stmts))
    return NULL_TREE;
  gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
  /* Only handle conversions, VIEW_CONVERT_EXPR, BIT_FIELD_REF and
     BIT_AND with a constant mask.  */
  if (!stmt
      || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
	  && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
	  && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
	  && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
	      || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
    return NULL_TREE;

  tree op = gimple_assign_rhs1 (stmt);
  /* For VIEW_CONVERT_EXPR / BIT_FIELD_REF the operand of interest is
     wrapped one level down.  */
  if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
      || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
    op = TREE_OPERAND (op, 0);
  tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
  if (!leader)
    return NULL_TREE;

  /* Build the replacement expression over the leader; gimple_build
     simplifies on the fly.  */
  tree res;
  stmts = NULL;
  if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
    res = gimple_build (&stmts, BIT_FIELD_REF,
			TREE_TYPE (val), leader,
			TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
			TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
  else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
    res = gimple_build (&stmts, BIT_AND_EXPR,
			TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
  else
    res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
			TREE_TYPE (val), leader);
  if (TREE_CODE (res) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (res)
      || gimple_bb (SSA_NAME_DEF_STMT (res)))
    {
      gimple_seq_discard (stmts);

      /* During propagation we have to treat SSA info conservatively
	 and thus we can end up simplifying the inserted expression
	 at elimination time to sth not defined in stmts.  */
      /* But then this is a redundancy we failed to detect.  Which means
	 res now has two values.  That doesn't play well with how
	 we track availability here, so give up.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  if (TREE_CODE (res) == SSA_NAME)
	    res = eliminate_avail (res);
	  if (res)
	    {
	      fprintf (dump_file, "Failed to insert expression for value ");
	      print_generic_expr (dump_file, val);
	      fprintf (dump_file, " which is really fully redundant to ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, "\n");
	    }
	}

      return NULL_TREE;
    }
  else
    {
      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
      /* The new name carries VAL's value number.  */
      VN_INFO_GET (res)->valnum = val;
    }

  insertions++;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserted ");
      print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
    }

  return res;
}
5353 /* Perform elimination for the basic-block B during the domwalk. */
5355 edge
5356 eliminate_dom_walker::before_dom_children (basic_block b)
5358 /* Mark new bb. */
5359 avail_stack.safe_push (NULL_TREE);
5361 /* Skip unreachable blocks marked unreachable during the SCCVN domwalk. */
5362 edge_iterator ei;
5363 edge e;
5364 FOR_EACH_EDGE (e, ei, b->preds)
5365 if (e->flags & EDGE_EXECUTABLE)
5366 break;
5367 if (! e)
5368 return NULL;
5370 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
5372 gphi *phi = gsi.phi ();
5373 tree res = PHI_RESULT (phi);
5375 if (virtual_operand_p (res))
5377 gsi_next (&gsi);
5378 continue;
5381 tree sprime = eliminate_avail (res);
5382 if (sprime
5383 && sprime != res)
5385 if (dump_file && (dump_flags & TDF_DETAILS))
5387 fprintf (dump_file, "Replaced redundant PHI node defining ");
5388 print_generic_expr (dump_file, res);
5389 fprintf (dump_file, " with ");
5390 print_generic_expr (dump_file, sprime);
5391 fprintf (dump_file, "\n");
5394 /* If we inserted this PHI node ourself, it's not an elimination. */
5395 if (! inserted_exprs
5396 || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
5397 eliminations++;
5399 /* If we will propagate into all uses don't bother to do
5400 anything. */
5401 if (may_propagate_copy (res, sprime))
5403 /* Mark the PHI for removal. */
5404 to_remove.safe_push (phi);
5405 gsi_next (&gsi);
5406 continue;
5409 remove_phi_node (&gsi, false);
5411 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
5412 sprime = fold_convert (TREE_TYPE (res), sprime);
5413 gimple *stmt = gimple_build_assign (res, sprime);
5414 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
5415 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
5416 continue;
5419 eliminate_push_avail (res);
5420 gsi_next (&gsi);
5423 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
5424 !gsi_end_p (gsi);
5425 gsi_next (&gsi))
5427 tree sprime = NULL_TREE;
5428 gimple *stmt = gsi_stmt (gsi);
5429 tree lhs = gimple_get_lhs (stmt);
5430 if (lhs && TREE_CODE (lhs) == SSA_NAME
5431 && !gimple_has_volatile_ops (stmt)
5432 /* See PR43491. Do not replace a global register variable when
5433 it is a the RHS of an assignment. Do replace local register
5434 variables since gcc does not guarantee a local variable will
5435 be allocated in register.
5436 ??? The fix isn't effective here. This should instead
5437 be ensured by not value-numbering them the same but treating
5438 them like volatiles? */
5439 && !(gimple_assign_single_p (stmt)
5440 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
5441 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
5442 && is_global_var (gimple_assign_rhs1 (stmt)))))
5444 sprime = eliminate_avail (lhs);
5445 if (!sprime)
5447 /* If there is no existing usable leader but SCCVN thinks
5448 it has an expression it wants to use as replacement,
5449 insert that. */
5450 tree val = VN_INFO (lhs)->valnum;
5451 if (val != VN_TOP
5452 && TREE_CODE (val) == SSA_NAME
5453 && VN_INFO (val)->needs_insertion
5454 && VN_INFO (val)->expr != NULL
5455 && (sprime = eliminate_insert (&gsi, val)) != NULL_TREE)
5456 eliminate_push_avail (sprime);
5459 /* If this now constitutes a copy duplicate points-to
5460 and range info appropriately. This is especially
5461 important for inserted code. See tree-ssa-copy.c
5462 for similar code. */
5463 if (sprime
5464 && TREE_CODE (sprime) == SSA_NAME)
5466 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
5467 if (POINTER_TYPE_P (TREE_TYPE (lhs))
5468 && VN_INFO_PTR_INFO (lhs)
5469 && ! VN_INFO_PTR_INFO (sprime))
5471 duplicate_ssa_name_ptr_info (sprime,
5472 VN_INFO_PTR_INFO (lhs));
5473 if (b != sprime_b)
5474 mark_ptr_info_alignment_unknown
5475 (SSA_NAME_PTR_INFO (sprime));
5477 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5478 && VN_INFO_RANGE_INFO (lhs)
5479 && ! VN_INFO_RANGE_INFO (sprime)
5480 && b == sprime_b)
5481 duplicate_ssa_name_range_info (sprime,
5482 VN_INFO_RANGE_TYPE (lhs),
5483 VN_INFO_RANGE_INFO (lhs));
5486 /* Inhibit the use of an inserted PHI on a loop header when
5487 the address of the memory reference is a simple induction
5488 variable. In other cases the vectorizer won't do anything
5489 anyway (either it's loop invariant or a complicated
5490 expression). */
5491 if (sprime
5492 && TREE_CODE (sprime) == SSA_NAME
5493 && do_pre
5494 && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
5495 && loop_outer (b->loop_father)
5496 && has_zero_uses (sprime)
5497 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
5498 && gimple_assign_load_p (stmt))
5500 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
5501 basic_block def_bb = gimple_bb (def_stmt);
5502 if (gimple_code (def_stmt) == GIMPLE_PHI
5503 && def_bb->loop_father->header == def_bb)
5505 loop_p loop = def_bb->loop_father;
5506 ssa_op_iter iter;
5507 tree op;
5508 bool found = false;
5509 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5511 affine_iv iv;
5512 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
5513 if (def_bb
5514 && flow_bb_inside_loop_p (loop, def_bb)
5515 && simple_iv (loop, loop, op, &iv, true))
5517 found = true;
5518 break;
5521 if (found)
5523 if (dump_file && (dump_flags & TDF_DETAILS))
5525 fprintf (dump_file, "Not replacing ");
5526 print_gimple_expr (dump_file, stmt, 0);
5527 fprintf (dump_file, " with ");
5528 print_generic_expr (dump_file, sprime);
5529 fprintf (dump_file, " which would add a loop"
5530 " carried dependence to loop %d\n",
5531 loop->num);
5533 /* Don't keep sprime available. */
5534 sprime = NULL_TREE;
5539 if (sprime)
5541 /* If we can propagate the value computed for LHS into
5542 all uses don't bother doing anything with this stmt. */
5543 if (may_propagate_copy (lhs, sprime))
5545 /* Mark it for removal. */
5546 to_remove.safe_push (stmt);
5548 /* ??? Don't count copy/constant propagations. */
5549 if (gimple_assign_single_p (stmt)
5550 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5551 || gimple_assign_rhs1 (stmt) == sprime))
5552 continue;
5554 if (dump_file && (dump_flags & TDF_DETAILS))
5556 fprintf (dump_file, "Replaced ");
5557 print_gimple_expr (dump_file, stmt, 0);
5558 fprintf (dump_file, " with ");
5559 print_generic_expr (dump_file, sprime);
5560 fprintf (dump_file, " in all uses of ");
5561 print_gimple_stmt (dump_file, stmt, 0);
5564 eliminations++;
5565 continue;
5568 /* If this is an assignment from our leader (which
5569 happens in the case the value-number is a constant)
5570 then there is nothing to do. */
5571 if (gimple_assign_single_p (stmt)
5572 && sprime == gimple_assign_rhs1 (stmt))
5573 continue;
5575 /* Else replace its RHS. */
5576 bool can_make_abnormal_goto
5577 = is_gimple_call (stmt)
5578 && stmt_can_make_abnormal_goto (stmt);
5580 if (dump_file && (dump_flags & TDF_DETAILS))
5582 fprintf (dump_file, "Replaced ");
5583 print_gimple_expr (dump_file, stmt, 0);
5584 fprintf (dump_file, " with ");
5585 print_generic_expr (dump_file, sprime);
5586 fprintf (dump_file, " in ");
5587 print_gimple_stmt (dump_file, stmt, 0);
5590 eliminations++;
5591 gimple *orig_stmt = stmt;
5592 if (!useless_type_conversion_p (TREE_TYPE (lhs),
5593 TREE_TYPE (sprime)))
5594 sprime = fold_convert (TREE_TYPE (lhs), sprime);
5595 tree vdef = gimple_vdef (stmt);
5596 tree vuse = gimple_vuse (stmt);
5597 propagate_tree_value_into_stmt (&gsi, sprime);
5598 stmt = gsi_stmt (gsi);
5599 update_stmt (stmt);
5600 if (vdef != gimple_vdef (stmt))
5601 VN_INFO (vdef)->valnum = vuse;
5603 /* If we removed EH side-effects from the statement, clean
5604 its EH information. */
5605 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
5607 bitmap_set_bit (need_eh_cleanup,
5608 gimple_bb (stmt)->index);
5609 if (dump_file && (dump_flags & TDF_DETAILS))
5610 fprintf (dump_file, " Removed EH side-effects.\n");
5613 /* Likewise for AB side-effects. */
5614 if (can_make_abnormal_goto
5615 && !stmt_can_make_abnormal_goto (stmt))
5617 bitmap_set_bit (need_ab_cleanup,
5618 gimple_bb (stmt)->index);
5619 if (dump_file && (dump_flags & TDF_DETAILS))
5620 fprintf (dump_file, " Removed AB side-effects.\n");
5623 continue;
5627 /* If the statement is a scalar store, see if the expression
5628 has the same value number as its rhs. If so, the store is
5629 dead. */
5630 if (gimple_assign_single_p (stmt)
5631 && !gimple_has_volatile_ops (stmt)
5632 && !is_gimple_reg (gimple_assign_lhs (stmt))
5633 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5634 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
5636 tree val;
5637 tree rhs = gimple_assign_rhs1 (stmt);
5638 vn_reference_t vnresult;
5639 val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
5640 &vnresult, false);
5641 if (TREE_CODE (rhs) == SSA_NAME)
5642 rhs = VN_INFO (rhs)->valnum;
5643 if (val
5644 && operand_equal_p (val, rhs, 0))
5646 /* We can only remove the later store if the former aliases
5647 at least all accesses the later one does or if the store
5648 was to readonly memory storing the same value. */
5649 alias_set_type set = get_alias_set (lhs);
5650 if (! vnresult
5651 || vnresult->set == set
5652 || alias_set_subset_of (set, vnresult->set))
5654 if (dump_file && (dump_flags & TDF_DETAILS))
5656 fprintf (dump_file, "Deleted redundant store ");
5657 print_gimple_stmt (dump_file, stmt, 0);
5660 /* Queue stmt for removal. */
5661 to_remove.safe_push (stmt);
5662 continue;
5667 /* If this is a control statement value numbering left edges
5668 unexecuted on force the condition in a way consistent with
5669 that. */
5670 if (gcond *cond = dyn_cast <gcond *> (stmt))
5672 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
5673 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
5675 if (dump_file && (dump_flags & TDF_DETAILS))
5677 fprintf (dump_file, "Removing unexecutable edge from ");
5678 print_gimple_stmt (dump_file, stmt, 0);
5680 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
5681 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
5682 gimple_cond_make_true (cond);
5683 else
5684 gimple_cond_make_false (cond);
5685 update_stmt (cond);
5686 el_todo |= TODO_cleanup_cfg;
5687 continue;
5691 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
5692 bool was_noreturn = (is_gimple_call (stmt)
5693 && gimple_call_noreturn_p (stmt));
5694 tree vdef = gimple_vdef (stmt);
5695 tree vuse = gimple_vuse (stmt);
5697 /* If we didn't replace the whole stmt (or propagate the result
5698 into all uses), replace all uses on this stmt with their
5699 leaders. */
5700 bool modified = false;
5701 use_operand_p use_p;
5702 ssa_op_iter iter;
5703 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5705 tree use = USE_FROM_PTR (use_p);
5706 /* ??? The call code above leaves stmt operands un-updated. */
5707 if (TREE_CODE (use) != SSA_NAME)
5708 continue;
5709 tree sprime = eliminate_avail (use);
5710 if (sprime && sprime != use
5711 && may_propagate_copy (use, sprime)
5712 /* We substitute into debug stmts to avoid excessive
5713 debug temporaries created by removed stmts, but we need
5714 to avoid doing so for inserted sprimes as we never want
5715 to create debug temporaries for them. */
5716 && (!inserted_exprs
5717 || TREE_CODE (sprime) != SSA_NAME
5718 || !is_gimple_debug (stmt)
5719 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
5721 propagate_value (use_p, sprime);
5722 modified = true;
5726 /* Fold the stmt if modified, this canonicalizes MEM_REFs we propagated
5727 into which is a requirement for the IPA devirt machinery. */
5728 gimple *old_stmt = stmt;
5729 if (modified)
5731 /* If a formerly non-invariant ADDR_EXPR is turned into an
5732 invariant one it was on a separate stmt. */
5733 if (gimple_assign_single_p (stmt)
5734 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
5735 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
5736 gimple_stmt_iterator prev = gsi;
5737 gsi_prev (&prev);
5738 if (fold_stmt (&gsi))
5740 /* fold_stmt may have created new stmts inbetween
5741 the previous stmt and the folded stmt. Mark
5742 all defs created there as varying to not confuse
5743 the SCCVN machinery as we're using that even during
5744 elimination. */
5745 if (gsi_end_p (prev))
5746 prev = gsi_start_bb (b);
5747 else
5748 gsi_next (&prev);
5749 if (gsi_stmt (prev) != gsi_stmt (gsi))
5752 tree def;
5753 ssa_op_iter dit;
5754 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
5755 dit, SSA_OP_ALL_DEFS)
5756 /* As existing DEFs may move between stmts
5757 we have to guard VN_INFO_GET. */
5758 if (! has_VN_INFO (def))
5759 VN_INFO_GET (def)->valnum = def;
5760 if (gsi_stmt (prev) == gsi_stmt (gsi))
5761 break;
5762 gsi_next (&prev);
5764 while (1);
5766 stmt = gsi_stmt (gsi);
5767 /* In case we folded the stmt away schedule the NOP for removal. */
5768 if (gimple_nop_p (stmt))
5769 to_remove.safe_push (stmt);
5772 /* Visit indirect calls and turn them into direct calls if
5773 possible using the devirtualization machinery. Do this before
5774 checking for required EH/abnormal/noreturn cleanup as devird
5775 may expose more of those. */
5776 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5778 tree fn = gimple_call_fn (call_stmt);
5779 if (fn
5780 && flag_devirtualize
5781 && virtual_method_call_p (fn))
5783 tree otr_type = obj_type_ref_class (fn);
5784 unsigned HOST_WIDE_INT otr_tok
5785 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
5786 tree instance;
5787 ipa_polymorphic_call_context context (current_function_decl,
5788 fn, stmt, &instance);
5789 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
5790 otr_type, stmt);
5791 bool final;
5792 vec <cgraph_node *> targets
5793 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
5794 otr_tok, context, &final);
5795 if (dump_file)
5796 dump_possible_polymorphic_call_targets (dump_file,
5797 obj_type_ref_class (fn),
5798 otr_tok, context);
5799 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5801 tree fn;
5802 if (targets.length () == 1)
5803 fn = targets[0]->decl;
5804 else
5805 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5806 if (dump_enabled_p ())
5808 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
5809 "converting indirect call to "
5810 "function %s\n",
5811 lang_hooks.decl_printable_name (fn, 2));
5813 gimple_call_set_fndecl (call_stmt, fn);
5814 /* If changing the call to __builtin_unreachable
5815 or similar noreturn function, adjust gimple_call_fntype
5816 too. */
5817 if (gimple_call_noreturn_p (call_stmt)
5818 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
5819 && TYPE_ARG_TYPES (TREE_TYPE (fn))
5820 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
5821 == void_type_node))
5822 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
5823 maybe_remove_unused_call_args (cfun, call_stmt);
5824 modified = true;
5829 if (modified)
5831 /* When changing a call into a noreturn call, cfg cleanup
5832 is needed to fix up the noreturn call. */
5833 if (!was_noreturn
5834 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
5835 to_fixup.safe_push (stmt);
5836 /* When changing a condition or switch into one we know what
5837 edge will be executed, schedule a cfg cleanup. */
5838 if ((gimple_code (stmt) == GIMPLE_COND
5839 && (gimple_cond_true_p (as_a <gcond *> (stmt))
5840 || gimple_cond_false_p (as_a <gcond *> (stmt))))
5841 || (gimple_code (stmt) == GIMPLE_SWITCH
5842 && TREE_CODE (gimple_switch_index
5843 (as_a <gswitch *> (stmt))) == INTEGER_CST))
5844 el_todo |= TODO_cleanup_cfg;
5845 /* If we removed EH side-effects from the statement, clean
5846 its EH information. */
5847 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
5849 bitmap_set_bit (need_eh_cleanup,
5850 gimple_bb (stmt)->index);
5851 if (dump_file && (dump_flags & TDF_DETAILS))
5852 fprintf (dump_file, " Removed EH side-effects.\n");
5854 /* Likewise for AB side-effects. */
5855 if (can_make_abnormal_goto
5856 && !stmt_can_make_abnormal_goto (stmt))
5858 bitmap_set_bit (need_ab_cleanup,
5859 gimple_bb (stmt)->index);
5860 if (dump_file && (dump_flags & TDF_DETAILS))
5861 fprintf (dump_file, " Removed AB side-effects.\n");
5863 update_stmt (stmt);
5864 if (vdef != gimple_vdef (stmt))
5865 VN_INFO (vdef)->valnum = vuse;
5868 /* Make new values available - for fully redundant LHS we
5869 continue with the next stmt above and skip this. */
5870 def_operand_p defp;
5871 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
5872 eliminate_push_avail (DEF_FROM_PTR (defp));
5875 /* Replace destination PHI arguments. */
5876 FOR_EACH_EDGE (e, ei, b->succs)
5877 if (e->flags & EDGE_EXECUTABLE)
5878 for (gphi_iterator gsi = gsi_start_phis (e->dest);
5879 !gsi_end_p (gsi);
5880 gsi_next (&gsi))
5882 gphi *phi = gsi.phi ();
5883 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
5884 tree arg = USE_FROM_PTR (use_p);
5885 if (TREE_CODE (arg) != SSA_NAME
5886 || virtual_operand_p (arg))
5887 continue;
5888 tree sprime = eliminate_avail (arg);
5889 if (sprime && may_propagate_copy (arg, sprime))
5890 propagate_value (use_p, sprime);
5892 return NULL;
5895 /* Make no longer available leaders no longer available. */
5897 void
5898 eliminate_dom_walker::after_dom_children (basic_block)
5900 tree entry;
5901 while ((entry = avail_stack.pop ()) != NULL_TREE)
5903 tree valnum = VN_INFO (entry)->valnum;
5904 tree old = avail[SSA_NAME_VERSION (valnum)];
5905 if (old == entry)
5906 avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
5907 else
5908 avail[SSA_NAME_VERSION (valnum)] = entry;
5912 /* Eliminate fully redundant computations. */
5914 unsigned int
5915 vn_eliminate (bitmap inserted_exprs)
5917 eliminate_dom_walker el (CDI_DOMINATORS, inserted_exprs);
5918 el.avail.reserve (num_ssa_names);
5920 el.walk (cfun->cfg->x_entry_block_ptr);
5922 /* We cannot remove stmts during BB walk, especially not release SSA
5923 names there as this confuses the VN machinery. The stmts ending
5924 up in to_remove are either stores or simple copies.
5925 Remove stmts in reverse order to make debug stmt creation possible. */
5926 while (!el.to_remove.is_empty ())
5928 gimple *stmt = el.to_remove.pop ();
5930 if (dump_file && (dump_flags & TDF_DETAILS))
5932 fprintf (dump_file, "Removing dead stmt ");
5933 print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
5936 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5937 if (gimple_code (stmt) == GIMPLE_PHI)
5938 remove_phi_node (&gsi, true);
5939 else
5941 basic_block bb = gimple_bb (stmt);
5942 unlink_stmt_vdef (stmt);
5943 if (gsi_remove (&gsi, true))
5944 bitmap_set_bit (el.need_eh_cleanup, bb->index);
5945 if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
5946 bitmap_set_bit (el.need_ab_cleanup, bb->index);
5947 release_defs (stmt);
5950 /* Removing a stmt may expose a forwarder block. */
5951 el.el_todo |= TODO_cleanup_cfg;
5954 /* Fixup stmts that became noreturn calls. This may require splitting
5955 blocks and thus isn't possible during the dominator walk. Do this
5956 in reverse order so we don't inadvertedly remove a stmt we want to
5957 fixup by visiting a dominating now noreturn call first. */
5958 while (!el.to_fixup.is_empty ())
5960 gimple *stmt = el.to_fixup.pop ();
5962 if (dump_file && (dump_flags & TDF_DETAILS))
5964 fprintf (dump_file, "Fixing up noreturn call ");
5965 print_gimple_stmt (dump_file, stmt, 0);
5968 if (fixup_noreturn_call (stmt))
5969 el.el_todo |= TODO_cleanup_cfg;
5972 bool do_eh_cleanup = !bitmap_empty_p (el.need_eh_cleanup);
5973 bool do_ab_cleanup = !bitmap_empty_p (el.need_ab_cleanup);
5975 if (do_eh_cleanup)
5976 gimple_purge_all_dead_eh_edges (el.need_eh_cleanup);
5978 if (do_ab_cleanup)
5979 gimple_purge_all_dead_abnormal_call_edges (el.need_ab_cleanup);
5981 if (do_eh_cleanup || do_ab_cleanup)
5982 el.el_todo |= TODO_cleanup_cfg;
5984 statistics_counter_event (cfun, "Eliminated", el.eliminations);
5985 statistics_counter_event (cfun, "Insertions", el.insertions);
5987 return el.el_todo;
5991 namespace {
/* Pass descriptor for FRE (full redundancy elimination): value
   numbering plus elimination, without PRE's insertion phase.  */

const pass_data pass_data_fre =
{
  GIMPLE_PASS, /* type */
  "fre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* The FRE pass object.  Gated on -ftree-fre; clonable so the pass
   manager can schedule multiple instances of it.  */

class pass_fre : public gimple_opt_pass
{
public:
  pass_fre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fre, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fre (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_fre != 0; }
  virtual unsigned int execute (function *);

}; // class pass_fre
6020 unsigned int
6021 pass_fre::execute (function *)
6023 unsigned int todo = 0;
6025 run_scc_vn (VN_WALKREWRITE);
6027 /* Remove all the redundant expressions. */
6028 todo |= vn_eliminate (NULL);
6030 scc_vn_restore_ssa_info ();
6031 free_scc_vn ();
6033 return todo;
6036 } // anon namespace
/* Factory used by the pass manager to instantiate the FRE pass.  */

gimple_opt_pass *
make_pass_fre (gcc::context *ctxt)
{
  return new pass_fre (ctxt);
}