gcc/tree-ssa-sccvn.c
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2017 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimplify.h"
45 #include "flags.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "dumpfile.h"
55 #include "cfgloop.h"
56 #include "params.h"
57 #include "tree-ssa-propagate.h"
58 #include "tree-ssa-sccvn.h"
59 #include "tree-cfg.h"
60 #include "domwalk.h"
61 #include "gimple-iterator.h"
62 #include "gimple-match.h"
64 /* This algorithm is based on the SCC algorithm presented by Keith
65 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
66 (http://citeseer.ist.psu.edu/41805.html). In
67 straight line code, it is equivalent to a regular hash based value
68 numbering that is performed in reverse postorder.
70 For code with cycles, there are two alternatives, both of which
71 require keeping the hashtables separate from the actual list of
72 value numbers for SSA names.
74 1. Iterate value numbering in an RPO walk of the blocks, removing
75 all the entries from the hashtable after each iteration (but
76 keeping the SSA name->value number mapping between iterations).
77 Iterate until it does not change.
79 2. Perform value numbering as part of an SCC walk on the SSA graph,
80 iterating only the cycles in the SSA graph until they do not change
81 (using a separate, optimistic hashtable for value numbering the SCC
82 operands).
84 The second is not just faster in practice (because most SSA graph
85 cycles do not involve all the variables in the graph), it also has
86 some nice properties.
88 One of these nice properties is that when we pop an SCC off the
89 stack, we are guaranteed to have processed all the operands coming from
90 *outside of that SCC*, so we do not need to do anything special to
91 ensure they have value numbers.
93 Another nice property is that the SCC walk is done as part of a DFS
94 of the SSA graph, which makes it easy to perform combining and
95 simplifying operations at the same time.
97 The code below is deliberately written in a way that makes it easy
98 to separate the SCC walk from the other work it does.
100 In order to propagate constants through the code, we track which
101 expressions contain constants, and use those while folding. In
102 theory, we could also track expressions whose value numbers are
103 replaced, in case we end up folding based on expression
104 identities.
106 In order to value number memory, we assign value numbers to vuses.
107 This enables us to note that, for example, stores of the same
108 value to the same address from the same starting memory state are
109 equivalent.
110 TODO:
112 1. We can iterate only the changing portions of the SCCs, but
113 I have not seen an SCC big enough for this to be a win.
114 2. If you differentiate between phi nodes for loops and phi nodes
115 for if-then-else, you can properly consider phi nodes in different
116 blocks for equivalence.
117 3. We could value number vuses in more cases, particularly, whole
118 structure copies.
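/* Editorial illustration (not part of the original comment above): in
   straight-line code the tables below give equivalent right-hand sides
   the same value number, e.g. for

     a_1 = x_2 + 1;
     b_3 = x_2 + 1;

   both statements produce the same (PLUS_EXPR, x_2, 1) nary entry, so
   a_1 and b_3 receive the same value number and later uses of b_3 can
   be replaced by a_1 during elimination.  */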
122 static tree *last_vuse_ptr;
123 static vn_lookup_kind vn_walk_kind;
124 static vn_lookup_kind default_vn_walk_kind;
125 bitmap const_parms;
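/* Editorial note on the file-scope state above (inferred from its uses in
   this file): LAST_VUSE_PTR, when non-NULL, receives the last VUSE visited
   by the reference walker (see vn_reference_lookup_2); VN_WALK_KIND and
   DEFAULT_VN_WALK_KIND select how aggressively reference lookups walk and
   rewrite memory expressions; CONST_PARMS holds the SSA versions of default
   definitions of parameters known to point to readonly memory (see
   vn_reference_lookup_3).  */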
127 /* vn_nary_op hashtable helpers. */
129 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
131 typedef vn_nary_op_s *compare_type;
132 static inline hashval_t hash (const vn_nary_op_s *);
133 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
136 /* Return the computed hashcode for nary operation P1. */
138 inline hashval_t
139 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
141 return vno1->hashcode;
144 /* Compare nary operations P1 and P2 and return true if they are
145 equivalent. */
147 inline bool
148 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
150 return vn_nary_op_eq (vno1, vno2);
153 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
154 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
157 /* vn_phi hashtable helpers. */
159 static int
160 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
162 struct vn_phi_hasher : pointer_hash <vn_phi_s>
164 static inline hashval_t hash (const vn_phi_s *);
165 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
166 static inline void remove (vn_phi_s *);
169 /* Return the computed hashcode for phi operation P1. */
171 inline hashval_t
172 vn_phi_hasher::hash (const vn_phi_s *vp1)
174 return vp1->hashcode;
177 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
179 inline bool
180 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
182 return vn_phi_eq (vp1, vp2);
185 /* Free a phi operation structure VP. */
187 inline void
188 vn_phi_hasher::remove (vn_phi_s *phi)
190 phi->phiargs.release ();
193 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
194 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
197 /* Compare two reference operands P1 and P2 for equality. Return true if
198 they are equal, and false otherwise. */
200 static int
201 vn_reference_op_eq (const void *p1, const void *p2)
203 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
204 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
206 return (vro1->opcode == vro2->opcode
207 /* We do not care for differences in type qualification. */
208 && (vro1->type == vro2->type
209 || (vro1->type && vro2->type
210 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
211 TYPE_MAIN_VARIANT (vro2->type))))
212 && expressions_equal_p (vro1->op0, vro2->op0)
213 && expressions_equal_p (vro1->op1, vro2->op1)
214 && expressions_equal_p (vro1->op2, vro2->op2));
217 /* Free a reference operation structure VP. */
219 static inline void
220 free_reference (vn_reference_s *vr)
222 vr->operands.release ();
226 /* vn_reference hashtable helpers. */
228 struct vn_reference_hasher : pointer_hash <vn_reference_s>
230 static inline hashval_t hash (const vn_reference_s *);
231 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
232 static inline void remove (vn_reference_s *);
235 /* Return the hashcode for a given reference operation P1. */
237 inline hashval_t
238 vn_reference_hasher::hash (const vn_reference_s *vr1)
240 return vr1->hashcode;
243 inline bool
244 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
246 return vn_reference_eq (v, c);
249 inline void
250 vn_reference_hasher::remove (vn_reference_s *v)
252 free_reference (v);
255 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
256 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
259 /* The set of hashtables and alloc_pool's for their items. */
261 typedef struct vn_tables_s
263 vn_nary_op_table_type *nary;
264 vn_phi_table_type *phis;
265 vn_reference_table_type *references;
266 struct obstack nary_obstack;
267 object_allocator<vn_phi_s> *phis_pool;
268 object_allocator<vn_reference_s> *references_pool;
269 } *vn_tables_t;
272 /* vn_constant hashtable helpers. */
274 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
276 static inline hashval_t hash (const vn_constant_s *);
277 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
280 /* Hash table hash function for vn_constant_t. */
282 inline hashval_t
283 vn_constant_hasher::hash (const vn_constant_s *vc1)
285 return vc1->hashcode;
288 /* Hash table equality function for vn_constant_t. */
290 inline bool
291 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
293 if (vc1->hashcode != vc2->hashcode)
294 return false;
296 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
299 static hash_table<vn_constant_hasher> *constant_to_value_id;
300 static bitmap constant_value_ids;
303 /* Valid hashtables storing information we have proven to be
304 correct. */
306 static vn_tables_t valid_info;
308 /* Optimistic hashtables storing information we are making assumptions about
309 during iterations. */
311 static vn_tables_t optimistic_info;
313 /* Pointer to the set of hashtables that is currently being used.
314 Should always point to either the optimistic_info, or the
315 valid_info. */
317 static vn_tables_t current_info;
320 /* Reverse post order index for each basic block. */
322 static int *rpo_numbers;
324 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
326 /* Return the SSA value of the VUSE x, supporting released VDEFs
327 during elimination which will value-number the VDEF to the
328 associated VUSE (but not substitute in the whole lattice). */
330 static inline tree
331 vuse_ssa_val (tree x)
333 if (!x)
334 return NULL_TREE;
338 x = SSA_VAL (x);
340 while (SSA_NAME_IN_FREE_LIST (x));
342 return x;
345 /* This represents the top of the VN lattice, which is the universal
346 value. */
348 tree VN_TOP;
350 /* Unique counter for our value ids. */
352 static unsigned int next_value_id;
354 /* Next DFS number and the stack for strongly connected component
355 detection. */
357 static unsigned int next_dfs_num;
358 static vec<tree> sccstack;
362 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
363 are allocated on an obstack for locality reasons, and to free them
364 without looping over the vec. */
366 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
367 static struct obstack vn_ssa_aux_obstack;
369 /* Return whether there is value numbering information for a given SSA name. */
371 bool
372 has_VN_INFO (tree name)
374 if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
375 return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
376 return false;
379 /* Return the value numbering information for a given SSA name. */
381 vn_ssa_aux_t
382 VN_INFO (tree name)
384 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
385 gcc_checking_assert (res);
386 return res;
389 /* Set the value numbering info for a given SSA name to a given
390 value. */
392 static inline void
393 VN_INFO_SET (tree name, vn_ssa_aux_t value)
395 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
398 /* Initialize the value numbering info for a given SSA name.
399 This should be called just once for every SSA name. */
401 vn_ssa_aux_t
402 VN_INFO_GET (tree name)
404 vn_ssa_aux_t newinfo;
406 gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
407 || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
408 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
409 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
410 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
411 vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
412 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
413 return newinfo;
417 /* Return the vn_kind the expression computed by the stmt should be
418 associated with. */
420 enum vn_kind
421 vn_get_stmt_kind (gimple *stmt)
423 switch (gimple_code (stmt))
425 case GIMPLE_CALL:
426 return VN_REFERENCE;
427 case GIMPLE_PHI:
428 return VN_PHI;
429 case GIMPLE_ASSIGN:
431 enum tree_code code = gimple_assign_rhs_code (stmt);
432 tree rhs1 = gimple_assign_rhs1 (stmt);
433 switch (get_gimple_rhs_class (code))
435 case GIMPLE_UNARY_RHS:
436 case GIMPLE_BINARY_RHS:
437 case GIMPLE_TERNARY_RHS:
438 return VN_NARY;
439 case GIMPLE_SINGLE_RHS:
440 switch (TREE_CODE_CLASS (code))
442 case tcc_reference:
443 /* VOP-less references can go through the unary case. */
444 if ((code == REALPART_EXPR
445 || code == IMAGPART_EXPR
446 || code == VIEW_CONVERT_EXPR
447 || code == BIT_FIELD_REF)
448 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
449 return VN_NARY;
451 /* Fallthrough. */
452 case tcc_declaration:
453 return VN_REFERENCE;
455 case tcc_constant:
456 return VN_CONSTANT;
458 default:
459 if (code == ADDR_EXPR)
460 return (is_gimple_min_invariant (rhs1)
461 ? VN_CONSTANT : VN_REFERENCE);
462 else if (code == CONSTRUCTOR)
463 return VN_NARY;
464 return VN_NONE;
466 default:
467 return VN_NONE;
470 default:
471 return VN_NONE;
475 /* Lookup a value id for CONSTANT and return it. If it does not
476 exist returns 0. */
478 unsigned int
479 get_constant_value_id (tree constant)
481 vn_constant_s **slot;
482 struct vn_constant_s vc;
484 vc.hashcode = vn_hash_constant_with_type (constant);
485 vc.constant = constant;
486 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
487 if (slot)
488 return (*slot)->value_id;
489 return 0;
492 /* Lookup a value id for CONSTANT, and if it does not exist, create a
493 new one and return it. If it does exist, return it. */
495 unsigned int
496 get_or_alloc_constant_value_id (tree constant)
498 vn_constant_s **slot;
499 struct vn_constant_s vc;
500 vn_constant_t vcp;
502 vc.hashcode = vn_hash_constant_with_type (constant);
503 vc.constant = constant;
504 slot = constant_to_value_id->find_slot (&vc, INSERT);
505 if (*slot)
506 return (*slot)->value_id;
508 vcp = XNEW (struct vn_constant_s);
509 vcp->hashcode = vc.hashcode;
510 vcp->constant = constant;
511 vcp->value_id = get_next_value_id ();
512 *slot = vcp;
513 bitmap_set_bit (constant_value_ids, vcp->value_id);
514 return vcp->value_id;
517 /* Return true if V is a value id for a constant. */
519 bool
520 value_id_constant_p (unsigned int v)
522 return bitmap_bit_p (constant_value_ids, v);
525 /* Compute the hash for a reference operand VRO1. */
527 static void
528 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
530 hstate.add_int (vro1->opcode);
531 if (vro1->op0)
532 inchash::add_expr (vro1->op0, hstate);
533 if (vro1->op1)
534 inchash::add_expr (vro1->op1, hstate);
535 if (vro1->op2)
536 inchash::add_expr (vro1->op2, hstate);
539 /* Compute a hash for the reference operation VR1 and return it. */
541 static hashval_t
542 vn_reference_compute_hash (const vn_reference_t vr1)
544 inchash::hash hstate;
545 hashval_t result;
546 int i;
547 vn_reference_op_t vro;
548 HOST_WIDE_INT off = -1;
549 bool deref = false;
551 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
553 if (vro->opcode == MEM_REF)
554 deref = true;
555 else if (vro->opcode != ADDR_EXPR)
556 deref = false;
557 if (vro->off != -1)
559 if (off == -1)
560 off = 0;
561 off += vro->off;
563 else
565 if (off != -1
566 && off != 0)
567 hstate.add_int (off);
568 off = -1;
569 if (deref
570 && vro->opcode == ADDR_EXPR)
572 if (vro->op0)
574 tree op = TREE_OPERAND (vro->op0, 0);
575 hstate.add_int (TREE_CODE (op));
576 inchash::add_expr (op, hstate);
579 else
580 vn_reference_op_compute_hash (vro, hstate);
583 result = hstate.end ();
584 /* ??? We would ICE later if we hash instead of adding that in. */
585 if (vr1->vuse)
586 result += SSA_NAME_VERSION (vr1->vuse);
588 return result;
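/* Editorial note: the DEREF/ADDR_EXPR handling above hashes a MEM_REF of
   an ADDR_EXPR (MEM[&decl]) via the underlying decl rather than the
   address expression itself, matching the canonicalization performed by
   vn_reference_eq below so that equal references also hash equally.  */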
591 /* Return true if reference operations VR1 and VR2 are equivalent. This
592 means they have the same set of operands and vuses. */
594 bool
595 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
597 unsigned i, j;
599 /* Early out if this is not a hash collision. */
600 if (vr1->hashcode != vr2->hashcode)
601 return false;
603 /* The VOP needs to be the same. */
604 if (vr1->vuse != vr2->vuse)
605 return false;
607 /* If the operands are the same we are done. */
608 if (vr1->operands == vr2->operands)
609 return true;
611 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
612 return false;
614 if (INTEGRAL_TYPE_P (vr1->type)
615 && INTEGRAL_TYPE_P (vr2->type))
617 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
618 return false;
620 else if (INTEGRAL_TYPE_P (vr1->type)
621 && (TYPE_PRECISION (vr1->type)
622 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
623 return false;
624 else if (INTEGRAL_TYPE_P (vr2->type)
625 && (TYPE_PRECISION (vr2->type)
626 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
627 return false;
629 i = 0;
630 j = 0;
633 HOST_WIDE_INT off1 = 0, off2 = 0;
634 vn_reference_op_t vro1, vro2;
635 vn_reference_op_s tem1, tem2;
636 bool deref1 = false, deref2 = false;
637 for (; vr1->operands.iterate (i, &vro1); i++)
639 if (vro1->opcode == MEM_REF)
640 deref1 = true;
641 /* Do not look through a storage order barrier. */
642 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
643 return false;
644 if (vro1->off == -1)
645 break;
646 off1 += vro1->off;
648 for (; vr2->operands.iterate (j, &vro2); j++)
650 if (vro2->opcode == MEM_REF)
651 deref2 = true;
652 /* Do not look through a storage order barrier. */
653 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
654 return false;
655 if (vro2->off == -1)
656 break;
657 off2 += vro2->off;
659 if (off1 != off2)
660 return false;
661 if (deref1 && vro1->opcode == ADDR_EXPR)
663 memset (&tem1, 0, sizeof (tem1));
664 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
665 tem1.type = TREE_TYPE (tem1.op0);
666 tem1.opcode = TREE_CODE (tem1.op0);
667 vro1 = &tem1;
668 deref1 = false;
670 if (deref2 && vro2->opcode == ADDR_EXPR)
672 memset (&tem2, 0, sizeof (tem2));
673 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
674 tem2.type = TREE_TYPE (tem2.op0);
675 tem2.opcode = TREE_CODE (tem2.op0);
676 vro2 = &tem2;
677 deref2 = false;
679 if (deref1 != deref2)
680 return false;
681 if (!vn_reference_op_eq (vro1, vro2))
682 return false;
683 ++j;
684 ++i;
686 while (vr1->operands.length () != i
687 || vr2->operands.length () != j);
689 return true;
692 /* Copy the operations present in load/store REF into RESULT, a vector of
693 vn_reference_op_s's. */
695 static void
696 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
698 if (TREE_CODE (ref) == TARGET_MEM_REF)
700 vn_reference_op_s temp;
702 result->reserve (3);
704 memset (&temp, 0, sizeof (temp));
705 temp.type = TREE_TYPE (ref);
706 temp.opcode = TREE_CODE (ref);
707 temp.op0 = TMR_INDEX (ref);
708 temp.op1 = TMR_STEP (ref);
709 temp.op2 = TMR_OFFSET (ref);
710 temp.off = -1;
711 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
712 temp.base = MR_DEPENDENCE_BASE (ref);
713 result->quick_push (temp);
715 memset (&temp, 0, sizeof (temp));
716 temp.type = NULL_TREE;
717 temp.opcode = ERROR_MARK;
718 temp.op0 = TMR_INDEX2 (ref);
719 temp.off = -1;
720 result->quick_push (temp);
722 memset (&temp, 0, sizeof (temp));
723 temp.type = NULL_TREE;
724 temp.opcode = TREE_CODE (TMR_BASE (ref));
725 temp.op0 = TMR_BASE (ref);
726 temp.off = -1;
727 result->quick_push (temp);
728 return;
731 /* For non-calls, store the information that makes up the address. */
732 tree orig = ref;
733 while (ref)
735 vn_reference_op_s temp;
737 memset (&temp, 0, sizeof (temp));
738 temp.type = TREE_TYPE (ref);
739 temp.opcode = TREE_CODE (ref);
740 temp.off = -1;
742 switch (temp.opcode)
744 case MODIFY_EXPR:
745 temp.op0 = TREE_OPERAND (ref, 1);
746 break;
747 case WITH_SIZE_EXPR:
748 temp.op0 = TREE_OPERAND (ref, 1);
749 temp.off = 0;
750 break;
751 case MEM_REF:
752 /* The base address gets its own vn_reference_op_s structure. */
753 temp.op0 = TREE_OPERAND (ref, 1);
755 offset_int off = mem_ref_offset (ref);
756 if (wi::fits_shwi_p (off))
757 temp.off = off.to_shwi ();
759 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
760 temp.base = MR_DEPENDENCE_BASE (ref);
761 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
762 break;
763 case BIT_FIELD_REF:
764 /* Record bits, position and storage order. */
765 temp.op0 = TREE_OPERAND (ref, 1);
766 temp.op1 = TREE_OPERAND (ref, 2);
767 if (tree_fits_shwi_p (TREE_OPERAND (ref, 2)))
769 HOST_WIDE_INT off = tree_to_shwi (TREE_OPERAND (ref, 2));
770 if (off % BITS_PER_UNIT == 0)
771 temp.off = off / BITS_PER_UNIT;
773 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
774 break;
775 case COMPONENT_REF:
776 /* The field decl is enough to unambiguously specify the field;
777 a matching type is not necessary, and a mismatching type
778 is always a spurious difference. */
779 temp.type = NULL_TREE;
780 temp.op0 = TREE_OPERAND (ref, 1);
781 temp.op1 = TREE_OPERAND (ref, 2);
783 tree this_offset = component_ref_field_offset (ref);
784 if (this_offset
785 && TREE_CODE (this_offset) == INTEGER_CST)
787 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
788 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
790 offset_int off
791 = (wi::to_offset (this_offset)
792 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
793 if (wi::fits_shwi_p (off)
794 /* Prohibit value-numbering zero offset components
795 of addresses the same before the pass folding
796 __builtin_object_size had a chance to run
797 (checking cfun->after_inlining does the
798 trick here). */
799 && (TREE_CODE (orig) != ADDR_EXPR
800 || off != 0
801 || cfun->after_inlining))
802 temp.off = off.to_shwi ();
806 break;
807 case ARRAY_RANGE_REF:
808 case ARRAY_REF:
810 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
811 /* Record index as operand. */
812 temp.op0 = TREE_OPERAND (ref, 1);
813 /* Always record lower bounds and element size. */
814 temp.op1 = array_ref_low_bound (ref);
815 /* But record element size in units of the type alignment. */
816 temp.op2 = TREE_OPERAND (ref, 3);
817 temp.align = eltype->type_common.align;
818 if (! temp.op2)
819 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
820 size_int (TYPE_ALIGN_UNIT (eltype)));
821 if (TREE_CODE (temp.op0) == INTEGER_CST
822 && TREE_CODE (temp.op1) == INTEGER_CST
823 && TREE_CODE (temp.op2) == INTEGER_CST)
825 offset_int off = ((wi::to_offset (temp.op0)
826 - wi::to_offset (temp.op1))
827 * wi::to_offset (temp.op2)
828 * vn_ref_op_align_unit (&temp));
829 if (wi::fits_shwi_p (off))
830 temp.off = off.to_shwi();
833 break;
834 case VAR_DECL:
835 if (DECL_HARD_REGISTER (ref))
837 temp.op0 = ref;
838 break;
840 /* Fallthru. */
841 case PARM_DECL:
842 case CONST_DECL:
843 case RESULT_DECL:
844 /* Canonicalize decls to MEM[&decl] which is what we end up with
845 when valueizing MEM[ptr] with ptr = &decl. */
846 temp.opcode = MEM_REF;
847 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
848 temp.off = 0;
849 result->safe_push (temp);
850 temp.opcode = ADDR_EXPR;
851 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
852 temp.type = TREE_TYPE (temp.op0);
853 temp.off = -1;
854 break;
855 case STRING_CST:
856 case INTEGER_CST:
857 case COMPLEX_CST:
858 case VECTOR_CST:
859 case REAL_CST:
860 case FIXED_CST:
861 case CONSTRUCTOR:
862 case SSA_NAME:
863 temp.op0 = ref;
864 break;
865 case ADDR_EXPR:
866 if (is_gimple_min_invariant (ref))
868 temp.op0 = ref;
869 break;
871 break;
872 /* These are only interesting for their operands, their
873 existence, and their type. They will never be the last
874 ref in the chain of references (i.e. they require an
875 operand), so we don't have to put anything
876 for op* as it will be handled by the iteration. */
877 case REALPART_EXPR:
878 temp.off = 0;
879 break;
880 case VIEW_CONVERT_EXPR:
881 temp.off = 0;
882 temp.reverse = storage_order_barrier_p (ref);
883 break;
884 case IMAGPART_EXPR:
885 /* This is only interesting for its constant offset. */
886 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
887 break;
888 default:
889 gcc_unreachable ();
891 result->safe_push (temp);
893 if (REFERENCE_CLASS_P (ref)
894 || TREE_CODE (ref) == MODIFY_EXPR
895 || TREE_CODE (ref) == WITH_SIZE_EXPR
896 || (TREE_CODE (ref) == ADDR_EXPR
897 && !is_gimple_min_invariant (ref)))
898 ref = TREE_OPERAND (ref, 0);
899 else
900 ref = NULL_TREE;
904 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
905 operands in *OPS, the reference alias set SET and the reference type TYPE.
906 Return true if something useful was produced. */
908 bool
909 ao_ref_init_from_vn_reference (ao_ref *ref,
910 alias_set_type set, tree type,
911 vec<vn_reference_op_s> ops)
913 vn_reference_op_t op;
914 unsigned i;
915 tree base = NULL_TREE;
916 tree *op0_p = &base;
917 offset_int offset = 0;
918 offset_int max_size;
919 offset_int size = -1;
920 tree size_tree = NULL_TREE;
921 alias_set_type base_alias_set = -1;
923 /* First get the final access size from just the outermost expression. */
924 op = &ops[0];
925 if (op->opcode == COMPONENT_REF)
926 size_tree = DECL_SIZE (op->op0);
927 else if (op->opcode == BIT_FIELD_REF)
928 size_tree = op->op0;
929 else
931 machine_mode mode = TYPE_MODE (type);
932 if (mode == BLKmode)
933 size_tree = TYPE_SIZE (type);
934 else
935 size = int (GET_MODE_BITSIZE (mode));
937 if (size_tree != NULL_TREE
938 && TREE_CODE (size_tree) == INTEGER_CST)
939 size = wi::to_offset (size_tree);
941 /* Initially, maxsize is the same as the accessed element size.
942 In the following it will only grow (or become -1). */
943 max_size = size;
945 /* Compute cumulative bit-offset for nested component-refs and array-refs,
946 and find the ultimate containing object. */
947 FOR_EACH_VEC_ELT (ops, i, op)
949 switch (op->opcode)
951 /* These may be in the reference ops, but we cannot do anything
952 sensible with them here. */
953 case ADDR_EXPR:
954 /* Apart from ADDR_EXPR arguments to MEM_REF. */
955 if (base != NULL_TREE
956 && TREE_CODE (base) == MEM_REF
957 && op->op0
958 && DECL_P (TREE_OPERAND (op->op0, 0)))
960 vn_reference_op_t pop = &ops[i-1];
961 base = TREE_OPERAND (op->op0, 0);
962 if (pop->off == -1)
964 max_size = -1;
965 offset = 0;
967 else
968 offset += pop->off * BITS_PER_UNIT;
969 op0_p = NULL;
970 break;
972 /* Fallthru. */
973 case CALL_EXPR:
974 return false;
976 /* Record the base objects. */
977 case MEM_REF:
978 base_alias_set = get_deref_alias_set (op->op0);
979 *op0_p = build2 (MEM_REF, op->type,
980 NULL_TREE, op->op0);
981 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
982 MR_DEPENDENCE_BASE (*op0_p) = op->base;
983 op0_p = &TREE_OPERAND (*op0_p, 0);
984 break;
986 case VAR_DECL:
987 case PARM_DECL:
988 case RESULT_DECL:
989 case SSA_NAME:
990 *op0_p = op->op0;
991 op0_p = NULL;
992 break;
994 /* And now the usual component-reference style ops. */
995 case BIT_FIELD_REF:
996 offset += wi::to_offset (op->op1);
997 break;
999 case COMPONENT_REF:
1001 tree field = op->op0;
1002 /* We do not have a complete COMPONENT_REF tree here so we
1003 cannot use component_ref_field_offset. Do the interesting
1004 parts manually. */
1005 tree this_offset = DECL_FIELD_OFFSET (field);
1007 if (op->op1 || TREE_CODE (this_offset) != INTEGER_CST)
1008 max_size = -1;
1009 else
1011 offset_int woffset = (wi::to_offset (this_offset)
1012 << LOG2_BITS_PER_UNIT);
1013 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1014 offset += woffset;
1016 break;
1019 case ARRAY_RANGE_REF:
1020 case ARRAY_REF:
1021 /* We recorded the lower bound and the element size. */
1022 if (TREE_CODE (op->op0) != INTEGER_CST
1023 || TREE_CODE (op->op1) != INTEGER_CST
1024 || TREE_CODE (op->op2) != INTEGER_CST)
1025 max_size = -1;
1026 else
1028 offset_int woffset
1029 = wi::sext (wi::to_offset (op->op0) - wi::to_offset (op->op1),
1030 TYPE_PRECISION (TREE_TYPE (op->op0)));
1031 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1032 woffset <<= LOG2_BITS_PER_UNIT;
1033 offset += woffset;
1035 break;
1037 case REALPART_EXPR:
1038 break;
1040 case IMAGPART_EXPR:
1041 offset += size;
1042 break;
1044 case VIEW_CONVERT_EXPR:
1045 break;
1047 case STRING_CST:
1048 case INTEGER_CST:
1049 case COMPLEX_CST:
1050 case VECTOR_CST:
1051 case REAL_CST:
1052 case CONSTRUCTOR:
1053 case CONST_DECL:
1054 return false;
1056 default:
1057 return false;
1061 if (base == NULL_TREE)
1062 return false;
1064 ref->ref = NULL_TREE;
1065 ref->base = base;
1066 ref->ref_alias_set = set;
1067 if (base_alias_set != -1)
1068 ref->base_alias_set = base_alias_set;
1069 else
1070 ref->base_alias_set = get_alias_set (base);
1071 /* We discount volatiles from value-numbering elsewhere. */
1072 ref->volatile_p = false;
1074 if (!wi::fits_shwi_p (size) || wi::neg_p (size))
1076 ref->offset = 0;
1077 ref->size = -1;
1078 ref->max_size = -1;
1079 return true;
1082 ref->size = size.to_shwi ();
1084 if (!wi::fits_shwi_p (offset))
1086 ref->offset = 0;
1087 ref->max_size = -1;
1088 return true;
1091 ref->offset = offset.to_shwi ();
1093 if (!wi::fits_shwi_p (max_size) || wi::neg_p (max_size))
1094 ref->max_size = -1;
1095 else
1096 ref->max_size = max_size.to_shwi ();
1098 return true;
1101 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1102 vn_reference_op_s's. */
1104 static void
1105 copy_reference_ops_from_call (gcall *call,
1106 vec<vn_reference_op_s> *result)
1108 vn_reference_op_s temp;
1109 unsigned i;
1110 tree lhs = gimple_call_lhs (call);
1111 int lr;
1113 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1114 different. By adding the lhs here in the vector, we ensure that the
1115 hashcode is different, guaranteeing a different value number. */
1116 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1118 memset (&temp, 0, sizeof (temp));
1119 temp.opcode = MODIFY_EXPR;
1120 temp.type = TREE_TYPE (lhs);
1121 temp.op0 = lhs;
1122 temp.off = -1;
1123 result->safe_push (temp);
1126 /* Copy the type, opcode, function, static chain and EH region, if any. */
1127 memset (&temp, 0, sizeof (temp));
1128 temp.type = gimple_call_return_type (call);
1129 temp.opcode = CALL_EXPR;
1130 temp.op0 = gimple_call_fn (call);
1131 temp.op1 = gimple_call_chain (call);
1132 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1133 temp.op2 = size_int (lr);
1134 temp.off = -1;
1135 if (gimple_call_with_bounds_p (call))
1136 temp.with_bounds = 1;
1137 result->safe_push (temp);
1139 /* Copy the call arguments. As they can be references as well,
1140 just chain them together. */
1141 for (i = 0; i < gimple_call_num_args (call); ++i)
1143 tree callarg = gimple_call_arg (call, i);
1144 copy_reference_ops_from_ref (callarg, result);
1148 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1149 *I_P to point to the last element of the replacement. */
1150 static bool
1151 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1152 unsigned int *i_p)
1154 unsigned int i = *i_p;
1155 vn_reference_op_t op = &(*ops)[i];
1156 vn_reference_op_t mem_op = &(*ops)[i - 1];
1157 tree addr_base;
1158 HOST_WIDE_INT addr_offset = 0;
1160 /* The only thing we have to do is, from &OBJ.foo.bar, add the offset
1161 from .foo.bar to the preceding MEM_REF offset and replace the
1162 address with &OBJ. */
1163 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1164 &addr_offset);
1165 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1166 if (addr_base != TREE_OPERAND (op->op0, 0))
1168 offset_int off = offset_int::from (mem_op->op0, SIGNED);
1169 off += addr_offset;
1170 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1171 op->op0 = build_fold_addr_expr (addr_base);
1172 if (tree_fits_shwi_p (mem_op->op0))
1173 mem_op->off = tree_to_shwi (mem_op->op0);
1174 else
1175 mem_op->off = -1;
1176 return true;
1178 return false;
1181 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1182 *I_P to point to the last element of the replacement. */
1183 static bool
1184 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1185 unsigned int *i_p)
1187 unsigned int i = *i_p;
1188 vn_reference_op_t op = &(*ops)[i];
1189 vn_reference_op_t mem_op = &(*ops)[i - 1];
1190 gimple *def_stmt;
1191 enum tree_code code;
1192 offset_int off;
1194 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1195 if (!is_gimple_assign (def_stmt))
1196 return false;
1198 code = gimple_assign_rhs_code (def_stmt);
1199 if (code != ADDR_EXPR
1200 && code != POINTER_PLUS_EXPR)
1201 return false;
1203 off = offset_int::from (mem_op->op0, SIGNED);
1205 /* The only thing we have to do is, from &OBJ.foo.bar, add the offset
1206 from .foo.bar to the preceding MEM_REF offset and replace the
1207 address with &OBJ. */
1208 if (code == ADDR_EXPR)
1210 tree addr, addr_base;
1211 HOST_WIDE_INT addr_offset;
1213 addr = gimple_assign_rhs1 (def_stmt);
1214 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1215 &addr_offset);
1216 /* If that didn't work because the address isn't invariant, propagate
1217 the reference tree from the address operation in case the current
1218 dereference has no offset. */
1219 if (!addr_base
1220 && *i_p == ops->length () - 1
1221 && off == 0
1222 /* This makes us disable this transform for PRE where the
1223 reference ops might be also used for code insertion which
1224 is invalid. */
1225 && default_vn_walk_kind == VN_WALKREWRITE)
1227 auto_vec<vn_reference_op_s, 32> tem;
1228 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1229 /* Make sure to preserve TBAA info. The only objects not
1230 wrapped in MEM_REFs that can have their address taken are
1231 STRING_CSTs. */
1232 if (tem.length () >= 2
1233 && tem[tem.length () - 2].opcode == MEM_REF)
1235 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1236 new_mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1237 new_mem_op->op0);
1239 else
1240 gcc_assert (tem.last ().opcode == STRING_CST);
1241 ops->pop ();
1242 ops->pop ();
1243 ops->safe_splice (tem);
1244 --*i_p;
1245 return true;
1247 if (!addr_base
1248 || TREE_CODE (addr_base) != MEM_REF)
1249 return false;
1251 off += addr_offset;
1252 off += mem_ref_offset (addr_base);
1253 op->op0 = TREE_OPERAND (addr_base, 0);
1255 else
1257 tree ptr, ptroff;
1258 ptr = gimple_assign_rhs1 (def_stmt);
1259 ptroff = gimple_assign_rhs2 (def_stmt);
1260 if (TREE_CODE (ptr) != SSA_NAME
1261 || TREE_CODE (ptroff) != INTEGER_CST)
1262 return false;
1264 off += wi::to_offset (ptroff);
1265 op->op0 = ptr;
1268 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1269 if (tree_fits_shwi_p (mem_op->op0))
1270 mem_op->off = tree_to_shwi (mem_op->op0);
1271 else
1272 mem_op->off = -1;
1273 if (TREE_CODE (op->op0) == SSA_NAME)
1274 op->op0 = SSA_VAL (op->op0);
1275 if (TREE_CODE (op->op0) != SSA_NAME)
1276 op->opcode = TREE_CODE (op->op0);
1278 /* And recurse. */
1279 if (TREE_CODE (op->op0) == SSA_NAME)
1280 vn_reference_maybe_forwprop_address (ops, i_p);
1281 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1282 vn_reference_fold_indirect (ops, i_p);
1283 return true;
1286 /* Optimize the reference REF to a constant if possible or return
1287 NULL_TREE if not. */
1289 tree
1290 fully_constant_vn_reference_p (vn_reference_t ref)
1292 vec<vn_reference_op_s> operands = ref->operands;
1293 vn_reference_op_t op;
1295 /* Try to simplify the translated expression if it is
1296 a call to a builtin function with at most two arguments. */
1297 op = &operands[0];
1298 if (op->opcode == CALL_EXPR
1299 && TREE_CODE (op->op0) == ADDR_EXPR
1300 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1301 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1302 && operands.length () >= 2
1303 && operands.length () <= 3)
1305 vn_reference_op_t arg0, arg1 = NULL;
1306 bool anyconst = false;
1307 arg0 = &operands[1];
1308 if (operands.length () > 2)
1309 arg1 = &operands[2];
1310 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1311 || (arg0->opcode == ADDR_EXPR
1312 && is_gimple_min_invariant (arg0->op0)))
1313 anyconst = true;
1314 if (arg1
1315 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1316 || (arg1->opcode == ADDR_EXPR
1317 && is_gimple_min_invariant (arg1->op0))))
1318 anyconst = true;
1319 if (anyconst)
1321 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1322 arg1 ? 2 : 1,
1323 arg0->op0,
1324 arg1 ? arg1->op0 : NULL);
1325 if (folded
1326 && TREE_CODE (folded) == NOP_EXPR)
1327 folded = TREE_OPERAND (folded, 0);
1328 if (folded
1329 && is_gimple_min_invariant (folded))
1330 return folded;
1334 /* Simplify reads from constants or constant initializers. */
1335 else if (BITS_PER_UNIT == 8
1336 && is_gimple_reg_type (ref->type)
1337 && (!INTEGRAL_TYPE_P (ref->type)
1338 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1340 HOST_WIDE_INT off = 0;
1341 HOST_WIDE_INT size;
1342 if (INTEGRAL_TYPE_P (ref->type))
1343 size = TYPE_PRECISION (ref->type);
1344 else
1345 size = tree_to_shwi (TYPE_SIZE (ref->type));
1346 if (size % BITS_PER_UNIT != 0
1347 || size > MAX_BITSIZE_MODE_ANY_MODE)
1348 return NULL_TREE;
1349 size /= BITS_PER_UNIT;
1350 unsigned i;
1351 for (i = 0; i < operands.length (); ++i)
1353 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1355 ++i;
1356 break;
1358 if (operands[i].off == -1)
1359 return NULL_TREE;
1360 off += operands[i].off;
1361 if (operands[i].opcode == MEM_REF)
1363 ++i;
1364 break;
1367 vn_reference_op_t base = &operands[--i];
1368 tree ctor = error_mark_node;
1369 tree decl = NULL_TREE;
1370 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1371 ctor = base->op0;
1372 else if (base->opcode == MEM_REF
1373 && base[1].opcode == ADDR_EXPR
1374 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1375 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
1377 decl = TREE_OPERAND (base[1].op0, 0);
1378 ctor = ctor_for_folding (decl);
1380 if (ctor == NULL_TREE)
1381 return build_zero_cst (ref->type);
1382 else if (ctor != error_mark_node)
1384 if (decl)
1386 tree res = fold_ctor_reference (ref->type, ctor,
1387 off * BITS_PER_UNIT,
1388 size * BITS_PER_UNIT, decl);
1389 if (res)
1391 STRIP_USELESS_TYPE_CONVERSION (res);
1392 if (is_gimple_min_invariant (res))
1393 return res;
1396 else
1398 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1399 int len = native_encode_expr (ctor, buf, size, off);
1400 if (len > 0)
1401 return native_interpret_expr (ref->type, buf, len);
1406 return NULL_TREE;
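/* Editorial illustration for the constant-initializer path above: a read
   such as MEM[&cst_array + 16], where cst_array has a constant initializer,
   is resolved by looking the initializer up via ctor_for_folding and
   extracting the accessed piece with fold_ctor_reference; reads whose base
   is itself a constant (e.g. a STRING_CST) instead go through
   native_encode_expr/native_interpret_expr.  */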
1409 /* Return true if OPS contain a storage order barrier. */
1411 static bool
1412 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1414 vn_reference_op_t op;
1415 unsigned i;
1417 FOR_EACH_VEC_ELT (ops, i, op)
1418 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1419 return true;
1421 return false;
1424 /* Transform any SSA_NAMEs in a vector of vn_reference_op_s
1425 structures into their value numbers. This is done in-place, and
1426 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1427 whether any operands were valueized. */
1429 static vec<vn_reference_op_s>
1430 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1432 vn_reference_op_t vro;
1433 unsigned int i;
1435 *valueized_anything = false;
1437 FOR_EACH_VEC_ELT (orig, i, vro)
1439 if (vro->opcode == SSA_NAME
1440 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1442 tree tem = SSA_VAL (vro->op0);
1443 if (tem != vro->op0)
1445 *valueized_anything = true;
1446 vro->op0 = tem;
1448 /* If it transforms from an SSA_NAME to a constant, update
1449 the opcode. */
1450 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1451 vro->opcode = TREE_CODE (vro->op0);
1453 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1455 tree tem = SSA_VAL (vro->op1);
1456 if (tem != vro->op1)
1458 *valueized_anything = true;
1459 vro->op1 = tem;
1462 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1464 tree tem = SSA_VAL (vro->op2);
1465 if (tem != vro->op2)
1467 *valueized_anything = true;
1468 vro->op2 = tem;
1471 /* If it transforms from an SSA_NAME to an address, fold with
1472 a preceding indirect reference. */
1473 if (i > 0
1474 && vro->op0
1475 && TREE_CODE (vro->op0) == ADDR_EXPR
1476 && orig[i - 1].opcode == MEM_REF)
1478 if (vn_reference_fold_indirect (&orig, &i))
1479 *valueized_anything = true;
1481 else if (i > 0
1482 && vro->opcode == SSA_NAME
1483 && orig[i - 1].opcode == MEM_REF)
1485 if (vn_reference_maybe_forwprop_address (&orig, &i))
1486 *valueized_anything = true;
1488 /* If it transforms a non-constant ARRAY_REF into a constant
1489 one, adjust the constant offset. */
1490 else if (vro->opcode == ARRAY_REF
1491 && vro->off == -1
1492 && TREE_CODE (vro->op0) == INTEGER_CST
1493 && TREE_CODE (vro->op1) == INTEGER_CST
1494 && TREE_CODE (vro->op2) == INTEGER_CST)
1496 offset_int off = ((wi::to_offset (vro->op0)
1497 - wi::to_offset (vro->op1))
1498 * wi::to_offset (vro->op2)
1499 * vn_ref_op_align_unit (vro));
1500 if (wi::fits_shwi_p (off))
1501 vro->off = off.to_shwi ();
1505 return orig;
1508 static vec<vn_reference_op_s>
1509 valueize_refs (vec<vn_reference_op_s> orig)
1511 bool tem;
1512 return valueize_refs_1 (orig, &tem);
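/* Scratch vector reused by the valueize_shared_reference_ops_* helpers
   below; sharing it avoids allocating a fresh operand vector for every
   reference or call lookup.  */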
1515 static vec<vn_reference_op_s> shared_lookup_references;
1517 /* Create a vector of vn_reference_op_s structures from REF, a
1518 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1519 this function. *VALUEIZED_ANYTHING will specify whether any
1520 operands were valueized. */
1522 static vec<vn_reference_op_s>
1523 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1525 if (!ref)
1526 return vNULL;
1527 shared_lookup_references.truncate (0);
1528 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1529 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1530 valueized_anything);
1531 return shared_lookup_references;
1534 /* Create a vector of vn_reference_op_s structures from CALL, a
1535 call statement. The vector is shared among all callers of
1536 this function. */
1538 static vec<vn_reference_op_s>
1539 valueize_shared_reference_ops_from_call (gcall *call)
1541 if (!call)
1542 return vNULL;
1543 shared_lookup_references.truncate (0);
1544 copy_reference_ops_from_call (call, &shared_lookup_references);
1545 shared_lookup_references = valueize_refs (shared_lookup_references);
1546 return shared_lookup_references;
1549 /* Lookup a SCCVN reference operation VR in the current hash table.
1550 Returns the resulting value number if it exists in the hash table,
1551 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1552 vn_reference_t stored in the hashtable if something is found. */
1554 static tree
1555 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1557 vn_reference_s **slot;
1558 hashval_t hash;
1560 hash = vr->hashcode;
1561 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1562 if (!slot && current_info == optimistic_info)
1563 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1564 if (slot)
1566 if (vnresult)
1567 *vnresult = (vn_reference_t)*slot;
1568 return ((vn_reference_t)*slot)->result;
1571 return NULL_TREE;
1574 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1575 with the current VUSE and performs the expression lookup. */
1577 static void *
1578 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1579 unsigned int cnt, void *vr_)
1581 vn_reference_t vr = (vn_reference_t)vr_;
1582 vn_reference_s **slot;
1583 hashval_t hash;
1585 /* This bounds the stmt walks we perform on reference lookups
1586 to O(1) instead of O(N) where N is the number of dominating
1587 stores. */
1588 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1589 return (void *)-1;
1591 if (last_vuse_ptr)
1592 *last_vuse_ptr = vuse;
1594 /* Fixup vuse and hash. */
1595 if (vr->vuse)
1596 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1597 vr->vuse = vuse_ssa_val (vuse);
1598 if (vr->vuse)
1599 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1601 hash = vr->hashcode;
1602 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1603 if (!slot && current_info == optimistic_info)
1604 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1605 if (slot)
1606 return *slot;
1608 return NULL;
1611 /* Lookup an existing or insert a new vn_reference entry into the
1612 value table for the VUSE, SET, TYPE, OPERANDS reference which
1613 has the value VALUE, which is either a constant or an SSA name. */
1615 static vn_reference_t
1616 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1617 alias_set_type set,
1618 tree type,
1619 vec<vn_reference_op_s,
1620 va_heap> operands,
1621 tree value)
1623 vn_reference_s vr1;
1624 vn_reference_t result;
1625 unsigned value_id;
1626 vr1.vuse = vuse;
1627 vr1.operands = operands;
1628 vr1.type = type;
1629 vr1.set = set;
1630 vr1.hashcode = vn_reference_compute_hash (&vr1);
1631 if (vn_reference_lookup_1 (&vr1, &result))
1632 return result;
1633 if (TREE_CODE (value) == SSA_NAME)
1634 value_id = VN_INFO (value)->value_id;
1635 else
1636 value_id = get_or_alloc_constant_value_id (value);
1637 return vn_reference_insert_pieces (vuse, set, type,
1638 operands.copy (), value, value_id);
1641 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *stmt, tree result);
1643 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
1645 static tree
1646 vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops)
1648 if (!rcode.is_tree_code ())
1649 return NULL_TREE;
1650 vn_nary_op_t vnresult = NULL;
1651 return vn_nary_op_lookup_pieces (TREE_CODE_LENGTH ((tree_code) rcode),
1652 (tree_code) rcode, type, ops, &vnresult);
1655 /* Return a value-number for RCODE OPS... either by looking up an existing
1656 value-number for the simplified result or by inserting the operation if
1657 INSERT is true. */
1659 static tree
1660 vn_nary_build_or_lookup_1 (code_helper rcode, tree type, tree *ops,
1661 bool insert)
1663 tree result = NULL_TREE;
1664 /* We will be creating a value number for
1665 RCODE (OPS...).
1666 So first simplify and lookup this expression to see if it
1667 is already available. */
1668 mprts_hook = vn_lookup_simplify_result;
1669 bool res = false;
1670 switch (TREE_CODE_LENGTH ((tree_code) rcode))
1672 case 1:
1673 res = gimple_resimplify1 (NULL, &rcode, type, ops, vn_valueize);
1674 break;
1675 case 2:
1676 res = gimple_resimplify2 (NULL, &rcode, type, ops, vn_valueize);
1677 break;
1678 case 3:
1679 res = gimple_resimplify3 (NULL, &rcode, type, ops, vn_valueize);
1680 break;
1682 mprts_hook = NULL;
1683 gimple *new_stmt = NULL;
1684 if (res
1685 && gimple_simplified_result_is_gimple_val (rcode, ops))
1686 /* The expression is already available. */
1687 result = ops[0];
1688 else
1690 tree val = vn_lookup_simplify_result (rcode, type, ops);
1691 if (!val && insert)
1693 gimple_seq stmts = NULL;
1694 result = maybe_push_res_to_seq (rcode, type, ops, &stmts);
1695 if (result)
1697 gcc_assert (gimple_seq_singleton_p (stmts));
1698 new_stmt = gimple_seq_first_stmt (stmts);
1701 else
1702 /* The expression is already available. */
1703 result = val;
1705 if (new_stmt)
1707 /* The expression is not yet available, value-number lhs to
1708 the new SSA_NAME we created. */
1709 /* Initialize value-number information properly. */
1710 VN_INFO_GET (result)->valnum = result;
1711 VN_INFO (result)->value_id = get_next_value_id ();
1712 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1713 new_stmt);
1714 VN_INFO (result)->needs_insertion = true;
1715 /* ??? PRE phi-translation inserts NARYs without corresponding
1716 SSA name result. Re-use those but set their result according
1717 to the stmt we just built. */
1718 vn_nary_op_t nary = NULL;
1719 vn_nary_op_lookup_stmt (new_stmt, &nary);
1720 if (nary)
1722 gcc_assert (nary->result == NULL_TREE);
1723 nary->result = gimple_assign_lhs (new_stmt);
1725 /* As all "inserted" statements are singleton SCCs, insert
1726 to the valid table. This is strictly needed to
1727 avoid re-generating new value SSA_NAMEs for the same
1728 expression during SCC iteration over and over (the
1729 optimistic table gets cleared after each iteration).
1730 We do not need to insert into the optimistic table, as
1731 lookups there will fall back to the valid table. */
1732 else if (current_info == optimistic_info)
1734 current_info = valid_info;
1735 vn_nary_op_insert_stmt (new_stmt, result);
1736 current_info = optimistic_info;
1738 else
1739 vn_nary_op_insert_stmt (new_stmt, result);
1740 if (dump_file && (dump_flags & TDF_DETAILS))
1742 fprintf (dump_file, "Inserting name ");
1743 print_generic_expr (dump_file, result, 0);
1744 fprintf (dump_file, " for expression ");
1745 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1746 fprintf (dump_file, "\n");
1749 return result;
1752 /* Return a value-number for RCODE OPS... either by looking up an existing
1753 value-number for the simplified result or by inserting the operation. */
1755 static tree
1756 vn_nary_build_or_lookup (code_helper rcode, tree type, tree *ops)
1758 return vn_nary_build_or_lookup_1 (rcode, type, ops, true);
1761 /* Try to simplify the nary expression NARY and return its value
1762 if present. */
1764 tree
1765 vn_nary_simplify (vn_nary_op_t nary)
1767 if (nary->length > 3)
1768 return NULL_TREE;
1769 tree ops[3];
1770 memcpy (ops, nary->op, sizeof (tree) * nary->length);
1771 return vn_nary_build_or_lookup_1 (nary->opcode, nary->type, ops, false);
1775 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1776 from the statement defining VUSE and if not successful tries to
1777 translate *REFP and VR_ through an aggregate copy at the definition
1778 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1779 of *REF and *VR. If only disambiguation was performed then
1780 *DISAMBIGUATE_ONLY is set to true. */
1782 static void *
1783 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1784 bool *disambiguate_only)
1786 vn_reference_t vr = (vn_reference_t)vr_;
1787 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1788 tree base = ao_ref_base (ref);
1789 HOST_WIDE_INT offset, maxsize;
1790 static vec<vn_reference_op_s> lhs_ops;
1791 ao_ref lhs_ref;
1792 bool lhs_ref_ok = false;
1794 /* If the reference is based on a parameter that was determined as
1795 pointing to readonly memory, it doesn't change. */
1796 if (TREE_CODE (base) == MEM_REF
1797 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1798 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1799 && bitmap_bit_p (const_parms,
1800 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1802 *disambiguate_only = true;
1803 return NULL;
1806 /* First try to disambiguate after value-replacing in the definition's LHS. */
1807 if (is_gimple_assign (def_stmt))
1809 tree lhs = gimple_assign_lhs (def_stmt);
1810 bool valueized_anything = false;
1811 /* Avoid re-allocation overhead. */
1812 lhs_ops.truncate (0);
1813 copy_reference_ops_from_ref (lhs, &lhs_ops);
1814 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1815 if (valueized_anything)
1817 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1818 get_alias_set (lhs),
1819 TREE_TYPE (lhs), lhs_ops);
1820 if (lhs_ref_ok
1821 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1823 *disambiguate_only = true;
1824 return NULL;
1827 else
1829 ao_ref_init (&lhs_ref, lhs);
1830 lhs_ref_ok = true;
1833 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1834 && gimple_call_num_args (def_stmt) <= 4)
1836 /* For builtin calls, valueize their arguments and call the
1837 alias oracle again. Valueization may improve points-to
1838 info of pointers and constify size and position arguments.
1839 Originally this was motivated by PR61034 which has
1840 conditional calls to free falsely clobbering ref because
1841 of imprecise points-to info of the argument. */
1842 tree oldargs[4];
1843 bool valueized_anything = false;
1844 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1846 oldargs[i] = gimple_call_arg (def_stmt, i);
1847 if (TREE_CODE (oldargs[i]) == SSA_NAME
1848 && VN_INFO (oldargs[i])->valnum != oldargs[i])
1850 gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
1851 valueized_anything = true;
1854 if (valueized_anything)
1856 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1857 ref);
1858 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1859 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1860 if (!res)
1862 *disambiguate_only = true;
1863 return NULL;
1868 if (*disambiguate_only)
1869 return (void *)-1;
1871 offset = ref->offset;
1872 maxsize = ref->max_size;
1874 /* If we cannot constrain the size of the reference, we cannot
1875 test if anything kills it. */
1876 if (maxsize == -1)
1877 return (void *)-1;
1879 /* We can't deduce anything useful from clobbers. */
1880 if (gimple_clobber_p (def_stmt))
1881 return (void *)-1;
1883 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1884 from that definition.
1885 1) Memset. */
1886 if (is_gimple_reg_type (vr->type)
1887 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1888 && integer_zerop (gimple_call_arg (def_stmt, 1))
1889 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1890 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1892 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1893 tree base2;
1894 HOST_WIDE_INT offset2, size2, maxsize2;
1895 bool reverse;
1896 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1897 &reverse);
1898 size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
1899 if ((unsigned HOST_WIDE_INT)size2 / 8
1900 == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
1901 && maxsize2 != -1
1902 && operand_equal_p (base, base2, 0)
1903 && offset2 <= offset
1904 && offset2 + size2 >= offset + maxsize)
1906 tree val = build_zero_cst (vr->type);
1907 return vn_reference_lookup_or_insert_for_pieces
1908 (vuse, vr->set, vr->type, vr->operands, val);
1912 /* 2) Assignment from an empty CONSTRUCTOR. */
1913 else if (is_gimple_reg_type (vr->type)
1914 && gimple_assign_single_p (def_stmt)
1915 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1916 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1918 tree base2;
1919 HOST_WIDE_INT offset2, size2, maxsize2;
1920 bool reverse;
1921 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1922 &offset2, &size2, &maxsize2, &reverse);
1923 if (maxsize2 != -1
1924 && operand_equal_p (base, base2, 0)
1925 && offset2 <= offset
1926 && offset2 + size2 >= offset + maxsize)
1928 tree val = build_zero_cst (vr->type);
1929 return vn_reference_lookup_or_insert_for_pieces
1930 (vuse, vr->set, vr->type, vr->operands, val);
1934 /* 3) Assignment from a constant. We can use fold's native encode/interpret
1935 routines to extract the assigned bits. */
1936 else if (ref->size == maxsize
1937 && is_gimple_reg_type (vr->type)
1938 && !contains_storage_order_barrier_p (vr->operands)
1939 && gimple_assign_single_p (def_stmt)
1940 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1941 && maxsize % BITS_PER_UNIT == 0
1942 && offset % BITS_PER_UNIT == 0
1943 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
1944 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
1945 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
1947 tree base2;
1948 HOST_WIDE_INT offset2, size2, maxsize2;
1949 bool reverse;
1950 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1951 &offset2, &size2, &maxsize2, &reverse);
1952 if (!reverse
1953 && maxsize2 != -1
1954 && maxsize2 == size2
1955 && size2 % BITS_PER_UNIT == 0
1956 && offset2 % BITS_PER_UNIT == 0
1957 && operand_equal_p (base, base2, 0)
1958 && offset2 <= offset
1959 && offset2 + size2 >= offset + maxsize)
1961 /* We support up to 512-bit values (for V8DFmode). */
1962 unsigned char buffer[64];
1963 int len;
1965 tree rhs = gimple_assign_rhs1 (def_stmt);
1966 if (TREE_CODE (rhs) == SSA_NAME)
1967 rhs = SSA_VAL (rhs);
1968 len = native_encode_expr (rhs,
1969 buffer, sizeof (buffer));
1970 if (len > 0)
1972 tree type = vr->type;
1973 /* Make sure to interpret in a type that has a range
1974 covering the whole access size. */
1975 if (INTEGRAL_TYPE_P (vr->type)
1976 && ref->size != TYPE_PRECISION (vr->type))
1977 type = build_nonstandard_integer_type (ref->size,
1978 TYPE_UNSIGNED (type));
1979 tree val = native_interpret_expr (type,
1980 buffer
1981 + ((offset - offset2)
1982 / BITS_PER_UNIT),
1983 ref->size / BITS_PER_UNIT);
1984 /* If we chop off bits because the type's precision doesn't
1985 match the memory access size, this is OK when optimizing
1986 reads but not when called from the DSE code during
1987 elimination. */
1988 if (val
1989 && type != vr->type)
1991 if (! int_fits_type_p (val, vr->type))
1992 val = NULL_TREE;
1993 else
1994 val = fold_convert (vr->type, val);
1997 if (val)
1998 return vn_reference_lookup_or_insert_for_pieces
1999 (vuse, vr->set, vr->type, vr->operands, val);
2004 /* 4) Assignment from an SSA name which definition we may be able
2005 to access pieces from. */
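/* For example (illustrative only), after
     MEM[(struct S *)p_1] = val_2;
   a covered smaller read from within the stored location can be
   expressed as BIT_FIELD_REF <val_2, size, position> and value-numbered
   through the n-ary machinery. */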
2006 else if (ref->size == maxsize
2007 && is_gimple_reg_type (vr->type)
2008 && !contains_storage_order_barrier_p (vr->operands)
2009 && gimple_assign_single_p (def_stmt)
2010 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2012 tree base2;
2013 HOST_WIDE_INT offset2, size2, maxsize2;
2014 bool reverse;
2015 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2016 &offset2, &size2, &maxsize2,
2017 &reverse);
2018 if (!reverse
2019 && maxsize2 != -1
2020 && maxsize2 == size2
2021 && operand_equal_p (base, base2, 0)
2022 && offset2 <= offset
2023 && offset2 + size2 >= offset + maxsize
2024 /* ??? We can't handle bitfield precision extracts without
2025 either using an alternate type for the BIT_FIELD_REF and
2026 then doing a conversion or possibly adjusting the offset
2027 according to endianness. */
2028 && (! INTEGRAL_TYPE_P (vr->type)
2029 || ref->size == TYPE_PRECISION (vr->type))
2030 && ref->size % BITS_PER_UNIT == 0)
2032 code_helper rcode = BIT_FIELD_REF;
2033 tree ops[3];
2034 ops[0] = SSA_VAL (gimple_assign_rhs1 (def_stmt));
2035 ops[1] = bitsize_int (ref->size);
2036 ops[2] = bitsize_int (offset - offset2);
2037 tree val = vn_nary_build_or_lookup (rcode, vr->type, ops);
2038 if (val)
2040 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2041 (vuse, vr->set, vr->type, vr->operands, val);
2042 return res;
2047 /* 5) For aggregate copies translate the reference through them if
2048 the copy kills ref. */
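/* For example (illustrative only), given
     b = a;
     ... = b.x;
   the read of b.x is rewritten in terms of a.x and the walk continues
   with the translated reference. */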
2049 else if (vn_walk_kind == VN_WALKREWRITE
2050 && gimple_assign_single_p (def_stmt)
2051 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2052 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2053 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2055 tree base2;
2056 HOST_WIDE_INT maxsize2;
2057 int i, j, k;
2058 auto_vec<vn_reference_op_s> rhs;
2059 vn_reference_op_t vro;
2060 ao_ref r;
2062 if (!lhs_ref_ok)
2063 return (void *)-1;
2065 /* See if the assignment kills REF. */
2066 base2 = ao_ref_base (&lhs_ref);
2067 maxsize2 = lhs_ref.max_size;
2068 if (maxsize2 == -1
2069 || (base != base2
2070 && (TREE_CODE (base) != MEM_REF
2071 || TREE_CODE (base2) != MEM_REF
2072 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2073 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2074 TREE_OPERAND (base2, 1))))
2075 || !stmt_kills_ref_p (def_stmt, ref))
2076 return (void *)-1;
2078 /* Find the common base of ref and the lhs. lhs_ops already
2079 contains valueized operands for the lhs. */
2080 i = vr->operands.length () - 1;
2081 j = lhs_ops.length () - 1;
2082 while (j >= 0 && i >= 0
2083 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2085 i--;
2086 j--;
2089 /* ??? The innermost op should always be a MEM_REF and we already
2090 checked that the assignment to the lhs kills vr. Thus for
2091 aggregate copies using char[] types the vn_reference_op_eq
2092 may fail when comparing types for compatibility. But we really
2093 don't care here - further lookups with the rewritten operands
2094 will simply fail if we messed up types too badly. */
2095 HOST_WIDE_INT extra_off = 0;
2096 if (j == 0 && i >= 0
2097 && lhs_ops[0].opcode == MEM_REF
2098 && lhs_ops[0].off != -1)
2100 if (lhs_ops[0].off == vr->operands[i].off)
2101 i--, j--;
2102 else if (vr->operands[i].opcode == MEM_REF
2103 && vr->operands[i].off != -1)
2105 extra_off = vr->operands[i].off - lhs_ops[0].off;
2106 i--, j--;
2110 /* i now points to the first additional op.
2111 ??? LHS may not be completely contained in VR, one or more
2112 VIEW_CONVERT_EXPRs could be in its way. We could at least
2113 try handling outermost VIEW_CONVERT_EXPRs. */
2114 if (j != -1)
2115 return (void *)-1;
2117 /* Punt if the additional ops contain a storage order barrier. */
2118 for (k = i; k >= 0; k--)
2120 vro = &vr->operands[k];
2121 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2122 return (void *)-1;
2125 /* Now re-write REF to be based on the rhs of the assignment. */
2126 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2128 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2129 if (extra_off != 0)
2131 if (rhs.length () < 2
2132 || rhs[0].opcode != MEM_REF
2133 || rhs[0].off == -1)
2134 return (void *)-1;
2135 rhs[0].off += extra_off;
2136 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
2137 build_int_cst (TREE_TYPE (rhs[0].op0),
2138 extra_off));
2141 /* We need to prepend vr->operands[0..i] to rhs. */
2142 vec<vn_reference_op_s> old = vr->operands;
2143 if (i + 1 + rhs.length () > vr->operands.length ())
2144 vr->operands.safe_grow (i + 1 + rhs.length ());
2145 else
2146 vr->operands.truncate (i + 1 + rhs.length ());
2147 FOR_EACH_VEC_ELT (rhs, j, vro)
2148 vr->operands[i + 1 + j] = *vro;
2149 vr->operands = valueize_refs (vr->operands);
2150 if (old == shared_lookup_references)
2151 shared_lookup_references = vr->operands;
2152 vr->hashcode = vn_reference_compute_hash (vr);
2154 /* Try folding the new reference to a constant. */
2155 tree val = fully_constant_vn_reference_p (vr);
2156 if (val)
2157 return vn_reference_lookup_or_insert_for_pieces
2158 (vuse, vr->set, vr->type, vr->operands, val);
2160 /* Adjust *ref from the new operands. */
2161 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2162 return (void *)-1;
2163 /* This can happen with bitfields. */
2164 if (ref->size != r.size)
2165 return (void *)-1;
2166 *ref = r;
2168 /* Do not update last seen VUSE after translating. */
2169 last_vuse_ptr = NULL;
2171 /* Keep looking for the adjusted *REF / VR pair. */
2172 return NULL;
2175 /* 6) For memcpy copies translate the reference through them if
2176 the copy kills ref. */
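/* For example (illustrative only), given
     memcpy (&d, &s, sizeof (d));
     ... = d.x;
   the read of d.x is re-based as a MEM_REF off &s at the corresponding
   offset and the walk continues with the translated reference. */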
2177 else if (vn_walk_kind == VN_WALKREWRITE
2178 && is_gimple_reg_type (vr->type)
2179 /* ??? Handle BCOPY as well. */
2180 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2181 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2182 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2183 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2184 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2185 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2186 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2187 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
2189 tree lhs, rhs;
2190 ao_ref r;
2191 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
2192 vn_reference_op_s op;
2193 HOST_WIDE_INT at;
2195 /* Only handle non-variable, addressable refs. */
2196 if (ref->size != maxsize
2197 || offset % BITS_PER_UNIT != 0
2198 || ref->size % BITS_PER_UNIT != 0)
2199 return (void *)-1;
2201 /* Extract a pointer base and an offset for the destination. */
2202 lhs = gimple_call_arg (def_stmt, 0);
2203 lhs_offset = 0;
2204 if (TREE_CODE (lhs) == SSA_NAME)
2206 lhs = SSA_VAL (lhs);
2207 if (TREE_CODE (lhs) == SSA_NAME)
2209 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2210 if (gimple_assign_single_p (def_stmt)
2211 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2212 lhs = gimple_assign_rhs1 (def_stmt);
2215 if (TREE_CODE (lhs) == ADDR_EXPR)
2217 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2218 &lhs_offset);
2219 if (!tem)
2220 return (void *)-1;
2221 if (TREE_CODE (tem) == MEM_REF
2222 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2224 lhs = TREE_OPERAND (tem, 0);
2225 if (TREE_CODE (lhs) == SSA_NAME)
2226 lhs = SSA_VAL (lhs);
2227 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2229 else if (DECL_P (tem))
2230 lhs = build_fold_addr_expr (tem);
2231 else
2232 return (void *)-1;
2234 if (TREE_CODE (lhs) != SSA_NAME
2235 && TREE_CODE (lhs) != ADDR_EXPR)
2236 return (void *)-1;
2238 /* Extract a pointer base and an offset for the source. */
2239 rhs = gimple_call_arg (def_stmt, 1);
2240 rhs_offset = 0;
2241 if (TREE_CODE (rhs) == SSA_NAME)
2242 rhs = SSA_VAL (rhs);
2243 if (TREE_CODE (rhs) == ADDR_EXPR)
2245 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2246 &rhs_offset);
2247 if (!tem)
2248 return (void *)-1;
2249 if (TREE_CODE (tem) == MEM_REF
2250 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2252 rhs = TREE_OPERAND (tem, 0);
2253 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2255 else if (DECL_P (tem))
2256 rhs = build_fold_addr_expr (tem);
2257 else
2258 return (void *)-1;
2260 if (TREE_CODE (rhs) != SSA_NAME
2261 && TREE_CODE (rhs) != ADDR_EXPR)
2262 return (void *)-1;
2264 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2266 /* The bases of the destination and the reference have to agree. */
2267 if ((TREE_CODE (base) != MEM_REF
2268 && !DECL_P (base))
2269 || (TREE_CODE (base) == MEM_REF
2270 && (TREE_OPERAND (base, 0) != lhs
2271 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
2272 || (DECL_P (base)
2273 && (TREE_CODE (lhs) != ADDR_EXPR
2274 || TREE_OPERAND (lhs, 0) != base)))
2275 return (void *)-1;
2277 at = offset / BITS_PER_UNIT;
2278 if (TREE_CODE (base) == MEM_REF)
2279 at += tree_to_uhwi (TREE_OPERAND (base, 1));
2280 /* If the access is completely outside of the memcpy destination
2281 area there is no aliasing. */
2282 if (lhs_offset >= at + maxsize / BITS_PER_UNIT
2283 || lhs_offset + copy_size <= at)
2284 return NULL;
2285 /* And the access has to be contained within the memcpy destination. */
2286 if (lhs_offset > at
2287 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2288 return (void *)-1;
2290 /* Make room for 2 operands in the new reference. */
2291 if (vr->operands.length () < 2)
2293 vec<vn_reference_op_s> old = vr->operands;
2294 vr->operands.safe_grow_cleared (2);
2295 if (old == shared_lookup_references)
2296 shared_lookup_references = vr->operands;
2298 else
2299 vr->operands.truncate (2);
2301 /* The looked-through reference is a simple MEM_REF. */
2302 memset (&op, 0, sizeof (op));
2303 op.type = vr->type;
2304 op.opcode = MEM_REF;
2305 op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
2306 op.off = at - lhs_offset + rhs_offset;
2307 vr->operands[0] = op;
2308 op.type = TREE_TYPE (rhs);
2309 op.opcode = TREE_CODE (rhs);
2310 op.op0 = rhs;
2311 op.off = -1;
2312 vr->operands[1] = op;
2313 vr->hashcode = vn_reference_compute_hash (vr);
2315 /* Try folding the new reference to a constant. */
2316 tree val = fully_constant_vn_reference_p (vr);
2317 if (val)
2318 return vn_reference_lookup_or_insert_for_pieces
2319 (vuse, vr->set, vr->type, vr->operands, val);
2321 /* Adjust *ref from the new operands. */
2322 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2323 return (void *)-1;
2324 /* This can happen with bitfields. */
2325 if (ref->size != r.size)
2326 return (void *)-1;
2327 *ref = r;
2329 /* Do not update last seen VUSE after translating. */
2330 last_vuse_ptr = NULL;
2332 /* Keep looking for the adjusted *REF / VR pair. */
2333 return NULL;
2336 /* Bail out and stop walking. */
2337 return (void *)-1;
2340 /* Return a reference op vector from OP that can be used for
2341 vn_reference_lookup_pieces. The caller is responsible for releasing
2342 the vector. */
2344 vec<vn_reference_op_s>
2345 vn_reference_operands_for_lookup (tree op)
2347 bool valueized;
2348 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2351 /* Lookup a reference operation by its parts, in the current hash table.
2352 Returns the resulting value number if it exists in the hash table,
2353 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2354 vn_reference_t stored in the hashtable if something is found. */
2356 tree
2357 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2358 vec<vn_reference_op_s> operands,
2359 vn_reference_t *vnresult, vn_lookup_kind kind)
2361 struct vn_reference_s vr1;
2362 vn_reference_t tmp;
2363 tree cst;
2365 if (!vnresult)
2366 vnresult = &tmp;
2367 *vnresult = NULL;
2369 vr1.vuse = vuse_ssa_val (vuse);
2370 shared_lookup_references.truncate (0);
2371 shared_lookup_references.safe_grow (operands.length ());
2372 memcpy (shared_lookup_references.address (),
2373 operands.address (),
2374 sizeof (vn_reference_op_s)
2375 * operands.length ());
2376 vr1.operands = operands = shared_lookup_references
2377 = valueize_refs (shared_lookup_references);
2378 vr1.type = type;
2379 vr1.set = set;
2380 vr1.hashcode = vn_reference_compute_hash (&vr1);
2381 if ((cst = fully_constant_vn_reference_p (&vr1)))
2382 return cst;
2384 vn_reference_lookup_1 (&vr1, vnresult);
2385 if (!*vnresult
2386 && kind != VN_NOWALK
2387 && vr1.vuse)
2389 ao_ref r;
2390 vn_walk_kind = kind;
2391 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2392 *vnresult =
2393 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2394 vn_reference_lookup_2,
2395 vn_reference_lookup_3,
2396 vuse_ssa_val, &vr1);
2397 gcc_checking_assert (vr1.operands == shared_lookup_references);
2400 if (*vnresult)
2401 return (*vnresult)->result;
2403 return NULL_TREE;
2406 /* Lookup OP in the current hash table, and return the resulting value
2407 number if it exists in the hash table. Return NULL_TREE if it does
2408 not exist in the hash table or if the result field of the structure
2409 was NULL. VNRESULT will be filled in with the vn_reference_t
2410 stored in the hashtable if one exists. When TBAA_P is false assume
2411 we are looking up a store and treat it as having alias-set zero. */
2413 tree
2414 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2415 vn_reference_t *vnresult, bool tbaa_p)
2417 vec<vn_reference_op_s> operands;
2418 struct vn_reference_s vr1;
2419 tree cst;
2420 bool valuezied_anything;
2422 if (vnresult)
2423 *vnresult = NULL;
2425 vr1.vuse = vuse_ssa_val (vuse);
2426 vr1.operands = operands
2427 = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
2428 vr1.type = TREE_TYPE (op);
2429 vr1.set = tbaa_p ? get_alias_set (op) : 0;
2430 vr1.hashcode = vn_reference_compute_hash (&vr1);
2431 if ((cst = fully_constant_vn_reference_p (&vr1)))
2432 return cst;
2434 if (kind != VN_NOWALK
2435 && vr1.vuse)
2437 vn_reference_t wvnresult;
2438 ao_ref r;
2439 /* Make sure to use a valueized reference if we valueized anything.
2440 Otherwise preserve the full reference for advanced TBAA. */
2441 if (!valuezied_anything
2442 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2443 vr1.operands))
2444 ao_ref_init (&r, op);
2445 if (! tbaa_p)
2446 r.ref_alias_set = r.base_alias_set = 0;
2447 vn_walk_kind = kind;
2448 wvnresult =
2449 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2450 vn_reference_lookup_2,
2451 vn_reference_lookup_3,
2452 vuse_ssa_val, &vr1);
2453 gcc_checking_assert (vr1.operands == shared_lookup_references);
2454 if (wvnresult)
2456 if (vnresult)
2457 *vnresult = wvnresult;
2458 return wvnresult->result;
2461 return NULL_TREE;
2464 return vn_reference_lookup_1 (&vr1, vnresult);
2467 /* Lookup CALL in the current hash table and return the entry in
2468 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2470 void
2471 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2472 vn_reference_t vr)
2474 if (vnresult)
2475 *vnresult = NULL;
2477 tree vuse = gimple_vuse (call);
2479 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2480 vr->operands = valueize_shared_reference_ops_from_call (call);
2481 vr->type = gimple_expr_type (call);
2482 vr->set = 0;
2483 vr->hashcode = vn_reference_compute_hash (vr);
2484 vn_reference_lookup_1 (vr, vnresult);
2487 /* Insert OP into the current hash table with a value number of
2488 RESULT, and return the resulting reference structure we created. */
2490 static vn_reference_t
2491 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2493 vn_reference_s **slot;
2494 vn_reference_t vr1;
2495 bool tem;
2497 vr1 = current_info->references_pool->allocate ();
2498 if (TREE_CODE (result) == SSA_NAME)
2499 vr1->value_id = VN_INFO (result)->value_id;
2500 else
2501 vr1->value_id = get_or_alloc_constant_value_id (result);
2502 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2503 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2504 vr1->type = TREE_TYPE (op);
2505 vr1->set = get_alias_set (op);
2506 vr1->hashcode = vn_reference_compute_hash (vr1);
2507 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2508 vr1->result_vdef = vdef;
2510 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2511 INSERT);
2513 /* Because we lookup stores using vuses, and value number failures
2514 using the vdefs (see visit_reference_op_store for how and why),
2515 it's possible that on failure we may try to insert an already
2516 inserted store. This is not wrong, there is no ssa name for a
2517 store that we could use as a differentiator anyway. Thus, unlike
2518 the other lookup functions, you cannot gcc_assert (!*slot)
2519 here. */
2521 /* But free the old slot in case of a collision. */
2522 if (*slot)
2523 free_reference (*slot);
2525 *slot = vr1;
2526 return vr1;
2529 /* Insert a reference by its pieces into the current hash table with
2530 a value number of RESULT. Return the resulting reference
2531 structure we created. */
2533 vn_reference_t
2534 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2535 vec<vn_reference_op_s> operands,
2536 tree result, unsigned int value_id)
2539 vn_reference_s **slot;
2540 vn_reference_t vr1;
2542 vr1 = current_info->references_pool->allocate ();
2543 vr1->value_id = value_id;
2544 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2545 vr1->operands = valueize_refs (operands);
2546 vr1->type = type;
2547 vr1->set = set;
2548 vr1->hashcode = vn_reference_compute_hash (vr1);
2549 if (result && TREE_CODE (result) == SSA_NAME)
2550 result = SSA_VAL (result);
2551 vr1->result = result;
2553 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2554 INSERT);
2556 /* At this point we should have all the things inserted that we have
2557 seen before, and we should never try inserting something that
2558 already exists. */
2559 gcc_assert (!*slot);
2560 if (*slot)
2561 free_reference (*slot);
2563 *slot = vr1;
2564 return vr1;
2567 /* Compute and return the hash value for nary operation VBO1. */
2569 static hashval_t
2570 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2572 inchash::hash hstate;
2573 unsigned i;
2575 for (i = 0; i < vno1->length; ++i)
2576 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2577 vno1->op[i] = SSA_VAL (vno1->op[i]);
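/* Canonicalize the operand order so that congruent expressions hash and
   compare equal; e.g. (illustrative only) a_1 + b_2 and b_2 + a_1 get the
   same operand order, and a comparison like a_1 < b_2 becomes b_2 > a_1
   when its operands are swapped. */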
2579 if (((vno1->length == 2
2580 && commutative_tree_code (vno1->opcode))
2581 || (vno1->length == 3
2582 && commutative_ternary_tree_code (vno1->opcode)))
2583 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2584 std::swap (vno1->op[0], vno1->op[1]);
2585 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2586 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2588 std::swap (vno1->op[0], vno1->op[1]);
2589 vno1->opcode = swap_tree_comparison (vno1->opcode);
2592 hstate.add_int (vno1->opcode);
2593 for (i = 0; i < vno1->length; ++i)
2594 inchash::add_expr (vno1->op[i], hstate);
2596 return hstate.end ();
2599 /* Compare nary operations VNO1 and VNO2 and return true if they are
2600 equivalent. */
2602 bool
2603 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2605 unsigned i;
2607 if (vno1->hashcode != vno2->hashcode)
2608 return false;
2610 if (vno1->length != vno2->length)
2611 return false;
2613 if (vno1->opcode != vno2->opcode
2614 || !types_compatible_p (vno1->type, vno2->type))
2615 return false;
2617 for (i = 0; i < vno1->length; ++i)
2618 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2619 return false;
2621 return true;
2624 /* Initialize VNO from the pieces provided. */
2626 static void
2627 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2628 enum tree_code code, tree type, tree *ops)
2630 vno->opcode = code;
2631 vno->length = length;
2632 vno->type = type;
2633 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2636 /* Initialize VNO from OP. */
2638 static void
2639 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2641 unsigned i;
2643 vno->opcode = TREE_CODE (op);
2644 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2645 vno->type = TREE_TYPE (op);
2646 for (i = 0; i < vno->length; ++i)
2647 vno->op[i] = TREE_OPERAND (op, i);
2650 /* Return the number of operands for a vn_nary ops structure from STMT. */
2652 static unsigned int
2653 vn_nary_length_from_stmt (gimple *stmt)
2655 switch (gimple_assign_rhs_code (stmt))
2657 case REALPART_EXPR:
2658 case IMAGPART_EXPR:
2659 case VIEW_CONVERT_EXPR:
2660 return 1;
2662 case BIT_FIELD_REF:
2663 return 3;
2665 case CONSTRUCTOR:
2666 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2668 default:
2669 return gimple_num_ops (stmt) - 1;
2673 /* Initialize VNO from STMT. */
2675 static void
2676 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2678 unsigned i;
2680 vno->opcode = gimple_assign_rhs_code (stmt);
2681 vno->type = gimple_expr_type (stmt);
2682 switch (vno->opcode)
2684 case REALPART_EXPR:
2685 case IMAGPART_EXPR:
2686 case VIEW_CONVERT_EXPR:
2687 vno->length = 1;
2688 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2689 break;
2691 case BIT_FIELD_REF:
2692 vno->length = 3;
2693 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2694 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2695 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2696 break;
2698 case CONSTRUCTOR:
2699 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2700 for (i = 0; i < vno->length; ++i)
2701 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2702 break;
2704 default:
2705 gcc_checking_assert (!gimple_assign_single_p (stmt));
2706 vno->length = gimple_num_ops (stmt) - 1;
2707 for (i = 0; i < vno->length; ++i)
2708 vno->op[i] = gimple_op (stmt, i + 1);
2712 /* Compute the hashcode for VNO and look for it in the hash table;
2713 return the resulting value number if it exists in the hash table.
2714 Return NULL_TREE if it does not exist in the hash table or if the
2715 result field of the operation is NULL. VNRESULT will contain the
2716 vn_nary_op_t from the hashtable if it exists. */
2718 static tree
2719 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2721 vn_nary_op_s **slot;
2723 if (vnresult)
2724 *vnresult = NULL;
2726 vno->hashcode = vn_nary_op_compute_hash (vno);
2727 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2728 NO_INSERT);
2729 if (!slot && current_info == optimistic_info)
2730 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2731 NO_INSERT);
2732 if (!slot)
2733 return NULL_TREE;
2734 if (vnresult)
2735 *vnresult = *slot;
2736 return (*slot)->result;
2739 /* Lookup an n-ary operation by its pieces and return the resulting value
2740 number if it exists in the hash table. Return NULL_TREE if it does
2741 not exist in the hash table or if the result field of the operation
2742 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2743 if it exists. */
2745 tree
2746 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2747 tree type, tree *ops, vn_nary_op_t *vnresult)
2749 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2750 sizeof_vn_nary_op (length));
2751 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2752 return vn_nary_op_lookup_1 (vno1, vnresult);
2755 /* Lookup OP in the current hash table, and return the resulting value
2756 number if it exists in the hash table. Return NULL_TREE if it does
2757 not exist in the hash table or if the result field of the operation
2758 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2759 if it exists. */
2761 tree
2762 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2764 vn_nary_op_t vno1
2765 = XALLOCAVAR (struct vn_nary_op_s,
2766 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2767 init_vn_nary_op_from_op (vno1, op);
2768 return vn_nary_op_lookup_1 (vno1, vnresult);
2771 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2772 value number if it exists in the hash table. Return NULL_TREE if
2773 it does not exist in the hash table. VNRESULT will contain the
2774 vn_nary_op_t from the hashtable if it exists. */
2776 tree
2777 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2779 vn_nary_op_t vno1
2780 = XALLOCAVAR (struct vn_nary_op_s,
2781 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2782 init_vn_nary_op_from_stmt (vno1, stmt);
2783 return vn_nary_op_lookup_1 (vno1, vnresult);
2786 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2788 static vn_nary_op_t
2789 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2791 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2794 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2795 obstack. */
2797 static vn_nary_op_t
2798 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2800 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2801 &current_info->nary_obstack);
2803 vno1->value_id = value_id;
2804 vno1->length = length;
2805 vno1->result = result;
2807 return vno1;
2810 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2811 VNO->HASHCODE first. */
2813 static vn_nary_op_t
2814 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2815 bool compute_hash)
2817 vn_nary_op_s **slot;
2819 if (compute_hash)
2820 vno->hashcode = vn_nary_op_compute_hash (vno);
2822 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2823 /* While we do not want to insert things twice it's awkward to
2824 avoid it in the case where visit_nary_op pattern-matches stuff
2825 and ends up simplifying the replacement to itself. We then
2826 get two inserts, one from visit_nary_op and one from
2827 vn_nary_build_or_lookup.
2828 So allow inserts with the same value number. */
2829 if (*slot && (*slot)->result == vno->result)
2830 return *slot;
2832 gcc_assert (!*slot);
2834 *slot = vno;
2835 return vno;
2838 /* Insert an n-ary operation into the current hash table using its
2839 pieces. Return the vn_nary_op_t structure we created and put in
2840 the hashtable. */
2842 vn_nary_op_t
2843 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2844 tree type, tree *ops,
2845 tree result, unsigned int value_id)
2847 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2848 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2849 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2852 /* Insert OP into the current hash table with a value number of
2853 RESULT. Return the vn_nary_op_t structure we created and put in
2854 the hashtable. */
2856 vn_nary_op_t
2857 vn_nary_op_insert (tree op, tree result)
2859 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2860 vn_nary_op_t vno1;
2862 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2863 init_vn_nary_op_from_op (vno1, op);
2864 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2867 /* Insert the rhs of STMT into the current hash table with a value number of
2868 RESULT. */
2870 static vn_nary_op_t
2871 vn_nary_op_insert_stmt (gimple *stmt, tree result)
2873 vn_nary_op_t vno1
2874 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2875 result, VN_INFO (result)->value_id);
2876 init_vn_nary_op_from_stmt (vno1, stmt);
2877 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2880 /* Compute a hashcode for PHI operation VP1 and return it. */
2882 static inline hashval_t
2883 vn_phi_compute_hash (vn_phi_t vp1)
2885 inchash::hash hstate (vp1->phiargs.length () > 2
2886 ? vp1->block->index : vp1->phiargs.length ());
2887 tree phi1op;
2888 tree type;
2889 edge e;
2890 edge_iterator ei;
2892 /* If all PHI arguments are constants we need to distinguish
2893 the PHI node via its type. */
2894 type = vp1->type;
2895 hstate.merge_hash (vn_hash_type (type));
2897 FOR_EACH_EDGE (e, ei, vp1->block->preds)
2899 /* Don't hash backedge values; they need to be handled as VN_TOP
2900 for optimistic value-numbering. */
2901 if (e->flags & EDGE_DFS_BACK)
2902 continue;
2904 phi1op = vp1->phiargs[e->dest_idx];
2905 if (phi1op == VN_TOP)
2906 continue;
2907 inchash::add_expr (phi1op, hstate);
2910 return hstate.end ();
2914 /* Return true if COND1 and COND2 represent the same condition, set
2915 *INVERTED_P if one needs to be inverted to make it the same as
2916 the other. */
2918 static bool
2919 cond_stmts_equal_p (gcond *cond1, gcond *cond2, bool *inverted_p)
2921 enum tree_code code1 = gimple_cond_code (cond1);
2922 enum tree_code code2 = gimple_cond_code (cond2);
2923 tree lhs1 = gimple_cond_lhs (cond1);
2924 tree lhs2 = gimple_cond_lhs (cond2);
2925 tree rhs1 = gimple_cond_rhs (cond1);
2926 tree rhs2 = gimple_cond_rhs (cond2);
2928 *inverted_p = false;
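/* For example (illustrative only), a_1 < b_2 and b_2 > a_1 represent the
   same condition, while a_1 < b_2 and a_1 >= b_2 match with *INVERTED_P
   set (modulo NaN handling for floating-point operands). */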
2929 if (code1 == code2)
2931 else if (code1 == swap_tree_comparison (code2))
2932 std::swap (lhs2, rhs2);
2933 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
2934 *inverted_p = true;
2935 else if (code1 == invert_tree_comparison
2936 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
2938 std::swap (lhs2, rhs2);
2939 *inverted_p = true;
2941 else
2942 return false;
2944 lhs1 = vn_valueize (lhs1);
2945 rhs1 = vn_valueize (rhs1);
2946 lhs2 = vn_valueize (lhs2);
2947 rhs2 = vn_valueize (rhs2);
2948 return ((expressions_equal_p (lhs1, lhs2)
2949 && expressions_equal_p (rhs1, rhs2))
2950 || (commutative_tree_code (code1)
2951 && expressions_equal_p (lhs1, rhs2)
2952 && expressions_equal_p (rhs1, lhs2)));
2955 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2957 static int
2958 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2960 if (vp1->hashcode != vp2->hashcode)
2961 return false;
2963 if (vp1->block != vp2->block)
2965 if (vp1->phiargs.length () != vp2->phiargs.length ())
2966 return false;
2968 switch (vp1->phiargs.length ())
2970 case 1:
2971 /* Single-arg PHIs are just copies. */
2972 break;
2974 case 2:
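/* Two-argument PHIs in different blocks can still be congruent when they
   are controlled by the same (or inverted) condition and their true/false
   arguments correspond; e.g. (illustrative only) two separate diamonds
   both computing x = a_1 < b_2 ? c_3 : d_4. */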
2976 /* Rule out backedges into the PHI. */
2977 if (vp1->block->loop_father->header == vp1->block
2978 || vp2->block->loop_father->header == vp2->block)
2979 return false;
2981 /* If the PHI nodes do not have compatible types
2982 they are not the same. */
2983 if (!types_compatible_p (vp1->type, vp2->type))
2984 return false;
2986 basic_block idom1
2987 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
2988 basic_block idom2
2989 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
2990 /* If the immediate dominators end in switch stmts, multiple
2991 values may end up in the same PHI arg via intermediate
2992 CFG merges. */
2993 if (EDGE_COUNT (idom1->succs) != 2
2994 || EDGE_COUNT (idom2->succs) != 2)
2995 return false;
2997 /* Verify the controlling stmt is the same. */
2998 gimple *last1 = last_stmt (idom1);
2999 gimple *last2 = last_stmt (idom2);
3000 if (gimple_code (last1) != GIMPLE_COND
3001 || gimple_code (last2) != GIMPLE_COND)
3002 return false;
3003 bool inverted_p;
3004 if (! cond_stmts_equal_p (as_a <gcond *> (last1),
3005 as_a <gcond *> (last2), &inverted_p))
3006 return false;
3008 /* Get at true/false controlled edges into the PHI. */
3009 edge te1, te2, fe1, fe2;
3010 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3011 &te1, &fe1)
3012 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3013 &te2, &fe2))
3014 return false;
3016 /* Swap edges if the second condition is the inverted of the
3017 first. */
3018 if (inverted_p)
3019 std::swap (te2, fe2);
3021 /* ??? Handle VN_TOP specially. */
3022 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3023 vp2->phiargs[te2->dest_idx])
3024 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3025 vp2->phiargs[fe2->dest_idx]))
3026 return false;
3028 return true;
3031 default:
3032 return false;
3036 /* If the PHI nodes do not have compatible types
3037 they are not the same. */
3038 if (!types_compatible_p (vp1->type, vp2->type))
3039 return false;
3041 /* Any phi in the same block will have its arguments in the
3042 same edge order, because of how we store phi nodes. */
3043 int i;
3044 tree phi1op;
3045 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
3047 tree phi2op = vp2->phiargs[i];
3048 if (phi1op == VN_TOP || phi2op == VN_TOP)
3049 continue;
3050 if (!expressions_equal_p (phi1op, phi2op))
3051 return false;
3054 return true;
3057 static vec<tree> shared_lookup_phiargs;
3059 /* Lookup PHI in the current hash table, and return the resulting
3060 value number if it exists in the hash table. Return NULL_TREE if
3061 it does not exist in the hash table. */
3063 static tree
3064 vn_phi_lookup (gimple *phi)
3066 vn_phi_s **slot;
3067 struct vn_phi_s vp1;
3068 edge e;
3069 edge_iterator ei;
3071 shared_lookup_phiargs.truncate (0);
3072 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
3074 /* Canonicalize the SSA_NAME's to their value number. */
3075 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3077 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3078 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3079 shared_lookup_phiargs[e->dest_idx] = def;
3081 vp1.type = TREE_TYPE (gimple_phi_result (phi));
3082 vp1.phiargs = shared_lookup_phiargs;
3083 vp1.block = gimple_bb (phi);
3084 vp1.hashcode = vn_phi_compute_hash (&vp1);
3085 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3086 NO_INSERT);
3087 if (!slot && current_info == optimistic_info)
3088 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3089 NO_INSERT);
3090 if (!slot)
3091 return NULL_TREE;
3092 return (*slot)->result;
3095 /* Insert PHI into the current hash table with a value number of
3096 RESULT. */
3098 static vn_phi_t
3099 vn_phi_insert (gimple *phi, tree result)
3101 vn_phi_s **slot;
3102 vn_phi_t vp1 = current_info->phis_pool->allocate ();
3103 vec<tree> args = vNULL;
3104 edge e;
3105 edge_iterator ei;
3107 args.safe_grow (gimple_phi_num_args (phi));
3109 /* Canonicalize the SSA_NAME's to their value number. */
3110 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3112 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3113 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3114 args[e->dest_idx] = def;
3116 vp1->value_id = VN_INFO (result)->value_id;
3117 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3118 vp1->phiargs = args;
3119 vp1->block = gimple_bb (phi);
3120 vp1->result = result;
3121 vp1->hashcode = vn_phi_compute_hash (vp1);
3123 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3125 /* Because we iterate over phi operations more than once, it's
3126 possible the slot might already exist here, hence no assert. */
3127 *slot = vp1;
3128 return vp1;
3132 /* Print set of components in strongly connected component SCC to OUT. */
3134 static void
3135 print_scc (FILE *out, vec<tree> scc)
3137 tree var;
3138 unsigned int i;
3140 fprintf (out, "SCC consists of:");
3141 FOR_EACH_VEC_ELT (scc, i, var)
3143 fprintf (out, " ");
3144 print_generic_expr (out, var, 0);
3146 fprintf (out, "\n");
3149 /* Return true if BB1 is dominated by BB2 taking into account edges
3150 that are not executable. */
3152 static bool
3153 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3155 edge_iterator ei;
3156 edge e;
3158 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3159 return true;
3161 /* Before iterating we'd like to know if there exists an
3162 (executable) path from bb2 to bb1 at all; if not we can
3163 directly return false. For now simply iterate once. */
3165 /* Iterate to the single executable bb1 predecessor. */
3166 if (EDGE_COUNT (bb1->preds) > 1)
3168 edge prede = NULL;
3169 FOR_EACH_EDGE (e, ei, bb1->preds)
3170 if (e->flags & EDGE_EXECUTABLE)
3172 if (prede)
3174 prede = NULL;
3175 break;
3177 prede = e;
3179 if (prede)
3181 bb1 = prede->src;
3183 /* Re-do the dominance check with changed bb1. */
3184 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3185 return true;
3189 /* Iterate to the single executable bb2 successor. */
3190 edge succe = NULL;
3191 FOR_EACH_EDGE (e, ei, bb2->succs)
3192 if (e->flags & EDGE_EXECUTABLE)
3194 if (succe)
3196 succe = NULL;
3197 break;
3199 succe = e;
3201 if (succe)
3203 /* Verify the reached block is only reached through succe.
3204 If there is only one edge we can spare us the dominator
3205 check and iterate directly. */
3206 if (EDGE_COUNT (succe->dest->preds) > 1)
3208 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3209 if (e != succe
3210 && (e->flags & EDGE_EXECUTABLE))
3212 succe = NULL;
3213 break;
3216 if (succe)
3218 bb2 = succe->dest;
3220 /* Re-do the dominance check with changed bb2. */
3221 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3222 return true;
3226 /* We could now iterate updating bb1 / bb2. */
3227 return false;
3230 /* Set the value number of FROM to TO, return true if it has changed
3231 as a result. */
3233 static inline bool
3234 set_ssa_val_to (tree from, tree to)
3236 tree currval = SSA_VAL (from);
3237 HOST_WIDE_INT toff, coff;
3239 /* The only thing we allow as value numbers are ssa_names
3240 and invariants. So assert that here. We don't allow VN_TOP
3241 as visiting a stmt should produce a value-number other than
3242 that.
3243 ??? Still VN_TOP can happen for unreachable code, so force
3244 it to varying in that case. Not all code is prepared to
3245 get VN_TOP on valueization. */
3246 if (to == VN_TOP)
3248 if (dump_file && (dump_flags & TDF_DETAILS))
3249 fprintf (dump_file, "Forcing value number to varying on "
3250 "receiving VN_TOP\n");
3251 to = from;
3254 gcc_assert (to != NULL_TREE
3255 && ((TREE_CODE (to) == SSA_NAME
3256 && (to == from || SSA_VAL (to) == to))
3257 || is_gimple_min_invariant (to)));
3259 if (from != to)
3261 if (currval == from)
3263 if (dump_file && (dump_flags & TDF_DETAILS))
3265 fprintf (dump_file, "Not changing value number of ");
3266 print_generic_expr (dump_file, from, 0);
3267 fprintf (dump_file, " from VARYING to ");
3268 print_generic_expr (dump_file, to, 0);
3269 fprintf (dump_file, "\n");
3271 return false;
3273 else if (currval != VN_TOP
3274 && ! is_gimple_min_invariant (currval)
3275 && is_gimple_min_invariant (to))
3277 if (dump_file && (dump_flags & TDF_DETAILS))
3279 fprintf (dump_file, "Forcing VARYING instead of changing "
3280 "value number of ");
3281 print_generic_expr (dump_file, from, 0);
3282 fprintf (dump_file, " from ");
3283 print_generic_expr (dump_file, currval, 0);
3284 fprintf (dump_file, " (non-constant) to ");
3285 print_generic_expr (dump_file, to, 0);
3286 fprintf (dump_file, " (constant)\n");
3288 to = from;
3290 else if (TREE_CODE (to) == SSA_NAME
3291 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3292 to = from;
3295 if (dump_file && (dump_flags & TDF_DETAILS))
3297 fprintf (dump_file, "Setting value number of ");
3298 print_generic_expr (dump_file, from, 0);
3299 fprintf (dump_file, " to ");
3300 print_generic_expr (dump_file, to, 0);
3303 if (currval != to
3304 && !operand_equal_p (currval, to, 0)
3305 /* ??? For addresses involving volatile objects or types operand_equal_p
3306 does not reliably detect ADDR_EXPRs as equal. We know we are only
3307 getting invariant gimple addresses here, so can use
3308 get_addr_base_and_unit_offset to do this comparison. */
3309 && !(TREE_CODE (currval) == ADDR_EXPR
3310 && TREE_CODE (to) == ADDR_EXPR
3311 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3312 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3313 && coff == toff))
3315 /* If we equate two SSA names we have to make the side-band info
3316 of the leader conservative (and remember whatever original value
3317 was present). */
3318 if (TREE_CODE (to) == SSA_NAME)
3320 if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3321 && SSA_NAME_RANGE_INFO (to))
3323 if (SSA_NAME_IS_DEFAULT_DEF (to)
3324 || dominated_by_p_w_unex
3325 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3326 gimple_bb (SSA_NAME_DEF_STMT (to))))
3327 /* Keep the info from the dominator. */
3329 else if (SSA_NAME_IS_DEFAULT_DEF (from)
3330 || dominated_by_p_w_unex
3331 (gimple_bb (SSA_NAME_DEF_STMT (to)),
3332 gimple_bb (SSA_NAME_DEF_STMT (from))))
3334 /* Save old info. */
3335 if (! VN_INFO (to)->info.range_info)
3337 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3338 VN_INFO (to)->range_info_anti_range_p
3339 = SSA_NAME_ANTI_RANGE_P (to);
3341 /* Use that from the dominator. */
3342 SSA_NAME_RANGE_INFO (to) = SSA_NAME_RANGE_INFO (from);
3343 SSA_NAME_ANTI_RANGE_P (to) = SSA_NAME_ANTI_RANGE_P (from);
3345 else
3347 /* Save old info. */
3348 if (! VN_INFO (to)->info.range_info)
3350 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3351 VN_INFO (to)->range_info_anti_range_p
3352 = SSA_NAME_ANTI_RANGE_P (to);
3354 /* Rather than allocating memory and unioning the info
3355 just clear it. */
3356 SSA_NAME_RANGE_INFO (to) = NULL;
3359 else if (POINTER_TYPE_P (TREE_TYPE (to))
3360 && SSA_NAME_PTR_INFO (to))
3362 if (SSA_NAME_IS_DEFAULT_DEF (to)
3363 || dominated_by_p_w_unex
3364 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3365 gimple_bb (SSA_NAME_DEF_STMT (to))))
3366 /* Keep the info from the dominator. */
3368 else if (SSA_NAME_IS_DEFAULT_DEF (from)
3369 || dominated_by_p_w_unex
3370 (gimple_bb (SSA_NAME_DEF_STMT (to)),
3371 gimple_bb (SSA_NAME_DEF_STMT (from))))
3373 /* Save old info. */
3374 if (! VN_INFO (to)->info.ptr_info)
3375 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3376 /* Use that from the dominator. */
3377 SSA_NAME_PTR_INFO (to) = SSA_NAME_PTR_INFO (from);
3379 else if (! SSA_NAME_PTR_INFO (from)
3380 /* Handle the case of trivially equivalent info. */
3381 || memcmp (SSA_NAME_PTR_INFO (to),
3382 SSA_NAME_PTR_INFO (from),
3383 sizeof (ptr_info_def)) != 0)
3385 /* Save old info. */
3386 if (! VN_INFO (to)->info.ptr_info)
3387 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3388 /* Rather than allocating memory and unioning the info
3389 just clear it. */
3390 SSA_NAME_PTR_INFO (to) = NULL;
3395 VN_INFO (from)->valnum = to;
3396 if (dump_file && (dump_flags & TDF_DETAILS))
3397 fprintf (dump_file, " (changed)\n");
3398 return true;
3400 if (dump_file && (dump_flags & TDF_DETAILS))
3401 fprintf (dump_file, "\n");
3402 return false;
3405 /* Mark as processed all the definitions in the defining stmt of USE, or
3406 the USE itself. */
3408 static void
3409 mark_use_processed (tree use)
3411 ssa_op_iter iter;
3412 def_operand_p defp;
3413 gimple *stmt = SSA_NAME_DEF_STMT (use);
3415 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3417 VN_INFO (use)->use_processed = true;
3418 return;
3421 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3423 tree def = DEF_FROM_PTR (defp);
3425 VN_INFO (def)->use_processed = true;
3429 /* Set all definitions in STMT to value number to themselves.
3430 Return true if a value number changed. */
3432 static bool
3433 defs_to_varying (gimple *stmt)
3435 bool changed = false;
3436 ssa_op_iter iter;
3437 def_operand_p defp;
3439 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3441 tree def = DEF_FROM_PTR (defp);
3442 changed |= set_ssa_val_to (def, def);
3444 return changed;
3447 /* Visit a copy between LHS and RHS, return true if the value number
3448 changed. */
3450 static bool
3451 visit_copy (tree lhs, tree rhs)
3453 /* Valueize. */
3454 rhs = SSA_VAL (rhs);
3456 return set_ssa_val_to (lhs, rhs);
3459 /* Lookup a value of type WIDE_TYPE that, converted to the type of OP,
3460 has the same value as OP. Return NULL_TREE if no such value is available. */
3462 static tree
3463 valueized_wider_op (tree wide_type, tree op)
3465 if (TREE_CODE (op) == SSA_NAME)
3466 op = SSA_VAL (op);
3468 /* Either we have the op widened available. */
3469 tree ops[3] = {};
3470 ops[0] = op;
3471 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
3472 wide_type, ops, NULL);
3473 if (tem)
3474 return tem;
3476 /* Or the op is truncated from some existing value. */
3477 if (TREE_CODE (op) == SSA_NAME)
3479 gimple *def = SSA_NAME_DEF_STMT (op);
3480 if (is_gimple_assign (def)
3481 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3483 tem = gimple_assign_rhs1 (def);
3484 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
3486 if (TREE_CODE (tem) == SSA_NAME)
3487 tem = SSA_VAL (tem);
3488 return tem;
3493 /* For constants simply extend it. */
3494 if (TREE_CODE (op) == INTEGER_CST)
3495 return wide_int_to_tree (wide_type, op);
3497 return NULL_TREE;
3500 /* Visit a nary operator RHS, value number it, and return true if the
3501 value number of LHS has changed as a result. */
3503 static bool
3504 visit_nary_op (tree lhs, gassign *stmt)
3506 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3507 if (result)
3508 return set_ssa_val_to (lhs, result);
3510 /* Do some special pattern matching for redundancies of operations
3511 in different types. */
3512 enum tree_code code = gimple_assign_rhs_code (stmt);
3513 tree type = TREE_TYPE (lhs);
3514 tree rhs1 = gimple_assign_rhs1 (stmt);
3515 switch (code)
3517 CASE_CONVERT:
3518 /* Match arithmetic done in a different type where we can easily
3519 substitute the result from some earlier sign-changed or widened
3520 operation. */
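/* For example (illustrative only), with unsigned short a_1, b_2 and
     c_3 = (int) a_1 + (int) b_2;
   already value-numbered, a later
     t_4 = a_1 + b_2;
     s_5 = (int) t_4;
   can value s_5 as c_3 & 0xffff, re-using the wider addition. */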
3521 if (INTEGRAL_TYPE_P (type)
3522 && TREE_CODE (rhs1) == SSA_NAME
3523 /* We only handle sign-changes or zero-extension -> & mask. */
3524 && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
3525 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
3526 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
3528 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
3529 if (def
3530 && (gimple_assign_rhs_code (def) == PLUS_EXPR
3531 || gimple_assign_rhs_code (def) == MINUS_EXPR
3532 || gimple_assign_rhs_code (def) == MULT_EXPR))
3534 tree ops[3] = {};
3535 /* Either we have the op widened available. */
3536 ops[0] = valueized_wider_op (type,
3537 gimple_assign_rhs1 (def));
3538 if (ops[0])
3539 ops[1] = valueized_wider_op (type,
3540 gimple_assign_rhs2 (def));
3541 if (ops[0] && ops[1])
3543 ops[0] = vn_nary_op_lookup_pieces
3544 (2, gimple_assign_rhs_code (def), type, ops, NULL);
3545 /* We have the wider operation available. */
3546 if (ops[0])
3548 unsigned lhs_prec = TYPE_PRECISION (type);
3549 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
3550 if (lhs_prec == rhs_prec)
3552 ops[1] = NULL_TREE;
3553 result = vn_nary_build_or_lookup (NOP_EXPR,
3554 type, ops);
3555 if (result)
3557 bool changed = set_ssa_val_to (lhs, result);
3558 vn_nary_op_insert_stmt (stmt, result);
3559 return changed;
3562 else
3564 ops[1] = wide_int_to_tree (type,
3565 wi::mask (rhs_prec, false,
3566 lhs_prec));
3567 result = vn_nary_build_or_lookup (BIT_AND_EXPR,
3568 TREE_TYPE (lhs),
3569 ops);
3570 if (result)
3572 bool changed = set_ssa_val_to (lhs, result);
3573 vn_nary_op_insert_stmt (stmt, result);
3574 return changed;
3581 default:;
3584 bool changed = set_ssa_val_to (lhs, lhs);
3585 vn_nary_op_insert_stmt (stmt, lhs);
3586 return changed;
3589 /* Visit a call STMT storing into LHS. Return true if the value number
3590 of the LHS has changed as a result. */
3592 static bool
3593 visit_reference_op_call (tree lhs, gcall *stmt)
3595 bool changed = false;
3596 struct vn_reference_s vr1;
3597 vn_reference_t vnresult = NULL;
3598 tree vdef = gimple_vdef (stmt);
3600 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3601 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3602 lhs = NULL_TREE;
3604 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3605 if (vnresult)
3607 if (vnresult->result_vdef && vdef)
3608 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3609 else if (vdef)
3610 /* If the call was discovered to be pure or const reflect
3611 that as far as possible. */
3612 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
3614 if (!vnresult->result && lhs)
3615 vnresult->result = lhs;
3617 if (vnresult->result && lhs)
3618 changed |= set_ssa_val_to (lhs, vnresult->result);
3620 else
3622 vn_reference_t vr2;
3623 vn_reference_s **slot;
3624 tree vdef_val = vdef;
3625 if (vdef)
3627 /* If we value numbered the function of an indirect call to
3628 one not clobbering memory, value number the call's VDEF to
3629 its VUSE. */
3630 tree fn = gimple_call_fn (stmt);
3631 if (fn && TREE_CODE (fn) == SSA_NAME)
3633 fn = SSA_VAL (fn);
3634 if (TREE_CODE (fn) == ADDR_EXPR
3635 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3636 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
3637 & (ECF_CONST | ECF_PURE)))
3638 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
3640 changed |= set_ssa_val_to (vdef, vdef_val);
3642 if (lhs)
3643 changed |= set_ssa_val_to (lhs, lhs);
3644 vr2 = current_info->references_pool->allocate ();
3645 vr2->vuse = vr1.vuse;
3646 /* As we are not walking the virtual operand chain we know the
3647 shared_lookup_references are still original so we can re-use
3648 them here. */
3649 vr2->operands = vr1.operands.copy ();
3650 vr2->type = vr1.type;
3651 vr2->set = vr1.set;
3652 vr2->hashcode = vr1.hashcode;
3653 vr2->result = lhs;
3654 vr2->result_vdef = vdef_val;
3655 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3656 INSERT);
3657 gcc_assert (!*slot);
3658 *slot = vr2;
3661 return changed;
3664 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3665 and return true if the value number of the LHS has changed as a result. */
3667 static bool
3668 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3670 bool changed = false;
3671 tree last_vuse;
3672 tree result;
3674 last_vuse = gimple_vuse (stmt);
3675 last_vuse_ptr = &last_vuse;
3676 result = vn_reference_lookup (op, gimple_vuse (stmt),
3677 default_vn_walk_kind, NULL, true);
3678 last_vuse_ptr = NULL;
3680 /* We handle type-punning through unions by value-numbering based
3681 on offset and size of the access. Be prepared to handle a
3682 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
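/* For example (illustrative only), with
     union { int i; float f; } u;
     u.i = i_1;
     ... = u.f;
   the load of u.f finds the stored value i_1 and is expressed as
   VIEW_CONVERT_EXPR <float> (i_1). */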
3683 if (result
3684 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3686 /* We will be setting the value number of lhs to the value number
3687 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3688 So first simplify and lookup this expression to see if it
3689 is already available. */
3690 code_helper rcode = VIEW_CONVERT_EXPR;
3691 tree ops[3] = { result };
3692 result = vn_nary_build_or_lookup (rcode, TREE_TYPE (op), ops);
3695 if (result)
3696 changed = set_ssa_val_to (lhs, result);
3697 else
3699 changed = set_ssa_val_to (lhs, lhs);
3700 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3703 return changed;
3707 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3708 and return true if the value number of the LHS has changed as a result. */
3710 static bool
3711 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3713 bool changed = false;
3714 vn_reference_t vnresult = NULL;
3715 tree assign;
3716 bool resultsame = false;
3717 tree vuse = gimple_vuse (stmt);
3718 tree vdef = gimple_vdef (stmt);
3720 if (TREE_CODE (op) == SSA_NAME)
3721 op = SSA_VAL (op);
3723 /* First we want to lookup using the *vuses* from the store and see
3724 whether the last store to this location with the same address
3725 stored the same value.
3727 The vuses represent the memory state before the store. If the
3728 memory state, address, and value of the store is the same as the
3729 last store to this location, then this store will produce the
3730 same memory state as that store.
3732 In this case the vdef versions for this store are value numbered to those
3733 vuse versions, since they represent the same memory state after
3734 this store.
3736 Otherwise, the vdefs for the store are used when inserting into
3737 the table, since the store generates a new memory state. */
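/* For example (illustrative only), given two adjacent stores
     # .MEM_2 = VDEF <.MEM_1>
     *p_1 = x_3;
     # .MEM_4 = VDEF <.MEM_2>
     *p_1 = x_3;
   the second store writes the value the memory state .MEM_2 already holds
   at that location, so .MEM_4 is value-numbered to .MEM_2 and the
   redundant store can later be eliminated. */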
3739 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
3740 if (vnresult
3741 && vnresult->result)
3743 tree result = vnresult->result;
3744 if (TREE_CODE (result) == SSA_NAME)
3745 result = SSA_VAL (result);
3746 resultsame = expressions_equal_p (result, op);
3747 if (resultsame)
3749 /* If the TBAA state isn't compatible for downstream reads
3750 we cannot value-number the VDEFs the same. */
3751 alias_set_type set = get_alias_set (lhs);
3752 if (vnresult->set != set
3753 && ! alias_set_subset_of (set, vnresult->set))
3754 resultsame = false;
3758 if (!resultsame)
3760 /* Only perform the following when being called from PRE
3761 which embeds tail merging. */
3762 if (default_vn_walk_kind == VN_WALK)
3764 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3765 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3766 if (vnresult)
3768 VN_INFO (vdef)->use_processed = true;
3769 return set_ssa_val_to (vdef, vnresult->result_vdef);
3773 if (dump_file && (dump_flags & TDF_DETAILS))
3775 fprintf (dump_file, "No store match\n");
3776 fprintf (dump_file, "Value numbering store ");
3777 print_generic_expr (dump_file, lhs, 0);
3778 fprintf (dump_file, " to ");
3779 print_generic_expr (dump_file, op, 0);
3780 fprintf (dump_file, "\n");
3782 /* Have to set value numbers before insert, since insert is
3783 going to valueize the references in-place. */
3784 if (vdef)
3785 changed |= set_ssa_val_to (vdef, vdef);
3787 /* Do not insert structure copies into the tables. */
3788 if (is_gimple_min_invariant (op)
3789 || is_gimple_reg (op))
3790 vn_reference_insert (lhs, op, vdef, NULL);
3792 /* Only perform the following when being called from PRE
3793 which embeds tail merging. */
3794 if (default_vn_walk_kind == VN_WALK)
3796 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3797 vn_reference_insert (assign, lhs, vuse, vdef);
3800 else
3802 /* We had a match, so value number the vdef to have the value
3803 number of the vuse it came from. */
3805 if (dump_file && (dump_flags & TDF_DETAILS))
3806 fprintf (dump_file, "Store matched earlier value, "
3807 "value numbering store vdefs to matching vuses.\n");
3809 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3812 return changed;
3815 /* Visit and value number PHI, return true if the value number
3816 changed. */
3818 static bool
3819 visit_phi (gimple *phi)
3821 bool changed = false;
3822 tree result;
3823 tree sameval = VN_TOP;
3824 bool allsame = true;
3825 unsigned n_executable = 0;
3827 /* TODO: We could check for this in init_sccvn, and replace this
3828 with a gcc_assert. */
3829 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3830 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3832 /* See if all non-TOP arguments have the same value. TOP is
3833 equivalent to everything, so we can ignore it. */
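/* For example (illustrative only), x_4 = PHI <a_1(2), a_1(3)> with both
   incoming edges executable value-numbers x_4 to a_1, while differing
   arguments force x_4 to get its own value number unless an equivalent
   PHI is found below. */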
3834 edge_iterator ei;
3835 edge e;
3836 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3837 if (e->flags & EDGE_EXECUTABLE)
3839 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3841 ++n_executable;
3842 if (TREE_CODE (def) == SSA_NAME)
3843 def = SSA_VAL (def);
3844 if (def == VN_TOP)
3845 continue;
3846 if (sameval == VN_TOP)
3847 sameval = def;
3848 else if (!expressions_equal_p (def, sameval))
3850 allsame = false;
3851 break;
3855 /* If none of the edges was executable or all incoming values are
3856 undefined keep the value-number at VN_TOP. If only a single edge
3857 is executable use its value. */
3858 if (sameval == VN_TOP
3859 || n_executable == 1)
3860 return set_ssa_val_to (PHI_RESULT (phi), sameval);
3862 /* First see if it is equivalent to a phi node in this block. We prefer
3863 this as it allows IV elimination - see PRs 66502 and 67167. */
3864 result = vn_phi_lookup (phi);
3865 if (result)
3866 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3867 /* Otherwise, if all arguments value numbered to the same value, the phi node has that
3868 value. */
3869 else if (allsame)
3870 changed = set_ssa_val_to (PHI_RESULT (phi), sameval);
3871 else
3873 vn_phi_insert (phi, PHI_RESULT (phi));
3874 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3877 return changed;
3880 /* Try to simplify RHS using equivalences and constant folding. */
3882 static tree
3883 try_to_simplify (gassign *stmt)
3885 enum tree_code code = gimple_assign_rhs_code (stmt);
3886 tree tem;
3888 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
3889 in this case, there is no point in doing extra work. */
3890 if (code == SSA_NAME)
3891 return NULL_TREE;
3893 /* First try constant folding based on our current lattice. */
3894 mprts_hook = vn_lookup_simplify_result;
3895 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3896 mprts_hook = NULL;
3897 if (tem
3898 && (TREE_CODE (tem) == SSA_NAME
3899 || is_gimple_min_invariant (tem)))
3900 return tem;
3902 return NULL_TREE;
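/* A minimal source-level sketch (hypothetical example) of the lattice-based
   folding above: once a is known to have the constant value 4, the
   multiplication can fold to a constant even though its operand is an
   SSA name.

       int
       fold_through_lattice (void)
       {
         int a = 4;       // a_1 value numbers to 4
         return a * 2;    // can fold to 8 via vn_valueize
       }
*/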
3905 /* Visit and value number USE, return true if the value number
3906 changed. */
3908 static bool
3909 visit_use (tree use)
3911 bool changed = false;
3912 gimple *stmt = SSA_NAME_DEF_STMT (use);
3914 mark_use_processed (use);
3916 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3917 if (dump_file && (dump_flags & TDF_DETAILS)
3918 && !SSA_NAME_IS_DEFAULT_DEF (use))
3920 fprintf (dump_file, "Value numbering ");
3921 print_generic_expr (dump_file, use, 0);
3922 fprintf (dump_file, " stmt = ");
3923 print_gimple_stmt (dump_file, stmt, 0, 0);
3926 /* Handle uninitialized uses. */
3927 if (SSA_NAME_IS_DEFAULT_DEF (use))
3928 changed = set_ssa_val_to (use, use);
3929 else if (gimple_code (stmt) == GIMPLE_PHI)
3930 changed = visit_phi (stmt);
3931 else if (gimple_has_volatile_ops (stmt))
3932 changed = defs_to_varying (stmt);
3933 else if (gassign *ass = dyn_cast <gassign *> (stmt))
3935 enum tree_code code = gimple_assign_rhs_code (ass);
3936 tree lhs = gimple_assign_lhs (ass);
3937 tree rhs1 = gimple_assign_rhs1 (ass);
3938 tree simplified;
3940 /* Shortcut for copies. Simplifying copies is pointless,
3941 since we copy the expression and value they represent. */
3942 if (code == SSA_NAME
3943 && TREE_CODE (lhs) == SSA_NAME)
3945 changed = visit_copy (lhs, rhs1);
3946 goto done;
3948 simplified = try_to_simplify (ass);
3949 if (simplified)
3951 if (dump_file && (dump_flags & TDF_DETAILS))
3953 fprintf (dump_file, "RHS ");
3954 print_gimple_expr (dump_file, ass, 0, 0);
3955 fprintf (dump_file, " simplified to ");
3956 print_generic_expr (dump_file, simplified, 0);
3957 fprintf (dump_file, "\n");
3960 /* Setting value numbers to constants will occasionally
3961 screw up phi congruence because constants are not
3962 uniquely associated with a single ssa name that can be
3963 looked up. */
3964 if (simplified
3965 && is_gimple_min_invariant (simplified)
3966 && TREE_CODE (lhs) == SSA_NAME)
3968 changed = set_ssa_val_to (lhs, simplified);
3969 goto done;
3971 else if (simplified
3972 && TREE_CODE (simplified) == SSA_NAME
3973 && TREE_CODE (lhs) == SSA_NAME)
3975 changed = visit_copy (lhs, simplified);
3976 goto done;
3979 if ((TREE_CODE (lhs) == SSA_NAME
3980 /* We can substitute SSA_NAMEs that are live over
3981 abnormal edges with their constant value. */
3982 && !(gimple_assign_copy_p (ass)
3983 && is_gimple_min_invariant (rhs1))
3984 && !(simplified
3985 && is_gimple_min_invariant (simplified))
3986 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3987 /* Stores or copies from SSA_NAMEs that are live over
3988 abnormal edges are a problem. */
3989 || (code == SSA_NAME
3990 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3991 changed = defs_to_varying (ass);
3992 else if (REFERENCE_CLASS_P (lhs)
3993 || DECL_P (lhs))
3994 changed = visit_reference_op_store (lhs, rhs1, ass);
3995 else if (TREE_CODE (lhs) == SSA_NAME)
3997 if ((gimple_assign_copy_p (ass)
3998 && is_gimple_min_invariant (rhs1))
3999 || (simplified
4000 && is_gimple_min_invariant (simplified)))
4002 if (simplified)
4003 changed = set_ssa_val_to (lhs, simplified);
4004 else
4005 changed = set_ssa_val_to (lhs, rhs1);
4007 else
4009 /* Visit the original statement. */
4010 switch (vn_get_stmt_kind (ass))
4012 case VN_NARY:
4013 changed = visit_nary_op (lhs, ass);
4014 break;
4015 case VN_REFERENCE:
4016 changed = visit_reference_op_load (lhs, rhs1, ass);
4017 break;
4018 default:
4019 changed = defs_to_varying (ass);
4020 break;
4024 else
4025 changed = defs_to_varying (ass);
4027 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4029 tree lhs = gimple_call_lhs (call_stmt);
4030 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4032 /* Try constant folding based on our current lattice. */
4033 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4034 vn_valueize);
4035 if (simplified)
4037 if (dump_file && (dump_flags & TDF_DETAILS))
4039 fprintf (dump_file, "call ");
4040 print_gimple_expr (dump_file, call_stmt, 0, 0);
4041 fprintf (dump_file, " simplified to ");
4042 print_generic_expr (dump_file, simplified, 0);
4043 fprintf (dump_file, "\n");
4046 /* Setting value numbers to constants will occasionally
4047 screw up phi congruence because constants are not
4048 uniquely associated with a single ssa name that can be
4049 looked up. */
4050 if (simplified
4051 && is_gimple_min_invariant (simplified))
4053 changed = set_ssa_val_to (lhs, simplified);
4054 if (gimple_vdef (call_stmt))
4055 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4056 SSA_VAL (gimple_vuse (call_stmt)));
4057 goto done;
4059 else if (simplified
4060 && TREE_CODE (simplified) == SSA_NAME)
4062 changed = visit_copy (lhs, simplified);
4063 if (gimple_vdef (call_stmt))
4064 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4065 SSA_VAL (gimple_vuse (call_stmt)));
4066 goto done;
4068 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4070 changed = defs_to_varying (call_stmt);
4071 goto done;
4075 /* Pick up flags from a devirtualization target. */
4076 tree fn = gimple_call_fn (stmt);
4077 int extra_fnflags = 0;
4078 if (fn && TREE_CODE (fn) == SSA_NAME)
4080 fn = SSA_VAL (fn);
4081 if (TREE_CODE (fn) == ADDR_EXPR
4082 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4083 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4085 if (!gimple_call_internal_p (call_stmt)
4086 && (/* Calls to the same function with the same vuse
4087 and the same operands do not necessarily return the same
4088 value, unless they're pure or const. */
4089 ((gimple_call_flags (call_stmt) | extra_fnflags)
4090 & (ECF_PURE | ECF_CONST))
4091 /* If calls have a vdef, subsequent calls won't have
4092 the same incoming vuse. So, if 2 calls with vdef have the
4093 same vuse, we know they're not subsequent.
4094         We can value number 2 such calls to the same function with
4095         the same vuse and the same operands the same, even though
4096         they are not subsequent, because there is no code in the
4097         program that can compare the 2 values...
4098 || (gimple_vdef (call_stmt)
4099 /* ... unless the call returns a pointer which does
4100 not alias with anything else. In which case the
4101            information that the values are distinct is encoded
4102 in the IL. */
4103 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4104 /* Only perform the following when being called from PRE
4105 which embeds tail merging. */
4106 && default_vn_walk_kind == VN_WALK)))
4107 changed = visit_reference_op_call (lhs, call_stmt);
4108 else
4109 changed = defs_to_varying (call_stmt);
4111 else
4112 changed = defs_to_varying (stmt);
4113 done:
4114 return changed;
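/* A minimal source-level sketch (hypothetical example) of the call case
   above: two calls to a pure function with the same operands and the same
   incoming memory state receive the same value number, so the second call
   is redundant.

       extern int pure_fn (int) __attribute__ ((pure));

       int
       call_twice (int x)
       {
         int a = pure_fn (x);
         int b = pure_fn (x);   // same vuse and operands as the first call;
                                // b value numbers to a
         return a + b;
       }
*/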
4117 /* Compare two operands by reverse postorder index */
4119 static int
4120 compare_ops (const void *pa, const void *pb)
4122 const tree opa = *((const tree *)pa);
4123 const tree opb = *((const tree *)pb);
4124 gimple *opstmta = SSA_NAME_DEF_STMT (opa);
4125 gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
4126 basic_block bba;
4127 basic_block bbb;
4129 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
4130 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4131 else if (gimple_nop_p (opstmta))
4132 return -1;
4133 else if (gimple_nop_p (opstmtb))
4134 return 1;
4136 bba = gimple_bb (opstmta);
4137 bbb = gimple_bb (opstmtb);
4139 if (!bba && !bbb)
4140 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4141 else if (!bba)
4142 return -1;
4143 else if (!bbb)
4144 return 1;
4146 if (bba == bbb)
4148 if (gimple_code (opstmta) == GIMPLE_PHI
4149 && gimple_code (opstmtb) == GIMPLE_PHI)
4150 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4151 else if (gimple_code (opstmta) == GIMPLE_PHI)
4152 return -1;
4153 else if (gimple_code (opstmtb) == GIMPLE_PHI)
4154 return 1;
4155 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
4156 return gimple_uid (opstmta) - gimple_uid (opstmtb);
4157 else
4158 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4160 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
4163 /* Sort an array containing members of a strongly connected component
4164 SCC so that the members are ordered by RPO number.
4165 This means that when the sort is complete, iterating through the
4166 array will give you the members in RPO order. */
4168 static void
4169 sort_scc (vec<tree> scc)
4171 scc.qsort (compare_ops);
4174 /* Insert the no longer used nary ONARY into the hash table of INFO. */
4176 static void
4177 copy_nary (vn_nary_op_t onary, vn_tables_t info)
4179 size_t size = sizeof_vn_nary_op (onary->length);
4180 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
4181 &info->nary_obstack);
4182 memcpy (nary, onary, size);
4183 vn_nary_op_insert_into (nary, info->nary, false);
4186 /* Insert the no longer used phi OPHI into the hash table of INFO. */
4188 static void
4189 copy_phi (vn_phi_t ophi, vn_tables_t info)
4191 vn_phi_t phi = info->phis_pool->allocate ();
4192 vn_phi_s **slot;
4193 memcpy (phi, ophi, sizeof (*phi));
4194 ophi->phiargs.create (0);
4195 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
4196 gcc_assert (!*slot);
4197 *slot = phi;
4200 /* Insert the no longer used reference OREF into the hash table of INFO. */
4202 static void
4203 copy_reference (vn_reference_t oref, vn_tables_t info)
4205 vn_reference_t ref;
4206 vn_reference_s **slot;
4207 ref = info->references_pool->allocate ();
4208 memcpy (ref, oref, sizeof (*ref));
4209 oref->operands.create (0);
4210 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
4211 if (*slot)
4212 free_reference (*slot);
4213 *slot = ref;
4216 /* Process a strongly connected component in the SSA graph. */
4218 static void
4219 process_scc (vec<tree> scc)
4221 tree var;
4222 unsigned int i;
4223 unsigned int iterations = 0;
4224 bool changed = true;
4225 vn_nary_op_iterator_type hin;
4226 vn_phi_iterator_type hip;
4227 vn_reference_iterator_type hir;
4228 vn_nary_op_t nary;
4229 vn_phi_t phi;
4230 vn_reference_t ref;
4232 /* If the SCC has a single member, just visit it. */
4233 if (scc.length () == 1)
4235 tree use = scc[0];
4236 if (VN_INFO (use)->use_processed)
4237 return;
4238 /* We need to make sure it doesn't form a cycle itself, which can
4239 happen for self-referential PHI nodes. In that case we would
4240 end up inserting an expression with VN_TOP operands into the
4241 valid table which makes us derive bogus equivalences later.
4242 The cheapest way to check this is to assume it for all PHI nodes. */
4243 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
4244 /* Fallthru to iteration. */ ;
4245 else
4247 visit_use (use);
4248 return;
4252 if (dump_file && (dump_flags & TDF_DETAILS))
4253 print_scc (dump_file, scc);
4255 /* Iterate over the SCC with the optimistic table until it stops
4256 changing. */
4257 current_info = optimistic_info;
4258 while (changed)
4260 changed = false;
4261 iterations++;
4262 if (dump_file && (dump_flags & TDF_DETAILS))
4263 fprintf (dump_file, "Starting iteration %d\n", iterations);
4264 /* As we are value-numbering optimistically we have to
4265 clear the expression tables and the simplified expressions
4266 in each iteration until we converge. */
4267 optimistic_info->nary->empty ();
4268 optimistic_info->phis->empty ();
4269 optimistic_info->references->empty ();
4270 obstack_free (&optimistic_info->nary_obstack, NULL);
4271 gcc_obstack_init (&optimistic_info->nary_obstack);
4272 optimistic_info->phis_pool->release ();
4273 optimistic_info->references_pool->release ();
4274 FOR_EACH_VEC_ELT (scc, i, var)
4275 gcc_assert (!VN_INFO (var)->needs_insertion
4276 && VN_INFO (var)->expr == NULL);
4277 FOR_EACH_VEC_ELT (scc, i, var)
4278 changed |= visit_use (var);
4281 if (dump_file && (dump_flags & TDF_DETAILS))
4282 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4283 statistics_histogram_event (cfun, "SCC iterations", iterations);
4285 /* Finally, copy the contents of the no longer used optimistic
4286 table to the valid table. */
4287 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
4288 copy_nary (nary, valid_info);
4289 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
4290 copy_phi (phi, valid_info);
4291 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
4292 ref, vn_reference_t, hir)
4293 copy_reference (ref, valid_info);
4295 current_info = valid_info;
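/* A minimal source-level sketch (hypothetical example) of an SSA cycle the
   optimistic iteration above is meant to resolve: i and j stay equal around
   the loop, which is only discovered by optimistically assuming their PHI
   nodes are equal and iterating until the assumption is confirmed.

       int
       lockstep (int n)
       {
         int i = 0, j = 0;
         while (i < n)
           {
             i = i + 1;
             j = j + 1;   // optimistically j == i; the SCC iteration confirms it
           }
         return i - j;    // can then be value numbered to 0
       }
*/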
4299 /* Pop the components of the found SCC for NAME off the SCC stack
4300 and process them. Returns true if all went well, false if
4301 we run into resource limits. */
4303 static bool
4304 extract_and_process_scc_for_name (tree name)
4306 auto_vec<tree> scc;
4307 tree x;
4309 /* Found an SCC, pop the components off the SCC stack and
4310 process them. */
4313 x = sccstack.pop ();
4315 VN_INFO (x)->on_sccstack = false;
4316 scc.safe_push (x);
4317 } while (x != name);
4319  /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
4320 if (scc.length ()
4321 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4323 if (dump_file)
4324 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
4325 "SCC size %u exceeding %u\n", scc.length (),
4326 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4328 return false;
4331 if (scc.length () > 1)
4332 sort_scc (scc);
4334 process_scc (scc);
4336 return true;
4339 /* Depth first search on NAME to discover and process SCC's in the SSA
4340 graph.
4341 Execution of this algorithm relies on the fact that the SCC's are
4342 popped off the stack in topological order.
4343 Returns true if successful, false if we stopped processing SCC's due
4344 to resource constraints. */
4346 static bool
4347 DFS (tree name)
4349 auto_vec<ssa_op_iter> itervec;
4350 auto_vec<tree> namevec;
4351 use_operand_p usep = NULL;
4352 gimple *defstmt;
4353 tree use;
4354 ssa_op_iter iter;
4356 start_over:
4357 /* SCC info */
4358 VN_INFO (name)->dfsnum = next_dfs_num++;
4359 VN_INFO (name)->visited = true;
4360 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4362 sccstack.safe_push (name);
4363 VN_INFO (name)->on_sccstack = true;
4364 defstmt = SSA_NAME_DEF_STMT (name);
4366 /* Recursively DFS on our operands, looking for SCC's. */
4367 if (!gimple_nop_p (defstmt))
4369 /* Push a new iterator. */
4370 if (gphi *phi = dyn_cast <gphi *> (defstmt))
4371 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4372 else
4373 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4375 else
4376 clear_and_done_ssa_iter (&iter);
4378 while (1)
4380 /* If we are done processing uses of a name, go up the stack
4381 of iterators and process SCCs as we found them. */
4382 if (op_iter_done (&iter))
4384 /* See if we found an SCC. */
4385 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4386 if (!extract_and_process_scc_for_name (name))
4387 return false;
4389 /* Check if we are done. */
4390 if (namevec.is_empty ())
4391 return true;
4393 /* Restore the last use walker and continue walking there. */
4394 use = name;
4395 name = namevec.pop ();
4396 memcpy (&iter, &itervec.last (),
4397 sizeof (ssa_op_iter));
4398 itervec.pop ();
4399 goto continue_walking;
4402 use = USE_FROM_PTR (usep);
4404 /* Since we handle phi nodes, we will sometimes get
4405 invariants in the use expression. */
4406 if (TREE_CODE (use) == SSA_NAME)
4408 if (! (VN_INFO (use)->visited))
4410 /* Recurse by pushing the current use walking state on
4411 the stack and starting over. */
4412 itervec.safe_push (iter);
4413 namevec.safe_push (name);
4414 name = use;
4415 goto start_over;
4417 continue_walking:
4418 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4419 VN_INFO (use)->low);
4421 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4422 && VN_INFO (use)->on_sccstack)
4424 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4425 VN_INFO (name)->low);
4429 usep = op_iter_next_use (&iter);
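/* For reference, a recursive sketch (hypothetical and simplified) of the
   Tarjan walk that the iterative code above implements with the explicit
   namevec/itervec stacks; the real implementation avoids recursion so deep
   SSA use chains cannot overflow the call stack.

       static void
       dfs_sketch (tree name)
       {
         VN_INFO (name)->dfsnum = VN_INFO (name)->low = next_dfs_num++;
         VN_INFO (name)->visited = true;
         sccstack.safe_push (name);
         VN_INFO (name)->on_sccstack = true;
         // For each SSA_NAME use of the defining statement of NAME:
         //   if it is unvisited, recurse on it and then merge its low value:
         //     VN_INFO (name)->low = MIN (VN_INFO (name)->low, VN_INFO (use)->low);
         //   else if it is still on the SCC stack:
         //     VN_INFO (name)->low = MIN (VN_INFO (name)->low, VN_INFO (use)->dfsnum);
         if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
           extract_and_process_scc_for_name (name);
       }
*/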
4433 /* Allocate a value number table. */
4435 static void
4436 allocate_vn_table (vn_tables_t table)
4438 table->phis = new vn_phi_table_type (23);
4439 table->nary = new vn_nary_op_table_type (23);
4440 table->references = new vn_reference_table_type (23);
4442 gcc_obstack_init (&table->nary_obstack);
4443 table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4444 table->references_pool = new object_allocator<vn_reference_s>
4445 ("VN references");
4448 /* Free a value number table. */
4450 static void
4451 free_vn_table (vn_tables_t table)
4453 delete table->phis;
4454 table->phis = NULL;
4455 delete table->nary;
4456 table->nary = NULL;
4457 delete table->references;
4458 table->references = NULL;
4459 obstack_free (&table->nary_obstack, NULL);
4460 delete table->phis_pool;
4461 delete table->references_pool;
4464 static void
4465 init_scc_vn (void)
4467 int j;
4468 int *rpo_numbers_temp;
4470 calculate_dominance_info (CDI_DOMINATORS);
4471 mark_dfs_back_edges ();
4473 sccstack.create (0);
4474 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4476 constant_value_ids = BITMAP_ALLOC (NULL);
4478 next_dfs_num = 1;
4479 next_value_id = 1;
4481 vn_ssa_aux_table.create (num_ssa_names + 1);
4482  /* VEC_alloc doesn't actually grow it to the right size; it just
4483    preallocates the space to do so. */
4484 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4485 gcc_obstack_init (&vn_ssa_aux_obstack);
4487 shared_lookup_phiargs.create (0);
4488 shared_lookup_references.create (0);
4489 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4490 rpo_numbers_temp =
4491 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4492 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4494  /* rpo_numbers_temp is an array of the RPO ordering: rpo[i] = bb means
4495    that the i'th block in RPO order is bb.  We want to map bb's to their
4496    RPO numbers, so we need to rearrange this array. */
4497 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4498 rpo_numbers[rpo_numbers_temp[j]] = j;
4500 XDELETE (rpo_numbers_temp);
4502 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
4504 renumber_gimple_stmt_uids ();
4506 /* Create the valid and optimistic value numbering tables. */
4507 valid_info = XCNEW (struct vn_tables_s);
4508 allocate_vn_table (valid_info);
4509 optimistic_info = XCNEW (struct vn_tables_s);
4510 allocate_vn_table (optimistic_info);
4511 current_info = valid_info;
4513 /* Create the VN_INFO structures, and initialize value numbers to
4514 TOP or VARYING for parameters. */
4515 size_t i;
4516 tree name;
4518 FOR_EACH_SSA_NAME (i, name, cfun)
4520 VN_INFO_GET (name)->valnum = VN_TOP;
4521 VN_INFO (name)->needs_insertion = false;
4522 VN_INFO (name)->expr = NULL;
4523 VN_INFO (name)->value_id = 0;
4525 if (!SSA_NAME_IS_DEFAULT_DEF (name))
4526 continue;
4528 switch (TREE_CODE (SSA_NAME_VAR (name)))
4530 case VAR_DECL:
4531 /* Undefined vars keep TOP. */
4532 break;
4534 case PARM_DECL:
4535 /* Parameters are VARYING but we can record a condition
4536 if we know it is a non-NULL pointer. */
4537 VN_INFO (name)->visited = true;
4538 VN_INFO (name)->valnum = name;
4539 if (POINTER_TYPE_P (TREE_TYPE (name))
4540 && nonnull_arg_p (SSA_NAME_VAR (name)))
4542 tree ops[2];
4543 ops[0] = name;
4544 ops[1] = build_int_cst (TREE_TYPE (name), 0);
4545 vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4546 boolean_true_node, 0);
4547 if (dump_file && (dump_flags & TDF_DETAILS))
4549 fprintf (dump_file, "Recording ");
4550 print_generic_expr (dump_file, name, TDF_SLIM);
4551 fprintf (dump_file, " != 0\n");
4554 break;
4556 case RESULT_DECL:
4557      /* If the result is passed by invisible reference, the default
4558        def is initialized; otherwise it's uninitialized. */
4559 if (DECL_BY_REFERENCE (SSA_NAME_VAR (name)))
4561 VN_INFO (name)->visited = true;
4562 VN_INFO (name)->valnum = name;
4564 break;
4566 default:
4567 gcc_unreachable ();
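/* A minimal source-level sketch (hypothetical example) of the PARM_DECL
   handling above: for a pointer parameter known to be non-NULL, the
   recorded "p != 0" expression lets a later NULL test value number to a
   constant.

       extern int deref (int *p) __attribute__ ((nonnull));

       int
       deref (int *p)
       {
         if (p == 0)      // value numbers to false via the recorded p != 0
           return -1;
         return *p;
       }
*/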
4572 /* Restore SSA info that has been reset on value leaders. */
4574 void
4575 scc_vn_restore_ssa_info (void)
4577 unsigned i;
4578 tree name;
4580 FOR_EACH_SSA_NAME (i, name, cfun)
4582 if (has_VN_INFO (name))
4584 if (VN_INFO (name)->needs_insertion)
4586 else if (POINTER_TYPE_P (TREE_TYPE (name))
4587 && VN_INFO (name)->info.ptr_info)
4588 SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4589 else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4590 && VN_INFO (name)->info.range_info)
4592 SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4593 SSA_NAME_ANTI_RANGE_P (name)
4594 = VN_INFO (name)->range_info_anti_range_p;
4600 void
4601 free_scc_vn (void)
4603 size_t i;
4604 tree name;
4606 delete constant_to_value_id;
4607 constant_to_value_id = NULL;
4608 BITMAP_FREE (constant_value_ids);
4609 shared_lookup_phiargs.release ();
4610 shared_lookup_references.release ();
4611 XDELETEVEC (rpo_numbers);
4613 FOR_EACH_SSA_NAME (i, name, cfun)
4615 if (has_VN_INFO (name)
4616 && VN_INFO (name)->needs_insertion)
4617 release_ssa_name (name);
4619 obstack_free (&vn_ssa_aux_obstack, NULL);
4620 vn_ssa_aux_table.release ();
4622 sccstack.release ();
4623 free_vn_table (valid_info);
4624 XDELETE (valid_info);
4625 free_vn_table (optimistic_info);
4626 XDELETE (optimistic_info);
4628 BITMAP_FREE (const_parms);
4631 /* Set *ID according to RESULT. */
4633 static void
4634 set_value_id_for_result (tree result, unsigned int *id)
4636 if (result && TREE_CODE (result) == SSA_NAME)
4637 *id = VN_INFO (result)->value_id;
4638 else if (result && is_gimple_min_invariant (result))
4639 *id = get_or_alloc_constant_value_id (result);
4640 else
4641 *id = get_next_value_id ();
4644 /* Set the value ids in the valid hash tables. */
4646 static void
4647 set_hashtable_value_ids (void)
4649 vn_nary_op_iterator_type hin;
4650 vn_phi_iterator_type hip;
4651 vn_reference_iterator_type hir;
4652 vn_nary_op_t vno;
4653 vn_reference_t vr;
4654 vn_phi_t vp;
4656 /* Now set the value ids of the things we had put in the hash
4657 table. */
4659 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4660 set_value_id_for_result (vno->result, &vno->value_id);
4662 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4663 set_value_id_for_result (vp->result, &vp->value_id);
4665 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4666 hir)
4667 set_value_id_for_result (vr->result, &vr->value_id);
4670 class sccvn_dom_walker : public dom_walker
4672 public:
4673 sccvn_dom_walker ()
4674 : dom_walker (CDI_DOMINATORS, true), fail (false), cond_stack (0) {}
4676 virtual edge before_dom_children (basic_block);
4677 virtual void after_dom_children (basic_block);
4679 void record_cond (basic_block,
4680 enum tree_code code, tree lhs, tree rhs, bool value);
4681 void record_conds (basic_block,
4682 enum tree_code code, tree lhs, tree rhs, bool value);
4684 bool fail;
4685 auto_vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4686 cond_stack;
4689 /* Record a temporary condition for the BB and its dominated blocks. */
4691 void
4692 sccvn_dom_walker::record_cond (basic_block bb,
4693 enum tree_code code, tree lhs, tree rhs,
4694 bool value)
4696 tree ops[2] = { lhs, rhs };
4697 vn_nary_op_t old = NULL;
4698 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4699 current_info->nary->remove_elt_with_hash (old, old->hashcode);
4700 vn_nary_op_t cond
4701 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4702 value
4703 ? boolean_true_node
4704 : boolean_false_node, 0);
4705 if (dump_file && (dump_flags & TDF_DETAILS))
4707 fprintf (dump_file, "Recording temporarily ");
4708 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4709 fprintf (dump_file, " %s ", get_tree_code_name (code));
4710 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4711 fprintf (dump_file, " == %s%s\n",
4712 value ? "true" : "false",
4713 old ? " (old entry saved)" : "");
4715 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4718 /* Record temporary conditions for the BB and its dominated blocks
4719 according to LHS CODE RHS == VALUE and its dominated conditions. */
4721 void
4722 sccvn_dom_walker::record_conds (basic_block bb,
4723 enum tree_code code, tree lhs, tree rhs,
4724 bool value)
4726 /* Record the original condition. */
4727 record_cond (bb, code, lhs, rhs, value);
4729 if (!value)
4730 return;
4732 /* Record dominated conditions if the condition is true. Note that
4733 the inversion is already recorded. */
4734 switch (code)
4736 case LT_EXPR:
4737 case GT_EXPR:
4738 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4739 record_cond (bb, NE_EXPR, lhs, rhs, true);
4740 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4741 break;
4743 case EQ_EXPR:
4744 record_cond (bb, LE_EXPR, lhs, rhs, true);
4745 record_cond (bb, GE_EXPR, lhs, rhs, true);
4746 record_cond (bb, LT_EXPR, lhs, rhs, false);
4747 record_cond (bb, GT_EXPR, lhs, rhs, false);
4748 break;
4750 default:
4751 break;
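/* A minimal source-level sketch (hypothetical example) of what recording
   the dominated conditions above buys us: on the true edge of "a < b" we
   also know "a != b", so the second comparison folds without any further
   reasoning.

       int
       dominated_cond (int a, int b)
       {
         if (a < b)
           return a != b;   // value numbers to true via the recorded a != b
         return 0;
       }
*/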
4755 /* Restore expressions and values derived from conditionals. */
4757 void
4758 sccvn_dom_walker::after_dom_children (basic_block bb)
4760 while (!cond_stack.is_empty ()
4761 && cond_stack.last ().first == bb)
4763 vn_nary_op_t cond = cond_stack.last ().second.first;
4764 vn_nary_op_t old = cond_stack.last ().second.second;
4765 current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4766 if (old)
4767 vn_nary_op_insert_into (old, current_info->nary, false);
4768 cond_stack.pop ();
4772 /* Value number all statements in BB. */
4774 edge
4775 sccvn_dom_walker::before_dom_children (basic_block bb)
4777 edge e;
4778 edge_iterator ei;
4780 if (fail)
4781 return NULL;
4783 if (dump_file && (dump_flags & TDF_DETAILS))
4784 fprintf (dump_file, "Visiting BB %d\n", bb->index);
4786  /* If we have a single predecessor, record the equivalence from a
4787    possible condition on the predecessor edge. */
4788 edge pred_e = NULL;
4789 FOR_EACH_EDGE (e, ei, bb->preds)
4791      /* Ignore simple backedges from this block to allow recording conditions
4792        in loop headers. */
4793 if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
4794 continue;
4795 if (! pred_e)
4796 pred_e = e;
4797 else
4799 pred_e = NULL;
4800 break;
4803 if (pred_e)
4805 /* Check if there are multiple executable successor edges in
4806 the source block. Otherwise there is no additional info
4807 to be recorded. */
4808 edge e2;
4809 FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4810 if (e2 != pred_e
4811 && e2->flags & EDGE_EXECUTABLE)
4812 break;
4813 if (e2 && (e2->flags & EDGE_EXECUTABLE))
4815 gimple *stmt = last_stmt (pred_e->src);
4816 if (stmt
4817 && gimple_code (stmt) == GIMPLE_COND)
4819 enum tree_code code = gimple_cond_code (stmt);
4820 tree lhs = gimple_cond_lhs (stmt);
4821 tree rhs = gimple_cond_rhs (stmt);
4822 record_conds (bb, code, lhs, rhs,
4823 (pred_e->flags & EDGE_TRUE_VALUE) != 0);
4824 code = invert_tree_comparison (code, HONOR_NANS (lhs));
4825 if (code != ERROR_MARK)
4826 record_conds (bb, code, lhs, rhs,
4827 (pred_e->flags & EDGE_TRUE_VALUE) == 0);
4832 /* Value-number all defs in the basic-block. */
4833 for (gphi_iterator gsi = gsi_start_phis (bb);
4834 !gsi_end_p (gsi); gsi_next (&gsi))
4836 gphi *phi = gsi.phi ();
4837 tree res = PHI_RESULT (phi);
4838 if (!VN_INFO (res)->visited
4839 && !DFS (res))
4841 fail = true;
4842 return NULL;
4845 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4846 !gsi_end_p (gsi); gsi_next (&gsi))
4848 ssa_op_iter i;
4849 tree op;
4850 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4851 if (!VN_INFO (op)->visited
4852 && !DFS (op))
4854 fail = true;
4855 return NULL;
4859 /* Finally look at the last stmt. */
4860 gimple *stmt = last_stmt (bb);
4861 if (!stmt)
4862 return NULL;
4864 enum gimple_code code = gimple_code (stmt);
4865 if (code != GIMPLE_COND
4866 && code != GIMPLE_SWITCH
4867 && code != GIMPLE_GOTO)
4868 return NULL;
4870 if (dump_file && (dump_flags & TDF_DETAILS))
4872 fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
4873 print_gimple_stmt (dump_file, stmt, 0, 0);
4876 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4877 if value-numbering can prove they are not reachable. Handling
4878 computed gotos is also possible. */
4879 tree val;
4880 switch (code)
4882 case GIMPLE_COND:
4884 tree lhs = vn_valueize (gimple_cond_lhs (stmt));
4885 tree rhs = vn_valueize (gimple_cond_rhs (stmt));
4886 val = gimple_simplify (gimple_cond_code (stmt),
4887 boolean_type_node, lhs, rhs,
4888 NULL, vn_valueize);
4889 /* If that didn't simplify to a constant see if we have recorded
4890 temporary expressions from taken edges. */
4891 if (!val || TREE_CODE (val) != INTEGER_CST)
4893 tree ops[2];
4894 ops[0] = lhs;
4895 ops[1] = rhs;
4896 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
4897 boolean_type_node, ops, NULL);
4899 break;
4901 case GIMPLE_SWITCH:
4902 val = gimple_switch_index (as_a <gswitch *> (stmt));
4903 break;
4904 case GIMPLE_GOTO:
4905 val = gimple_goto_dest (stmt);
4906 break;
4907 default:
4908 gcc_unreachable ();
4910 if (!val)
4911 return NULL;
4913 edge taken = find_taken_edge (bb, vn_valueize (val));
4914 if (!taken)
4915 return NULL;
4917 if (dump_file && (dump_flags & TDF_DETAILS))
4918 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4919 "not executable\n", bb->index, bb->index, taken->dest->index);
4921 return taken;
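/* A minimal source-level sketch (hypothetical example) of the edge marking
   above: the controlling condition value numbers to a constant, so all but
   the taken outgoing edge are marked not executable and the dead arm never
   contributes values.

       int
       dead_arm (int x)
       {
         int c = 1;
         if (c)           // folds to true; the false edge becomes not executable
           return x;
         return x + 1;    // ignored by value numbering
       }
*/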
4924 /* Do SCCVN. Returns true if it finished, false if we bailed out
4925 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
4926 how we use the alias oracle walking during the VN process. */
4928 bool
4929 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4931 size_t i;
4933 default_vn_walk_kind = default_vn_walk_kind_;
4935 init_scc_vn ();
4937 /* Collect pointers we know point to readonly memory. */
4938 const_parms = BITMAP_ALLOC (NULL);
4939 tree fnspec = lookup_attribute ("fn spec",
4940 TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
4941 if (fnspec)
4943 fnspec = TREE_VALUE (TREE_VALUE (fnspec));
4944 i = 1;
4945 for (tree arg = DECL_ARGUMENTS (cfun->decl);
4946 arg; arg = DECL_CHAIN (arg), ++i)
4948 if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
4949 break;
4950 if (TREE_STRING_POINTER (fnspec)[i] == 'R'
4951 || TREE_STRING_POINTER (fnspec)[i] == 'r')
4953 tree name = ssa_default_def (cfun, arg);
4954 if (name)
4955 bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
4960 /* Walk all blocks in dominator order, value-numbering stmts
4961 SSA defs and decide whether outgoing edges are not executable. */
4962 sccvn_dom_walker walker;
4963 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4964 if (walker.fail)
4966 scc_vn_restore_ssa_info ();
4967 free_scc_vn ();
4968 return false;
4971 /* Initialize the value ids and prune out remaining VN_TOPs
4972 from dead code. */
4973 tree name;
4975 FOR_EACH_SSA_NAME (i, name, cfun)
4977 vn_ssa_aux_t info = VN_INFO (name);
4978 if (!info->visited)
4979 info->valnum = name;
4980 if (info->valnum == name
4981 || info->valnum == VN_TOP)
4982 info->value_id = get_next_value_id ();
4983 else if (is_gimple_min_invariant (info->valnum))
4984 info->value_id = get_or_alloc_constant_value_id (info->valnum);
4987 /* Propagate. */
4988 FOR_EACH_SSA_NAME (i, name, cfun)
4990 vn_ssa_aux_t info = VN_INFO (name);
4991 if (TREE_CODE (info->valnum) == SSA_NAME
4992 && info->valnum != name
4993 && info->value_id != VN_INFO (info->valnum)->value_id)
4994 info->value_id = VN_INFO (info->valnum)->value_id;
4997 set_hashtable_value_ids ();
4999 if (dump_file && (dump_flags & TDF_DETAILS))
5001 fprintf (dump_file, "Value numbers:\n");
5002 FOR_EACH_SSA_NAME (i, name, cfun)
5004 if (VN_INFO (name)->visited
5005 && SSA_VAL (name) != name)
5007 print_generic_expr (dump_file, name, 0);
5008 fprintf (dump_file, " = ");
5009 print_generic_expr (dump_file, SSA_VAL (name), 0);
5010 fprintf (dump_file, "\n");
5015 return true;
5018 /* Return the maximum value id we have ever seen. */
5020 unsigned int
5021 get_max_value_id (void)
5023 return next_value_id;
5026 /* Return the next unique value id. */
5028 unsigned int
5029 get_next_value_id (void)
5031 return next_value_id++;
5035 /* Compare two expressions E1 and E2 and return true if they are equal. */
5037 bool
5038 expressions_equal_p (tree e1, tree e2)
5040 /* The obvious case. */
5041 if (e1 == e2)
5042 return true;
5044 /* If either one is VN_TOP consider them equal. */
5045 if (e1 == VN_TOP || e2 == VN_TOP)
5046 return true;
5048 /* If only one of them is null, they cannot be equal. */
5049 if (!e1 || !e2)
5050 return false;
5052 /* Now perform the actual comparison. */
5053 if (TREE_CODE (e1) == TREE_CODE (e2)
5054 && operand_equal_p (e1, e2, OEP_PURE_SAME))
5055 return true;
5057 return false;
5061 /* Return true if the nary operation NARY may trap. This is a copy
5062 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
5064 bool
5065 vn_nary_may_trap (vn_nary_op_t nary)
5067 tree type;
5068 tree rhs2 = NULL_TREE;
5069 bool honor_nans = false;
5070 bool honor_snans = false;
5071 bool fp_operation = false;
5072 bool honor_trapv = false;
5073 bool handled, ret;
5074 unsigned i;
5076 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5077 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5078 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5080 type = nary->type;
5081 fp_operation = FLOAT_TYPE_P (type);
5082 if (fp_operation)
5084 honor_nans = flag_trapping_math && !flag_finite_math_only;
5085 honor_snans = flag_signaling_nans != 0;
5087 else if (INTEGRAL_TYPE_P (type)
5088 && TYPE_OVERFLOW_TRAPS (type))
5089 honor_trapv = true;
5091 if (nary->length >= 2)
5092 rhs2 = nary->op[1];
5093 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5094 honor_trapv,
5095 honor_nans, honor_snans, rhs2,
5096 &handled);
5097 if (handled
5098 && ret)
5099 return true;
5101 for (i = 0; i < nary->length; ++i)
5102 if (tree_could_trap_p (nary->op[i]))
5103 return true;
5105 return false;
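/* A minimal source-level sketch (hypothetical example) of an nary operation
   the function above reports as possibly trapping: an integer division whose
   divisor is not known to be nonzero may trap, so it must not be moved above
   the guarding test.

       int
       guarded_div (int a, int b)
       {
         if (b != 0)
           return a / b;   // a / b may trap; vn_nary_may_trap returns true
         return 0;
       }
*/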